vodiylik committed on
Commit
9fff7f6
1 Parent(s): f2017ce

initial model

Files changed (41)
  1. README.md +212 -1
  2. added_tokens.json +1 -0
  3. all_results.json +15 -0
  4. alphabet.json +1 -0
  5. config.json +107 -0
  6. eval_results.json +10 -0
  7. language_model/5gram.bin +3 -0
  8. language_model/attrs.json +1 -0
  9. language_model/unigrams.txt +0 -0
  10. preprocessor_config.json +10 -0
  11. pytorch_model.bin +3 -0
  12. runs/Aug04_21-55-39_nv-compute-01.local/1659632707.4885852/events.out.tfevents.1659632707.nv-compute-01.local.42881.1 +3 -0
  13. runs/Aug04_21-55-39_nv-compute-01.local/events.out.tfevents.1659632707.nv-compute-01.local.42881.0 +3 -0
  14. runs/Aug04_22-07-48_nv-compute-01.local/1659633408.307794/events.out.tfevents.1659633408.nv-compute-01.local.630012.1 +3 -0
  15. runs/Aug04_22-07-48_nv-compute-01.local/events.out.tfevents.1659633408.nv-compute-01.local.630012.0 +3 -0
  16. runs/Aug04_22-18-29_nv-compute-01.local/1659634057.210297/events.out.tfevents.1659634057.nv-compute-01.local.1217099.1 +3 -0
  17. runs/Aug04_22-18-29_nv-compute-01.local/events.out.tfevents.1659634057.nv-compute-01.local.1217099.0 +3 -0
  18. runs/Aug04_22-28-50_nv-compute-01.local/1659634670.2909548/events.out.tfevents.1659634670.nv-compute-01.local.1804142.1 +3 -0
  19. runs/Aug04_22-28-50_nv-compute-01.local/events.out.tfevents.1659634670.nv-compute-01.local.1804142.0 +3 -0
  20. runs/Aug04_23-12-35_nv-compute-01.local/1659637312.655474/events.out.tfevents.1659637312.nv-compute-01.local.2392879.1 +3 -0
  21. runs/Aug04_23-12-35_nv-compute-01.local/events.out.tfevents.1659637312.nv-compute-01.local.2392879.0 +3 -0
  22. runs/Aug04_23-35-25_nv-compute-01.local/1659638671.4950814/events.out.tfevents.1659638671.nv-compute-01.local.2980773.1 +3 -0
  23. runs/Aug04_23-35-25_nv-compute-01.local/events.out.tfevents.1659638671.nv-compute-01.local.2980773.0 +3 -0
  24. runs/Aug05_09-12-07_nv-compute-01.local/1659673263.5076785/events.out.tfevents.1659673263.nv-compute-01.local.3596914.1 +3 -0
  25. runs/Aug05_09-12-07_nv-compute-01.local/events.out.tfevents.1659673263.nv-compute-01.local.3596914.0 +3 -0
  26. runs/Aug05_17-07-26_nv-compute-01.local/1659701790.1811883/events.out.tfevents.1659701790.nv-compute-01.local.41562.1 +3 -0
  27. runs/Aug05_17-07-26_nv-compute-01.local/events.out.tfevents.1659701790.nv-compute-01.local.41562.0 +3 -0
  28. runs/Aug08_10-23-16_nv-compute-01.local/1659936693.8116791/events.out.tfevents.1659936693.nv-compute-01.local.740937.1 +3 -0
  29. runs/Aug08_10-23-16_nv-compute-01.local/events.out.tfevents.1659936693.nv-compute-01.local.740937.0 +3 -0
  30. runs/Aug08_10-23-16_nv-compute-01.local/events.out.tfevents.1660036815.nv-compute-01.local.740937.2 +3 -0
  31. runs/Aug09_14-42-44_nv-compute-01.local/1660038683.2601411/events.out.tfevents.1660038683.nv-compute-01.local.1373764.1 +3 -0
  32. runs/Aug09_14-42-44_nv-compute-01.local/events.out.tfevents.1660038683.nv-compute-01.local.1373764.0 +3 -0
  33. runs/Aug09_14-42-44_nv-compute-01.local/events.out.tfevents.1660038911.nv-compute-01.local.1373764.2 +3 -0
  34. runs/Aug09_18-26-07_nv-compute-01.local/1660052090.4725804/events.out.tfevents.1660052090.nv-compute-01.local.2137132.1 +3 -0
  35. runs/Aug09_18-26-07_nv-compute-01.local/events.out.tfevents.1660052090.nv-compute-01.local.2137132.0 +3 -0
  36. special_tokens_map.json +1 -0
  37. tokenizer_config.json +1 -0
  38. train_results.json +8 -0
  39. trainer_state.json +0 -0
  40. training_args.bin +3 -0
  41. vocab.json +1 -0
README.md CHANGED
@@ -1,3 +1,214 @@
  ---
- license: other
+ language:
+ - uz
+ license: apache-2.0
+ tags:
+ - automatic-speech-recognition
+ - mozilla-foundation/common_voice_10_0
+ - generated_from_trainer
+ datasets:
+ - common_voice_10_0
+ model-index:
+ - name: xls-r-uzbek-cv10
+   results: []
  ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # xls-r-uzbek-cv10
+
+ This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the MOZILLA-FOUNDATION/COMMON_VOICE_10_0 - UZ dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.2491
+ - Wer: 0.2588
+ - Cer: 0.0513
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 3e-05
+ - train_batch_size: 8
+ - eval_batch_size: 8
+ - seed: 42
+ - gradient_accumulation_steps: 4
+ - total_train_batch_size: 32
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_steps: 500
+ - num_epochs: 100.0
+ - mixed_precision_training: Native AMP
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Cer | Validation Loss | Wer |
+ |:-------------:|:-----:|:-----:|:------:|:---------------:|:------:|
+ | 3.1215 | 0.68 | 500 | 1.0 | 3.1188 | 1.0 |
+ | 2.8562 | 1.36 | 1000 | 0.9689 | 2.5724 | 1.0002 |
+ | 1.2709 | 2.04 | 1500 | 0.1471 | 0.6278 | 0.6478 |
+ | 1.0817 | 2.72 | 2000 | 0.1304 | 0.4989 | 0.5931 |
+ | 0.9801 | 3.4 | 2500 | 0.1225 | 0.4582 | 0.5667 |
+ | 0.951 | 4.08 | 3000 | 0.1149 | 0.4239 | 0.5381 |
+ | 0.8834 | 4.76 | 3500 | 0.1092 | 0.4016 | 0.5158 |
+ | 0.857 | 5.44 | 4000 | 0.1047 | 0.3785 | 0.4992 |
+ | 0.8307 | 6.12 | 4500 | 0.1004 | 0.3720 | 0.4811 |
+ | 0.805 | 6.8 | 5000 | 0.0937 | 0.3450 | 0.4537 |
+ | 0.7828 | 7.48 | 5500 | 0.0912 | 0.3421 | 0.4460 |
+ | 0.7789 | 8.16 | 6000 | 0.0890 | 0.3295 | 0.4337 |
+ | 0.755 | 8.84 | 6500 | 0.0862 | 0.3257 | 0.4222 |
+ | 0.7464 | 9.52 | 7000 | 0.0847 | 0.3269 | 0.4155 |
+ | 0.7293 | 10.2 | 7500 | 0.0823 | 0.3121 | 0.4025 |
+ | 0.7283 | 10.88 | 8000 | 0.0789 | 0.2991 | 0.3941 |
+ | 0.7145 | 11.56 | 8500 | 0.0786 | 0.2961 | 0.3868 |
+ | 0.6963 | 12.24 | 9000 | 0.0767 | 0.2972 | 0.3784 |
+ | 0.6981 | 12.92 | 9500 | 0.0757 | 0.2880 | 0.3750 |
+ | 0.6888 | 13.6 | 10000 | 0.0745 | 0.2865 | 0.3703 |
+ | 0.6733 | 14.29 | 10500 | 0.0744 | 0.2887 | 0.3663 |
+ | 0.6701 | 14.97 | 11000 | 0.0735 | 0.2857 | 0.3624 |
+ | 0.6634 | 15.65 | 11500 | 0.0723 | 0.2822 | 0.3581 |
+ | 0.6484 | 16.33 | 12000 | 0.0706 | 0.2778 | 0.3503 |
+ | 0.6626 | 17.01 | 12500 | 0.0697 | 0.2697 | 0.3477 |
+ | 0.6341 | 17.69 | 13000 | 0.0708 | 0.2804 | 0.3511 |
+ | 0.6402 | 18.37 | 13500 | 0.0681 | 0.2665 | 0.3418 |
+ | 0.6343 | 19.05 | 14000 | 0.0687 | 0.2748 | 0.3425 |
+ | 0.6383 | 19.73 | 14500 | 0.0677 | 0.2696 | 0.3383 |
+ | 0.6178 | 20.41 | 15000 | 0.0690 | 0.2743 | 0.3417 |
+ | 0.6097 | 21.09 | 15500 | 0.0671 | 0.2663 | 0.3352 |
+ | 0.6245 | 21.77 | 16000 | 0.0665 | 0.2685 | 0.3318 |
+ | 0.6137 | 22.45 | 16500 | 0.0655 | 0.2700 | 0.3262 |
+ | 0.6018 | 23.13 | 17000 | 0.0652 | 0.2657 | 0.3225 |
+ | 0.6063 | 23.81 | 17500 | 0.0663 | 0.2664 | 0.3276 |
+ | 0.5917 | 24.49 | 18000 | 0.0658 | 0.2725 | 0.3264 |
+ | 0.5984 | 25.17 | 18500 | 0.0643 | 0.2593 | 0.3197 |
+ | 0.5949 | 25.85 | 19000 | 0.0635 | 0.2581 | 0.3161 |
+ | 0.5863 | 26.53 | 19500 | 0.0639 | 0.2543 | 0.3196 |
+ | 0.5858 | 27.21 | 20000 | 0.0628 | 0.2620 | 0.3136 |
+ | 0.5902 | 27.89 | 20500 | 0.0627 | 0.2549 | 0.3157 |
+ | 0.5794 | 28.57 | 21000 | 0.0624 | 0.2543 | 0.3136 |
+ | 0.5744 | 29.25 | 21500 | 0.0620 | 0.2542 | 0.3091 |
+ | 0.5899 | 29.93 | 22000 | 0.0624 | 0.2540 | 0.3122 |
+ | 0.5597 | 30.61 | 22500 | 0.0609 | 0.2500 | 0.3057 |
+ | 0.5595 | 31.29 | 23000 | 0.0616 | 0.2539 | 0.3087 |
+ | 0.5664 | 31.97 | 23500 | 0.0610 | 0.2504 | 0.3070 |
+ | 0.5608 | 32.65 | 24000 | 0.0611 | 0.2535 | 0.3066 |
+ | 0.5557 | 33.33 | 24500 | 0.0608 | 0.2538 | 0.3047 |
+ | 0.5741 | 34.01 | 25000 | 0.0596 | 0.2480 | 0.3009 |
+ | 0.5614 | 34.69 | 25500 | 0.0601 | 0.2516 | 0.3033 |
+ | 0.5436 | 35.37 | 26000 | 0.0601 | 0.2540 | 0.3004 |
+ | 0.555 | 36.05 | 26500 | 0.0595 | 0.2486 | 0.2993 |
+ | 0.5474 | 36.73 | 27000 | 0.0598 | 0.2536 | 0.3003 |
+ | 0.5352 | 37.41 | 27500 | 0.0597 | 0.2589 | 0.2986 |
+ | 0.5489 | 38.1 | 28000 | 0.0586 | 0.2485 | 0.2925 |
+ | 0.5438 | 38.77 | 28500 | 0.0581 | 0.2500 | 0.2908 |
+ | 0.541 | 39.46 | 29000 | 0.0577 | 0.2451 | 0.2879 |
+ | 0.5462 | 40.14 | 29500 | 0.0581 | 0.2510 | 0.2935 |
+ | 0.529 | 40.82 | 30000 | 0.0575 | 0.2435 | 0.2879 |
+ | 0.5169 | 41.5 | 30500 | 0.0572 | 0.2474 | 0.2860 |
+ | 0.5281 | 42.18 | 31000 | 0.0575 | 0.2478 | 0.2884 |
+ | 0.527 | 42.86 | 31500 | 0.0568 | 0.2492 | 0.2845 |
+ | 0.5172 | 43.54 | 32000 | 0.0575 | 0.2451 | 0.2885 |
+ | 0.5154 | 44.22 | 32500 | 0.0574 | 0.2490 | 0.2873 |
+ | 0.5129 | 44.9 | 33000 | 0.0569 | 0.2446 | 0.2853 |
+ | 0.5075 | 45.58 | 33500 | 0.0565 | 0.2485 | 0.2828 |
+ | 0.5077 | 46.26 | 34000 | 0.0559 | 0.2452 | 0.2807 |
+ | 0.5004 | 46.94 | 34500 | 0.0572 | 0.2501 | 0.2882 |
+ | 0.5319 | 47.62 | 35000 | 0.0575 | 0.2516 | 0.2856 |
+ | 0.4956 | 48.3 | 35500 | 0.0567 | 0.2495 | 0.2821 |
+ | 0.5053 | 48.98 | 36000 | 0.0565 | 0.2482 | 0.2825 |
+ | 0.5014 | 49.66 | 36500 | 0.0559 | 0.2441 | 0.2808 |
+ | 0.4945 | 50.34 | 37000 | 0.0562 | 0.2460 | 0.2807 |
+ | 0.51 | 51.02 | 37500 | 0.0547 | 0.2434 | 0.2741 |
+ | 0.5095 | 51.7 | 38000 | 0.0558 | 0.2434 | 0.2790 |
+ | 0.5026 | 52.38 | 38500 | 0.0560 | 0.2478 | 0.2787 |
+ | 0.5081 | 53.06 | 39000 | 0.0566 | 0.2485 | 0.2821 |
+ | 0.5021 | 53.74 | 39500 | 0.0551 | 0.2410 | 0.2752 |
+ | 0.4945 | 54.42 | 40000 | 0.0552 | 0.2436 | 0.2766 |
+ | 0.4882 | 55.1 | 40500 | 0.0555 | 0.2438 | 0.2769 |
+ | 0.497 | 55.78 | 41000 | 0.0550 | 0.2423 | 0.2758 |
+ | 0.4925 | 56.46 | 41500 | 0.0560 | 0.2474 | 0.2790 |
+ | 0.4894 | 57.14 | 42000 | 0.0559 | 0.2497 | 0.2797 |
+ | 0.4767 | 57.82 | 42500 | 0.0556 | 0.2528 | 0.2800 |
+ | 0.4796 | 58.5 | 43000 | 0.0549 | 0.2463 | 0.2755 |
+ | 0.4767 | 59.18 | 43500 | 0.0548 | 0.2452 | 0.2753 |
+ | 0.4786 | 59.86 | 44000 | 0.0551 | 0.2480 | 0.2769 |
+ | 0.4804 | 60.54 | 44500 | 0.0556 | 0.2514 | 0.2789 |
+ | 0.4794 | 61.22 | 45000 | 0.0539 | 0.2391 | 0.2715 |
+ | 0.4789 | 61.9 | 45500 | 0.0546 | 0.2461 | 0.2725 |
+ | 0.4683 | 62.58 | 46000 | 0.0541 | 0.2444 | 0.2707 |
+ | 0.4721 | 63.27 | 46500 | 0.0539 | 0.2468 | 0.2693 |
+ | 0.4792 | 63.94 | 47000 | 0.0546 | 0.2479 | 0.2738 |
+ | 0.4712 | 64.63 | 47500 | 0.0547 | 0.2466 | 0.2742 |
+ | 0.4607 | 65.31 | 48000 | 0.0539 | 0.2503 | 0.2707 |
+ | 0.4712 | 65.99 | 48500 | 0.0543 | 0.2458 | 0.2718 |
+ | 0.4647 | 66.67 | 49000 | 0.0538 | 0.2474 | 0.2693 |
+ | 0.4736 | 67.35 | 49500 | 0.0541 | 0.2514 | 0.2696 |
+ | 0.4718 | 68.03 | 50000 | 0.0540 | 0.2506 | 0.2692 |
+ | 0.4695 | 68.71 | 50500 | 0.0538 | 0.2499 | 0.2675 |
+ | 0.4549 | 69.39 | 51000 | 0.0534 | 0.2491 | 0.2669 |
+ | 0.4605 | 70.07 | 51500 | 0.0532 | 0.2497 | 0.2660 |
+ | 0.4538 | 70.75 | 52000 | 0.0536 | 0.2472 | 0.2684 |
+ | 0.4571 | 71.43 | 52500 | 0.0523 | 0.2441 | 0.2629 |
+ | 0.4608 | 72.11 | 53000 | 0.0529 | 0.2469 | 0.2652 |
+ | 0.4541 | 72.79 | 53500 | 0.0533 | 0.2498 | 0.2673 |
+ | 0.4424 | 73.47 | 54000 | 0.0530 | 0.2504 | 0.2658 |
+ | 0.4482 | 74.15 | 54500 | 0.0534 | 0.2517 | 0.2684 |
+ | 0.4554 | 74.83 | 55000 | 0.0529 | 0.2471 | 0.2656 |
+ | 0.444 | 75.51 | 55500 | 0.0535 | 0.2493 | 0.2675 |
+ | 0.4464 | 76.19 | 56000 | 0.0524 | 0.2461 | 0.2635 |
+ | 0.4436 | 76.87 | 56500 | 0.0526 | 0.2479 | 0.2641 |
+ | 0.4432 | 77.55 | 57000 | 0.0526 | 0.2513 | 0.2641 |
+ | 0.4459 | 78.23 | 57500 | 0.0521 | 0.2460 | 0.2625 |
+ | 0.4433 | 78.91 | 58000 | 0.0521 | 0.2457 | 0.2622 |
+ | 0.4407 | 79.59 | 58500 | 0.0528 | 0.2531 | 0.2659 |
+ | 0.4389 | 80.27 | 59000 | 0.0521 | 0.2485 | 0.2631 |
+ | 0.4384 | 80.95 | 59500 | 0.0522 | 0.2502 | 0.2653 |
+ | 0.4306 | 81.63 | 60000 | 0.0528 | 0.2480 | 0.2665 |
+ | 0.4505 | 82.31 | 60500 | 0.0523 | 0.2461 | 0.2637 |
+ | 0.4442 | 82.99 | 61000 | 0.0523 | 0.2519 | 0.2641 |
+ | 0.4349 | 83.67 | 61500 | 0.0522 | 0.2509 | 0.2625 |
+ | 0.4398 | 84.35 | 62000 | 0.0523 | 0.2510 | 0.2659 |
+ | 0.4398 | 85.03 | 62500 | 0.0526 | 0.2507 | 0.2648 |
+ | 0.4355 | 85.71 | 63000 | 0.0523 | 0.2500 | 0.2653 |
+ | 0.4373 | 86.39 | 63500 | 0.0524 | 0.2523 | 0.2650 |
+ | 0.4391 | 87.07 | 64000 | 0.0523 | 0.2509 | 0.2635 |
+ | 0.4381 | 87.75 | 64500 | 0.0521 | 0.2502 | 0.2635 |
+ | 0.4297 | 88.43 | 65000 | 0.0521 | 0.2521 | 0.2632 |
+ | 0.44 | 89.12 | 65500 | 0.0520 | 0.2507 | 0.2624 |
+ | 0.4313 | 89.8 | 66000 | 0.0519 | 0.2497 | 0.2623 |
+ | 0.4402 | 90.48 | 66500 | 0.0517 | 0.2488 | 0.2608 |
+ | 0.4324 | 91.16 | 67000 | 0.0512 | 0.2485 | 0.2585 |
+ | 0.4317 | 91.84 | 67500 | 0.0513 | 0.2488 | 0.2587 |
+ | 0.437 | 92.52 | 68000 | 0.0513 | 0.2473 | 0.2590 |
+ | 0.4389 | 93.2 | 68500 | 0.0512 | 0.2472 | 0.2581 |
+ | 0.4428 | 93.88 | 69000 | 0.0512 | 0.2475 | 0.2587 |
+ | 0.4294 | 94.56 | 69500 | 0.0513 | 0.2489 | 0.2596 |
+ | 0.4247 | 95.24 | 70000 | 0.0515 | 0.2499 | 0.2597 |
+ | 0.4309 | 95.92 | 70500 | 0.0514 | 0.2493 | 0.2590 |
+ | 0.4366 | 96.6 | 71000 | 0.0512 | 0.2492 | 0.2592 |
+ | 0.4245 | 97.28 | 71500 | 0.0513 | 0.2493 | 0.2587 |
+ | 0.4346 | 97.96 | 72000 | 0.0512 | 0.2478 | 0.2583 |
+ | 0.4289 | 98.64 | 72500 | 0.0512 | 0.2489 | 0.2585 |
+ | 0.4246 | 99.32 | 73000 | 0.0513 | 0.2487 | 0.2589 |
+ | 0.4241 | 100.0 | 73500 | 0.0513 | 0.2491 | 0.2588 |
+
+
+ ### Framework versions
+
+ - Transformers 4.16.2
+ - Pytorch 1.12.0
+ - Datasets 2.4.0
+ - Tokenizers 0.10.3
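
The card above stops short of a usage example, so here is a minimal inference sketch in Python. It assumes the repo id `vodiylik/xls-r-uzbek-cv10` (inferred from the committer and model name; adjust to the actual path), a hypothetical 16 kHz mono recording `sample_uz.wav`, and that `pyctcdecode` and `kenlm` are installed so `Wav2Vec2ProcessorWithLM` can apply the bundled `language_model/5gram.bin` during decoding.

```python
# Minimal inference sketch for the checkpoint in this commit (assumptions noted inline).
import torch
import torchaudio
from transformers import Wav2Vec2ForCTC, Wav2Vec2ProcessorWithLM

repo_id = "vodiylik/xls-r-uzbek-cv10"  # assumed repo id; replace with the actual model path

# Wav2Vec2ProcessorWithLM bundles the feature extractor, the CTC tokenizer and the
# pyctcdecode beam-search decoder built from language_model/5gram.bin.
processor = Wav2Vec2ProcessorWithLM.from_pretrained(repo_id)
model = Wav2Vec2ForCTC.from_pretrained(repo_id)
model.eval()

speech, rate = torchaudio.load("sample_uz.wav")  # hypothetical input file
if rate != 16_000:
    speech = torchaudio.functional.resample(speech, rate, 16_000)

inputs = processor(speech.squeeze().numpy(), sampling_rate=16_000, return_tensors="pt")
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits

# batch_decode on the LM processor runs beam search with the 5-gram KenLM.
print(processor.batch_decode(logits.numpy()).text[0])
```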
added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<s>": 31, "</s>": 32}
all_results.json ADDED
@@ -0,0 +1,15 @@
+ {
+ "epoch": 100.0,
+ "eval_cer": 0.05127812810788953,
+ "eval_loss": 0.2491369992494583,
+ "eval_runtime": 179.6938,
+ "eval_samples": 5785,
+ "eval_samples_per_second": 32.194,
+ "eval_steps_per_second": 4.029,
+ "eval_wer": 0.25884590640821237,
+ "train_loss": 0.0,
+ "train_runtime": 32.0497,
+ "train_samples": 23538,
+ "train_samples_per_second": 73442.278,
+ "train_steps_per_second": 2293.316
+ }
alphabet.json ADDED
@@ -0,0 +1 @@
+ {"labels": [" ", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "\u2018", "\u2019", "\u2047", "", "<s>", "</s>"], "is_bpe": false}
config.json ADDED
@@ -0,0 +1,107 @@
+ {
+ "_name_or_path": "facebook/wav2vec2-xls-r-300m",
+ "activation_dropout": 0.1,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForPreTraining"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 768,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "mean",
+ "ctc_zero_infinity": true,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.05,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.01,
+ "mask_feature_length": 64,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.15,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.5,
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "num_negatives": 100,
+ "output_hidden_size": 1024,
+ "pad_token_id": 30,
+ "proj_codevector_dim": 768,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.16.2",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 33,
+ "xvector_output_dim": 512
+ }
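
A detail worth cross-checking in this config: `vocab_size` is 33, which should equal the 31 characters in `vocab.json` plus the two tokens in `added_tokens.json`, and `pad_token_id` 30 points at `[PAD]`, which `Wav2Vec2ForCTC` also uses as the CTC blank. A small sketch of that check, assuming a local clone of this repo:

```python
# Sketch: cross-check config.json against the tokenizer files in this commit.
import json
from transformers import Wav2Vec2Config

config = Wav2Vec2Config.from_pretrained(".")          # reads config.json from a local clone
vocab = json.load(open("vocab.json"))
added = json.load(open("added_tokens.json"))

assert config.vocab_size == len(vocab) + len(added)   # 33 == 31 + 2
assert config.pad_token_id == vocab["[PAD]"]          # 30, doubles as the CTC blank id
```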
eval_results.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "epoch": 100.0,
+ "eval_cer": 0.05127812810788953,
+ "eval_loss": 0.2491369992494583,
+ "eval_runtime": 179.6938,
+ "eval_samples": 5785,
+ "eval_samples_per_second": 32.194,
+ "eval_steps_per_second": 4.029,
+ "eval_wer": 0.25884590640821237
+ }
language_model/5gram.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c56c82570201d5d07a91f2e891e8be826e563d9042d049d2f1a07c5ffa594f59
+ size 27130351
language_model/attrs.json ADDED
@@ -0,0 +1 @@
+ {"alpha": 0.5, "beta": 1.5, "unk_score_offset": -10.0, "score_boundary": true}
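
`attrs.json` holds the decoder weights that `Wav2Vec2ProcessorWithLM` forwards to pyctcdecode: `alpha` scales the language-model score, `beta` is the word-insertion bonus, and `unk_score_offset` penalizes out-of-vocabulary words. A sketch of building the same beam-search decoder by hand from the files in this commit (paths assume a local clone):

```python
# Sketch: rebuild the beam-search decoder from alphabet.json, attrs.json and 5gram.bin.
# Requires: pip install pyctcdecode kenlm
import json
from pyctcdecode import build_ctcdecoder

with open("alphabet.json") as f:
    labels = json.load(f)["labels"]           # 33 CTC labels, matching vocab_size in config.json
with open("language_model/attrs.json") as f:
    attrs = json.load(f)                      # {"alpha": 0.5, "beta": 1.5, ...}

decoder = build_ctcdecoder(
    labels,
    kenlm_model_path="language_model/5gram.bin",
    alpha=attrs["alpha"],
    beta=attrs["beta"],
    unk_score_offset=attrs["unk_score_offset"],
)
# decoder.decode(logits) then accepts a (time, vocab) numpy array of CTC logits.
```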
language_model/unigrams.txt ADDED
The diff for this file is too large to render. See raw diff
 
preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0,
+ "processor_class": "Wav2Vec2ProcessorWithLM",
+ "return_attention_mask": true,
+ "sampling_rate": 16000
+ }
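
The feature extractor above expects mono 16 kHz input and returns attention masks. Common Voice clips ship at a higher sampling rate, so evaluation data has to be resampled first; a sketch using the `datasets` library follows (the gated `mozilla-foundation/common_voice_10_0` dataset named in the model card requires accepting its terms and logging in with a Hugging Face token):

```python
# Sketch: load the Uzbek Common Voice 10 test split and resample it to the
# 16 kHz expected by the feature extractor defined above.
from datasets import load_dataset, Audio
from transformers import Wav2Vec2FeatureExtractor

feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(".")  # assumes a local clone of this repo

cv_test = load_dataset("mozilla-foundation/common_voice_10_0", "uz", split="test", use_auth_token=True)
cv_test = cv_test.cast_column("audio", Audio(sampling_rate=feature_extractor.sampling_rate))

sample = cv_test[0]["audio"]
inputs = feature_extractor(sample["array"], sampling_rate=sample["sampling_rate"], return_tensors="pt")
print(inputs.input_values.shape, inputs.attention_mask.shape)
```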
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa836b17e74601d72ea693ce0a25110397658fce89d3953e83d1a7e08490c948
+ size 1262058993
runs/Aug04_21-55-39_nv-compute-01.local/1659632707.4885852/events.out.tfevents.1659632707.nv-compute-01.local.42881.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bcd1187d02d951f85d8217612a512dec2f095852f6ec138f810232c62d032d78
+ size 4775
runs/Aug04_21-55-39_nv-compute-01.local/events.out.tfevents.1659632707.nv-compute-01.local.42881.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:18646570681413c5afac0a0396a4c757df6565261a6bc86cbf131b0fc51f65b6
+ size 4699
runs/Aug04_22-07-48_nv-compute-01.local/1659633408.307794/events.out.tfevents.1659633408.nv-compute-01.local.630012.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b1b511754b42cc33f09bec5fdbed85b481d904d28ae8eeeda024b9f6128b69b
+ size 4775
runs/Aug04_22-07-48_nv-compute-01.local/events.out.tfevents.1659633408.nv-compute-01.local.630012.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29349a1a03737cc2a16a4ac12a11ad3ed1d447ba3557fe2ee333d90076768247
+ size 4699
runs/Aug04_22-18-29_nv-compute-01.local/1659634057.210297/events.out.tfevents.1659634057.nv-compute-01.local.1217099.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:045b37331ccd870778fa570fe93cc8d0708702bfa99df00299adcf98ce2b613c
+ size 4775
runs/Aug04_22-18-29_nv-compute-01.local/events.out.tfevents.1659634057.nv-compute-01.local.1217099.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:66c7d9d3e81aa7f8672bcf407f0f92a988e12f90ac8d9d331722a7907f8f2f62
+ size 4699
runs/Aug04_22-28-50_nv-compute-01.local/1659634670.2909548/events.out.tfevents.1659634670.nv-compute-01.local.1804142.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c17522f2dcb19f31a315cbe0474022c6b26a74e6ed67bbbd4e1f576d9836e337
+ size 4775
runs/Aug04_22-28-50_nv-compute-01.local/events.out.tfevents.1659634670.nv-compute-01.local.1804142.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b998f7a3c1c171ea11076a69dbaca6efa7a101b68a4a9c02978f1e8388ae48d
+ size 4699
runs/Aug04_23-12-35_nv-compute-01.local/1659637312.655474/events.out.tfevents.1659637312.nv-compute-01.local.2392879.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57800bdb8fa7bd627891643f708f14c398b8076731661307415e340d76670969
+ size 4775
runs/Aug04_23-12-35_nv-compute-01.local/events.out.tfevents.1659637312.nv-compute-01.local.2392879.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c743533b2ab2ef6757e57875a804f4e2dd27619e5fbfb4462d01e6f9ed858bf
+ size 4699
runs/Aug04_23-35-25_nv-compute-01.local/1659638671.4950814/events.out.tfevents.1659638671.nv-compute-01.local.2980773.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:df05737b6a7820e249451aaa57dad2e67e0d26c2b67e861f8dbb01e8d5744562
+ size 4775
runs/Aug04_23-35-25_nv-compute-01.local/events.out.tfevents.1659638671.nv-compute-01.local.2980773.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8aca76a6b7e21947dccbac14cc7edda4f81c009c37ee90965c22e4b4a2389a22
+ size 4699
runs/Aug05_09-12-07_nv-compute-01.local/1659673263.5076785/events.out.tfevents.1659673263.nv-compute-01.local.3596914.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e0d12a10d670b82e6e172f32722c6828c1abf562456b9bcf7657ed5c09eafb3
+ size 4775
runs/Aug05_09-12-07_nv-compute-01.local/events.out.tfevents.1659673263.nv-compute-01.local.3596914.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2350d2def988755ada841347fef8766c8650d6322ddbcbcfcce1dbda3ed40dc8
+ size 39195
runs/Aug05_17-07-26_nv-compute-01.local/1659701790.1811883/events.out.tfevents.1659701790.nv-compute-01.local.41562.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fbbb1c2a17d69d74391561aff4686aec46da8fec5a6026897d335f34e2f50400
+ size 4775
runs/Aug05_17-07-26_nv-compute-01.local/events.out.tfevents.1659701790.nv-compute-01.local.41562.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0a20b43ea9577b59abbbf8c1043fee60e2c05cc003d1afe2a333d4655dd79cda
+ size 11678
runs/Aug08_10-23-16_nv-compute-01.local/1659936693.8116791/events.out.tfevents.1659936693.nv-compute-01.local.740937.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9da9458e37a5614d505947ca7c14ec796dc9c7326520b7d7978d6768001a32b5
+ size 4775
runs/Aug08_10-23-16_nv-compute-01.local/events.out.tfevents.1659936693.nv-compute-01.local.740937.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a1def292ff77ea4fa1d29f40c8ecd26ea3f6b1aac713487c848455c00b62a99
+ size 135151
runs/Aug08_10-23-16_nv-compute-01.local/events.out.tfevents.1660036815.nv-compute-01.local.740937.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:88644678fb14e17d9015cbc3551035033f0eea3f86708662c4cebbe1a77d181a
+ size 412
runs/Aug09_14-42-44_nv-compute-01.local/1660038683.2601411/events.out.tfevents.1660038683.nv-compute-01.local.1373764.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4345b32268675250d804de20efc3cb9660b72c86a4db00f47e81c19cd8fd7d2e
+ size 4775
runs/Aug09_14-42-44_nv-compute-01.local/events.out.tfevents.1660038683.nv-compute-01.local.1373764.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5fd79264cba9e901bebac44f5fe211392212c391fcce75aaf8363d075d1d9004
+ size 5059
runs/Aug09_14-42-44_nv-compute-01.local/events.out.tfevents.1660038911.nv-compute-01.local.1373764.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:449aba773d02e3ca46c1d93cda083501ea275b5351a4db5c1a95ef43ded03ca2
+ size 412
runs/Aug09_18-26-07_nv-compute-01.local/1660052090.4725804/events.out.tfevents.1660052090.nv-compute-01.local.2137132.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5de40398d971598c96ce3f6806c6151b1eeddf0f18a717bdc336820a5692dea
+ size 4775
runs/Aug09_18-26-07_nv-compute-01.local/events.out.tfevents.1660052090.nv-compute-01.local.2137132.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a761a7d63463e1f5bd2a589c623e83700a496191be11957284a71eefc73bed19
+ size 5059
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "pad_token": "[PAD]", "additional_special_tokens": [{"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}]}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "[PAD]", "do_lower_case": false, "word_delimiter_token": "|", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "xls-r-uzbek-cv10", "tokenizer_class": "Wav2Vec2CTCTokenizer", "processor_class": "Wav2Vec2ProcessorWithLM"}
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "epoch": 100.0,
+ "train_loss": 0.0,
+ "train_runtime": 32.0497,
+ "train_samples": 23538,
+ "train_samples_per_second": 73442.278,
+ "train_steps_per_second": 2293.316
+ }
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68cd067c9de8b7cc29ad7cdfabf6c48590d9ff327f02ddf02abde2f54382bd8a
+ size 3055
vocab.json ADDED
@@ -0,0 +1 @@
+ {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 10, "k": 11, "l": 12, "m": 13, "n": 14, "o": 15, "p": 16, "q": 17, "r": 18, "s": 19, "t": 20, "u": 21, "v": 22, "w": 23, "x": 24, "y": 25, "z": 26, "‘": 27, "’": 28, "|": 0, "[UNK]": 29, "[PAD]": 30}
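
Taken together, `vocab.json`, `added_tokens.json`, `special_tokens_map.json` and `tokenizer_config.json` define the 33-symbol CTC vocabulary: the lowercase Latin letters, the ‘ and ’ apostrophe characters used in Uzbek orthography, `|` as the word delimiter, `[UNK]`, `[PAD]`, and `<s>`/`</s>`. A sketch of rebuilding the tokenizer from these files, assuming a local clone of this repo:

```python
# Sketch: reconstruct the CTC tokenizer from the vocabulary file in this commit.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json",                 # character-to-id map shown above
    unk_token="[UNK]",
    pad_token="[PAD]",
    word_delimiter_token="|",     # "|" stands in for spaces in the CTC labels
)
print(tokenizer.vocab_size)       # 31 entries in vocab.json; <s>/</s> come from added_tokens.json
ids = tokenizer("salom dunyo").input_ids
print(tokenizer.decode(ids))      # -> "salom dunyo"
```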