End of training
- README.md +18 -32
- added_tokens.json +4 -0
- config.json +8 -0
- model.safetensors +1 -1
- runs/Jul21_10-03-31_cs-01k0p4mp7ex7fxc1hfkvvfygh8/events.out.tfevents.1753092422.cs-01k0p4mp7ex7fxc1hfkvvfygh8.21540.0 +3 -0
- runs/Jul21_10-15-24_cs-01k0p4mp7ex7fxc1hfkvvfygh8/events.out.tfevents.1753093122.cs-01k0p4mp7ex7fxc1hfkvvfygh8.27519.0 +3 -0
- special_tokens_map.json +6 -0
- tokenizer_config.json +49 -0
- training_args.bin +1 -1
- vocab.json +97 -0
README.md
CHANGED
@@ -1,7 +1,7 @@
 ---
 library_name: transformers
 license: apache-2.0
-base_model: facebook/wav2vec2-
+base_model: facebook/wav2vec2-large-xlsr-53
 tags:
 - generated_from_trainer
 metrics:
@@ -16,11 +16,11 @@ should probably proofread and complete it, then remove this comment. -->
 
 # model
 
-This model is a fine-tuned version of [facebook/wav2vec2-
+This model is a fine-tuned version of [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss:
-- Wer: 0
-- Cer: 0
+- Loss: 3.6245
+- Wer: 1.0
+- Cer: 1.0
 
 ## Model description
 
@@ -39,40 +39,26 @@ More information needed
 ### Training hyperparameters
 
 The following hyperparameters were used during training:
-- learning_rate:
-- train_batch_size:
-- eval_batch_size:
+- learning_rate: 3e-05
+- train_batch_size: 8
+- eval_batch_size: 8
 - seed: 42
+- gradient_accumulation_steps: 4
+- total_train_batch_size: 32
 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
 - lr_scheduler_type: linear
-- lr_scheduler_warmup_steps:
-- num_epochs:
+- lr_scheduler_warmup_steps: 500
+- num_epochs: 30
 - mixed_precision_training: Native AMP
 
 ### Training results
 
-| Training Loss | Epoch
-
-
-
-
-
-| 1.0088 | 24.8756 | 5000 | 1.0035 | 0.8149 | 0.2409 |
-| 0.8933 | 29.8507 | 6000 | 1.0224 | 0.8326 | 0.2442 |
-| 0.7856 | 34.8259 | 7000 | 1.0826 | 0.7804 | 0.2334 |
-| 0.7118 | 39.8010 | 8000 | 1.1140 | 0.7814 | 0.2331 |
-| 0.6442 | 44.7761 | 9000 | 1.1626 | 0.7857 | 0.2319 |
-| 0.5947 | 49.7512 | 10000 | 1.1976 | 0.7798 | 0.2318 |
-| 0.5439 | 54.7264 | 11000 | 1.2419 | 0.7835 | 0.2234 |
-| 0.4957 | 59.7015 | 12000 | 1.3443 | 0.7523 | 0.2220 |
-| 0.4581 | 64.6766 | 13000 | 1.3568 | 0.7704 | 0.2216 |
-| 0.4346 | 69.6517 | 14000 | 1.3921 | 0.7643 | 0.2227 |
-| 0.4044 | 74.6269 | 15000 | 1.4720 | 0.7572 | 0.2208 |
-| 0.3794 | 79.6020 | 16000 | 1.4621 | 0.7496 | 0.2189 |
-| 0.3586 | 84.5771 | 17000 | 1.4913 | 0.7460 | 0.2198 |
-| 0.3432 | 89.5522 | 18000 | 1.5223 | 0.7460 | 0.2195 |
-| 0.3219 | 94.5274 | 19000 | 1.5370 | 0.7392 | 0.2162 |
-| 0.3144 | 99.5025 | 20000 | 1.5326 | 0.7415 | 0.2170 |
+| Training Loss | Epoch | Step | Validation Loss | Wer | Cer |
+|:-------------:|:------:|:----:|:---------------:|:---:|:---:|
+| 5.867 | 1.9963 | 400 | 4.7211 | 1.0 | 1.0 |
+| 3.9734 | 3.9963 | 800 | 3.8661 | 1.0 | 1.0 |
+| 3.8277 | 5.9963 | 1200 | 3.8014 | 1.0 | 1.0 |
+| 3.7313 | 7.9963 | 1600 | 3.6245 | 1.0 | 1.0 |
 
 
 ### Framework versions
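The hyperparameter list above maps directly onto `transformers.TrainingArguments`. A minimal sketch of that mapping, assuming the standard `Trainer` setup; only the values listed in the card are real, and the output directory name is a placeholder, not something recorded in this commit:

```python
# Hypothetical reconstruction of the TrainingArguments implied by the model card.
# Only the hyperparameter values come from the card; output_dir is a placeholder.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="wav2vec2-large-xlsr-53-finetuned",  # placeholder
    per_device_train_batch_size=8,       # train_batch_size: 8
    per_device_eval_batch_size=8,        # eval_batch_size: 8
    gradient_accumulation_steps=4,       # total_train_batch_size: 8 * 4 = 32
    learning_rate=3e-5,
    warmup_steps=500,
    num_train_epochs=30,
    lr_scheduler_type="linear",
    optim="adamw_torch",                 # AdamW, betas=(0.9, 0.999), eps=1e-08
    seed=42,
    fp16=True,                           # "Native AMP" mixed precision
)
```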
added_tokens.json
ADDED
@@ -0,0 +1,4 @@
+{
+  "</s>": 96,
+  "<s>": 95
+}
config.json
CHANGED
@@ -59,12 +59,20 @@
   "intermediate_size": 4096,
   "layer_norm_eps": 1e-05,
   "layerdrop": 0.1,
+  "mask_channel_length": 10,
+  "mask_channel_min_space": 1,
+  "mask_channel_other": 0.0,
+  "mask_channel_prob": 0.0,
+  "mask_channel_selection": "static",
   "mask_feature_length": 10,
   "mask_feature_min_masks": 0,
   "mask_feature_prob": 0.0,
   "mask_time_length": 10,
   "mask_time_min_masks": 2,
+  "mask_time_min_space": 1,
+  "mask_time_other": 0.0,
   "mask_time_prob": 0.05,
+  "mask_time_selection": "static",
   "model_type": "wav2vec2",
   "num_adapter_layers": 3,
   "num_attention_heads": 16,
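The keys added to config.json are all SpecAugment-style masking settings; the fields that actually drive masking during fine-tuning are `mask_time_*` and `mask_feature_*`. A small sketch of reading them back through `Wav2Vec2Config`; the repo id is a placeholder, not from this commit:

```python
# Sketch: inspect the SpecAugment-related fields of the saved config.
# "your-username/your-model" is a placeholder repo id.
from transformers import Wav2Vec2Config

config = Wav2Vec2Config.from_pretrained("your-username/your-model")
print(config.mask_time_prob, config.mask_time_length)        # 0.05, 10
print(config.mask_feature_prob, config.mask_feature_length)  # 0.0, 10
```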
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a938469e1f7258ad0dd72fe0579c2e5a9b70551d3ec6bb4768e50d12d1d38538
 size 1262205180
runs/Jul21_10-03-31_cs-01k0p4mp7ex7fxc1hfkvvfygh8/events.out.tfevents.1753092422.cs-01k0p4mp7ex7fxc1hfkvvfygh8.21540.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a8eef627e54be06da2f9d5b7e5f9991e5b47603262ba69b80260e3f6d7152c2f
+size 6772
runs/Jul21_10-15-24_cs-01k0p4mp7ex7fxc1hfkvvfygh8/events.out.tfevents.1753093122.cs-01k0p4mp7ex7fxc1hfkvvfygh8.27519.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85609f0a5a3c2e76ba319edce9ddbb3a71d6a3bdc62a9a36ef19d8efb35377cc
+size 11751
special_tokens_map.json
ADDED
@@ -0,0 +1,6 @@
+{
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "[PAD]",
+  "unk_token": "[UNK]"
+}
tokenizer_config.json
ADDED
@@ -0,0 +1,49 @@
+{
+  "added_tokens_decoder": {
+    "93": {
+      "content": "[UNK]",
+      "lstrip": true,
+      "normalized": false,
+      "rstrip": true,
+      "single_word": false,
+      "special": false
+    },
+    "94": {
+      "content": "[PAD]",
+      "lstrip": true,
+      "normalized": false,
+      "rstrip": true,
+      "single_word": false,
+      "special": false
+    },
+    "95": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "96": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "do_lower_case": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "[PAD]",
+  "processor_class": "Wav2Vec2Processor",
+  "replace_word_delimiter_char": " ",
+  "target_lang": null,
+  "tokenizer_class": "Wav2Vec2CTCTokenizer",
+  "unk_token": "[UNK]",
+  "word_delimiter_token": "|"
+}
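tokenizer_config.json declares a `Wav2Vec2CTCTokenizer` with `|` as the word delimiter and `[PAD]`/`[UNK]` as the padding and unknown tokens. A hedged sketch of how these files are typically consumed at inference time, assuming the repo also holds a feature-extractor config (preprocessor_config.json, which is not part of this diff); the repo id and audio array are placeholders:

```python
# Sketch: load the processor declared by these tokenizer files and run greedy
# CTC decoding. Repo id and audio are placeholders, not taken from this commit.
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

repo_id = "your-username/your-model"            # placeholder
processor = Wav2Vec2Processor.from_pretrained(repo_id)
model = Wav2Vec2ForCTC.from_pretrained(repo_id)

speech = torch.zeros(16000).numpy()             # stand-in for 1 s of 16 kHz audio
inputs = processor(speech, sampling_rate=16000, return_tensors="pt")

with torch.no_grad():
    logits = model(inputs.input_values).logits  # (batch, frames, vocab_size)

pred_ids = torch.argmax(logits, dim=-1)
# batch_decode collapses CTC repeats/blanks and maps "|" back to spaces
print(processor.batch_decode(pred_ids))
```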
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:dc78a7f8f8c20f650c4a14201b815bf3dbe9933f7d5a1fe9a9547121e8578283
 size 5304
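training_args.bin is the pickled `TrainingArguments` object that `Trainer` saves alongside checkpoints. A small sketch of inspecting it locally, assuming a recent PyTorch where the pickle-unsafe default has to be relaxed explicitly:

```python
# Sketch: training_args.bin is a pickled TrainingArguments object.
# On PyTorch >= 2.6, torch.load defaults to weights_only=True, so arbitrary
# pickled objects need weights_only=False (only do this for files you trust).
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```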
vocab.json
ADDED
@@ -0,0 +1,97 @@
+{
+  "(": 1,
+  ")": 2,
+  "/": 3,
+  "0": 4,
+  "1": 5,
+  "2": 6,
+  "3": 7,
+  "4": 8,
+  "5": 9,
+  "6": 10,
+  "7": 11,
+  "8": 12,
+  "9": 13,
+  "[PAD]": 94,
+  "[UNK]": 93,
+  "a": 14,
+  "b": 15,
+  "c": 16,
+  "e": 17,
+  "f": 18,
+  "g": 19,
+  "h": 20,
+  "i": 21,
+  "k": 22,
+  "l": 23,
+  "m": 24,
+  "n": 25,
+  "o": 26,
+  "p": 27,
+  "r": 28,
+  "s": 29,
+  "t": 30,
+  "u": 31,
+  "v": 32,
+  "w": 33,
+  "y": 34,
+  "|": 0,
+  "ค": 35,
+  "บ": 36,
+  "ย": 37,
+  "ร": 38,
+  "ั": 39,
+  "ກ": 40,
+  "ຂ": 41,
+  "ຄ": 42,
+  "ງ": 43,
+  "ຈ": 44,
+  "ຊ": 45,
+  "ຍ": 46,
+  "ດ": 47,
+  "ຕ": 48,
+  "ຖ": 49,
+  "ທ": 50,
+  "ນ": 51,
+  "ບ": 52,
+  "ປ": 53,
+  "ຜ": 54,
+  "ຝ": 55,
+  "ພ": 56,
+  "ຟ": 57,
+  "ມ": 58,
+  "ຢ": 59,
+  "ຣ": 60,
+  "ລ": 61,
+  "ວ": 62,
+  "ສ": 63,
+  "ຫ": 64,
+  "ອ": 65,
+  "ຮ": 66,
+  "ະ": 67,
+  "ັ": 68,
+  "າ": 69,
+  "ຳ": 70,
+  "ິ": 71,
+  "ີ": 72,
+  "ຶ": 73,
+  "ື": 74,
+  "ຸ": 75,
+  "ູ": 76,
+  "ົ": 77,
+  "ຼ": 78,
+  "ຽ": 79,
+  "ເ": 80,
+  "ແ": 81,
+  "ໂ": 82,
+  "ໃ": 83,
+  "ໄ": 84,
+  "ໆ": 85,
+  "່": 86,
+  "້": 87,
+  "໊": 88,
+  "໋": 89,
+  "ໍ": 90,
+  "ໜ": 91,
+  "ໝ": 92
+}
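vocab.json is a character-level CTC vocabulary: Lao consonants, vowels, and tone marks plus a handful of Latin letters and digits, with `|` at index 0 as the word delimiter and `[UNK]`/`[PAD]` appended at 93/94. A sketch of the usual recipe for building such a file from training transcripts; this is an assumption about the process, not code taken from this repository:

```python
# Hypothetical recipe for producing a vocab.json like the one above: collect the
# unique characters in the training transcripts, swap the space for the "|"
# word delimiter, and append [UNK]/[PAD] at the end.
import json

transcripts = ["ສະບາຍດີ", "wav2vec2 test 123"]  # placeholder training text

chars = sorted(set("".join(transcripts)))
vocab = {ch: idx for idx, ch in enumerate(chars)}

if " " in vocab:                      # use "|" as the word boundary, not a space
    vocab["|"] = vocab.pop(" ")

vocab["[UNK]"] = len(vocab)
vocab["[PAD]"] = len(vocab)

with open("vocab.json", "w", encoding="utf-8") as f:
    json.dump(vocab, f, ensure_ascii=False, indent=2)
```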