leenag committed on
Commit 6cf1ec0
1 Parent(s): ccbef6e

End of training

Files changed (2)
  1. README.md +16 -16
  2. generation_config.json +3 -2
README.md CHANGED
@@ -19,8 +19,8 @@ should probably proofread and complete it, then remove this comment. -->
 
  This model is a fine-tuned version of [openai/whisper-small](https://huggingface.co/openai/whisper-small) on the Spoken Bible Corpus: Malasar dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.0136
- - Wer: 6.4752
+ - Loss: 0.0139
+ - Wer: 7.6014
 
  ## Model description
 
@@ -51,21 +51,21 @@ The following hyperparameters were used during training:
 
  ### Training results
 
- | Training Loss | Epoch | Step | Validation Loss | Wer |
- |:-------------:|:-----:|:----:|:---------------:|:-------:|
- | 0.0424 | 0.64 | 250 | 0.0426 | 19.6509 |
- | 0.0247 | 1.28 | 500 | 0.0276 | 12.7252 |
- | 0.0184 | 1.92 | 750 | 0.0254 | 13.8514 |
- | 0.0077 | 2.56 | 1000 | 0.0152 | 7.2072 |
- | 0.005 | 3.21 | 1250 | 0.0139 | 6.9257 |
- | 0.0032 | 3.85 | 1500 | 0.0149 | 6.9257 |
- | 0.0025 | 4.49 | 1750 | 0.0145 | 6.7005 |
- | 0.0009 | 5.13 | 2000 | 0.0136 | 6.4752 |
+ | Training Loss | Epoch | Step | Validation Loss | Wer |
+ |:-------------:|:------:|:----:|:---------------:|:-------:|
+ | 0.042 | 0.6410 | 250 | 0.0392 | 18.1869 |
+ | 0.023 | 1.2821 | 500 | 0.0318 | 14.5833 |
+ | 0.0158 | 1.9231 | 750 | 0.0215 | 10.5293 |
+ | 0.0106 | 2.5641 | 1000 | 0.0175 | 11.5428 |
+ | 0.0035 | 3.2051 | 1250 | 0.0145 | 7.5450 |
+ | 0.0027 | 3.8462 | 1500 | 0.0139 | 9.1779 |
+ | 0.0018 | 4.4872 | 1750 | 0.0144 | 7.5450 |
+ | 0.0016 | 5.1282 | 2000 | 0.0139 | 7.6014 |
 
 
  ### Framework versions
 
- - Transformers 4.37.0.dev0
- - Pytorch 2.0.1+cu117
- - Datasets 2.12.0
- - Tokenizers 0.15.0
+ - Transformers 4.41.2
+ - Pytorch 2.1.2+cu121
+ - Datasets 2.16.0
+ - Tokenizers 0.19.1
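
A minimal usage sketch (not part of this commit) for the checkpoint described in the updated README. The repository id `leenag/whisper-small-malasar` and the audio filename are placeholders, not confirmed by this diff.

```python
# Sketch: transcribe audio with the fine-tuned Whisper-small checkpoint.
# The repo id below is a placeholder (assumption), not taken from this commit.
from transformers import pipeline

asr = pipeline(
    "automatic-speech-recognition",
    model="leenag/whisper-small-malasar",  # placeholder repo id
    chunk_length_s=30,                     # Whisper decodes 30-second windows
)

result = asr("sample_malasar.wav")  # any audio file ffmpeg/librosa can decode
print(result["text"])
```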
generation_config.json CHANGED
@@ -160,10 +160,11 @@
     "<|yo|>": 50325,
     "<|zh|>": 50260
   },
-  "max_initial_timestamp_index": 1,
+  "max_initial_timestamp_index": 50,
   "max_length": 448,
   "no_timestamps_token_id": 50363,
   "pad_token_id": 50257,
+  "prev_sot_token_id": 50361,
   "return_timestamps": false,
   "suppress_tokens": [
     1,
@@ -259,5 +260,5 @@
     "transcribe": 50359,
     "translate": 50358
   },
-  "transformers_version": "4.37.0.dev0"
+  "transformers_version": "4.41.2"
 }
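
As a hedged illustration of how the generation_config.json fields changed above surface at load time, the sketch below reads them back with transformers' `GenerationConfig`; the repo id is again a placeholder.

```python
# Sketch: inspect the generation settings touched by this commit.
# Repo id is a placeholder (assumption); comments reflect the new file contents.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("leenag/whisper-small-malasar")

print(gen_cfg.max_initial_timestamp_index)  # 50 (was 1 before this commit)
print(gen_cfg.prev_sot_token_id)            # 50361, key newly added here
print(gen_cfg.transformers_version)         # "4.41.2"
```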