ceyda committed
Commit ab8048e
1 Parent(s): 3d0bc0a

fairseq tuned_wer

Files changed (1)
  1. README.md +3 -3
README.md CHANGED
@@ -23,7 +23,7 @@ model-index:
   metrics:
   - name: Test WER
     type: wer
-    value: 27.08
+    value: 24.91
 ---
 
 # Wav2Vec2-Base-760-Turkish
@@ -109,7 +109,7 @@ def evaluate(batch):
     logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits
 
     pred_ids = torch.argmax(logits, dim=-1)
-    batch["pred_strings"] = processor.batch_decode(pred_ids)
+    batch["pred_strings"] = processor.batch_decode(pred_ids,skip_special_tokens=True)
     return batch
 
 result = test_dataset.map(evaluate, batched=True, batch_size=8)
@@ -117,7 +117,7 @@ result = test_dataset.map(evaluate, batched=True, batch_size=8)
 print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"])))
 ```
 
-**Test Result**: 27.08 % (in progress)
+**Test Result**: 24.91 % (in progress)
 
 
 ## Training
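
For context on the change: passing `skip_special_tokens=True` to `processor.batch_decode` strips any special tokens the CTC predictions may contain (e.g. bos/eos or unk markers), so they no longer count as extra words when WER is computed. Below is a minimal sketch of that effect, not part of the commit; the example strings and the leftover `</s>` marker are invented for illustration, and the `wer` metric is assumed to be the 🤗 `datasets` WER metric that the README's evaluation snippet calls.

```python
# Minimal sketch (not from the commit): a leftover special token counts as an
# inserted word and inflates WER. Strings below are made up for illustration.
from datasets import load_metric

wer = load_metric("wer")  # assumed: same WER metric object the README's snippet uses

reference = ["merhaba dünya"]

# Decoded without skip_special_tokens: a special token can leak into the text.
with_special = ["merhaba dünya </s>"]
# Decoded with skip_special_tokens=True: only the transcription remains.
clean = ["merhaba dünya"]

print(wer.compute(predictions=with_special, references=reference))  # 0.5 (one spurious insertion over 2 reference words)
print(wer.compute(predictions=clean, references=reference))         # 0.0
```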