Vaibhav Srivastav committed on
Commit: d32240b
Parent: 0448aa2

hotfix for decoding logits issue

Files changed (1): app.py (+1, -1)
app.py CHANGED
@@ -32,7 +32,7 @@ def predict_and_decode(input_file):
     speech = load_data(input_file)
     #tokenize
     input_values = processor(speech, return_tensors="pt", sampling_rate=16000).input_values
-    logits = model(input_values).logits
+    logits = model(input_values).logits.cpu().detach().numpy()[0]
     vocab_list = list(processor.tokenizer.get_vocab().keys())
     # #Take argmax
     # predicted_ids = torch.argmax(logits, dim=-1)
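For readers following along: the hotfix moves the logits to CPU, detaches them from the autograd graph, converts them to a NumPy array, and drops the batch dimension, so the decoding step downstream receives a (time_steps, vocab_size) matrix instead of a torch tensor. Below is a minimal sketch of how the changed line could sit inside the surrounding function, assuming a Wav2Vec2 checkpoint and a pyctcdecode beam-search decoder built from vocab_list; everything outside the diffed lines (the checkpoint name, load_data, and the decoder library) is an assumption for illustration, not taken from this commit.

    # Minimal sketch of the decoding path after the hotfix.
    # Assumptions (not from the commit): checkpoint name, pyctcdecode as the decoder, load_data helper.
    import librosa
    import torch
    from transformers import Wav2Vec2Processor, Wav2Vec2ForCTC
    from pyctcdecode import build_ctcdecoder

    processor = Wav2Vec2Processor.from_pretrained("facebook/wav2vec2-base-960h")  # assumed checkpoint
    model = Wav2Vec2ForCTC.from_pretrained("facebook/wav2vec2-base-960h")

    vocab_list = list(processor.tokenizer.get_vocab().keys())
    decoder = build_ctcdecoder(vocab_list)  # CTC beam-search decoder over the tokenizer vocabulary

    def load_data(input_file):
        # Assumed helper: read the audio file and resample to 16 kHz mono.
        speech, _ = librosa.load(input_file, sr=16000)
        return speech

    def predict_and_decode(input_file):
        speech = load_data(input_file)
        # tokenize
        input_values = processor(speech, return_tensors="pt", sampling_rate=16000).input_values
        with torch.no_grad():
            # The hotfix: move to CPU, detach, convert to NumPy, and drop the batch
            # dimension so the decoder sees a (time_steps, vocab_size) array.
            logits = model(input_values).logits.cpu().detach().numpy()[0]
        return decoder.decode(logits)

Greedy decoding (torch.argmax over the vocab axis, as in the commented-out lines) works directly on the torch tensor; the NumPy conversion only becomes necessary once decoding is handed to a library that operates outside of torch.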