slplab committed
Commit: 44deeae
Parent: 922184b

Rename handler to handler.py

Files changed (1): handler → handler.py (+3 -5)

handler → handler.py  RENAMED
@@ -12,9 +12,7 @@ import io
 
 class EndpointHandler:
     def __init__(self, path=""):
-        # Load the model and tokenizer
-        # #self.model = WhisperForCTC.from_pretrained(path)
-        self.tokenizer = WhisperTokenizer.from_pretrained(path)
+        self.tokenizer = WhisperTokenizer.from_pretrained("openai/whisper-large", language="korean", task='transcribe')
         self.model = WhisperForConditionalGeneration.from_pretrained(path)
         #self.tokenizer = WhisperTokenizer.from_pretrained(path)
         self.processor = WhisperProcessor.from_pretrained(path, language="korean", task='transcribe')
@@ -33,8 +31,8 @@ class EndpointHandler:
         #print(f'1. inputs: {inputs}')
 
 
-        # inputs, _ = sf.read(io.BytesIO(data['inputs']))
-        inputs, _ = sf.read(data['inputs'])
+        inputs, _ = sf.read(io.BytesIO(data['inputs']))
+        #inputs, _ = sf.read(data['inputs'])
         print(f'2. inputs: {inputs}')
 
         input_features = self.feature_extractor(inputs, sampling_rate=16000).input_features[0]
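
Because the updated hunk decodes the payload with sf.read(io.BytesIO(data['inputs'])), the handler now expects raw audio bytes in the request body rather than a file path. The sketch below is a minimal local smoke test of that contract, not part of the commit; it assumes EndpointHandler exposes the conventional Inference Endpoints __call__(self, data) entry point (not visible in these hunks), and "./model" and "sample_16khz.wav" are hypothetical placeholders.

# Minimal smoke test for handler.py after this change (sketch, not part of the commit).
# Assumption: EndpointHandler defines __call__(self, data), as Hugging Face
# Inference Endpoints custom handlers conventionally do; it is not shown in this diff.
from handler import EndpointHandler

handler = EndpointHandler(path="./model")  # hypothetical local model directory

# Read a 16 kHz mono file as raw bytes, matching sampling_rate=16000 in the handler.
with open("sample_16khz.wav", "rb") as f:  # hypothetical test file
    audio_bytes = f.read()

# The handler decodes the bytes itself via sf.read(io.BytesIO(data['inputs'])),
# so the payload carries the audio content, not a path.
result = handler({"inputs": audio_bytes})
print(result)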