import gradio as gr
import torch
import torchaudio
import torchaudio.functional as AF
from asr import Transcribe

def transcribe(audio_file, lang_id: str):
    print(f"audio_file={audio_file}")
    print(f"lang_id={lang_id}")
    freq = 16000
    # Load the audio file into a tensor.
    waveform, orig_freq = torchaudio.load(audio_file.name)
    # Resample the audio to 16 kHz if needed.
    if orig_freq != freq:
        waveform = AF.resample(waveform, orig_freq, freq)
    # `transcriber` is the module-level Transcribe instance created below.
    return transcriber(waveform, lang_id), audio_file.name

if __name__ == "__main__":
    transcriber = Transcribe()
    inputs = [gr.File(), gr.Dropdown(choices=["amh", "orm", "som"])]
    outputs = [
        gr.Textbox(label="Transcript"),
        gr.Audio(label="Audio", type="filepath"),
    ]
    app = gr.Interface(transcribe, inputs=inputs, outputs=outputs)
    app.launch()
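
The asr module imported above belongs to this Space and is not shown on this page. For context, here is a minimal sketch of a Transcribe class that would satisfy the interface app.py relies on: a callable that takes a 16 kHz waveform tensor and an ISO 639-3 language code ("amh", "orm", "som") and returns a transcript string. It assumes an MMS-style checkpoint (facebook/mms-1b-all) loaded through transformers; the model name, methods, and structure are assumptions, not the Space's actual implementation.

# asr.py (hypothetical sketch, not the Space's real module)
import torch
from transformers import AutoProcessor, Wav2Vec2ForCTC


class Transcribe:
    def __init__(self, model_id: str = "facebook/mms-1b-all"):
        # Assumed checkpoint: Meta's MMS multilingual ASR model.
        self.processor = AutoProcessor.from_pretrained(model_id)
        self.model = Wav2Vec2ForCTC.from_pretrained(model_id)

    def __call__(self, waveform: torch.Tensor, lang_id: str) -> str:
        # Switch the tokenizer vocabulary and the model's adapter weights
        # to the requested language code (e.g. "amh", "orm", "som").
        self.processor.tokenizer.set_target_lang(lang_id)
        self.model.load_adapter(lang_id)
        # Downmix to mono; app.py has already resampled to 16 kHz.
        audio = waveform.mean(dim=0).numpy()
        inputs = self.processor(audio, sampling_rate=16_000, return_tensors="pt")
        with torch.no_grad():
            logits = self.model(**inputs).logits
        ids = torch.argmax(logits, dim=-1)[0]
        return self.processor.decode(ids)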