ahricat committed on
Commit
4222d8d
1 Parent(s): 133ee24

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -1,4 +1,4 @@
1
- @app.function()
2
  class InteractiveChat:
3
  def __init__(self, model_name="openai/whisper-large", tts_choice="OpenVoice", **kwargs):
4
  self.whisper_processor = WhisperProcessor.from_pretrained(model_name)
@@ -13,7 +13,7 @@ class InteractiveChat:
13
  predicted_ids = self.whisper_model.generate(input_features)
14
  transcription = self.whisper_processor.batch_decode(predicted_ids, skip_special_tokens=True)[0]
15
 
16
- # Use the transcription as input for Zephyr
17
  response = self.zephyr_pipeline(transcription, max_length=1000)[0]["generated_text"]
18
  return transcription, response
19
 
@@ -50,4 +50,4 @@ with gr.Blocks() as demo:
50
  input_data.change(lambda x, model: model.generate_response(x), inputs=[input_data, model_choice],
51
  outputs=output_text)
52
  input_data.change(lambda x, model: model.speak(x[1]), inputs=[output_text, model_choice],
53
- outputs=None) # Speak the response
 
1
+ @app.function(
2
  class InteractiveChat:
3
  def __init__(self, model_name="openai/whisper-large", tts_choice="OpenVoice", **kwargs):
4
  self.whisper_processor = WhisperProcessor.from_pretrained(model_name)
 
13
  predicted_ids = self.whisper_model.generate(input_features)
14
  transcription = self.whisper_processor.batch_decode(predicted_ids, skip_special_tokens=True)[0]
15
 
16
+
17
  response = self.zephyr_pipeline(transcription, max_length=1000)[0]["generated_text"]
18
  return transcription, response
19
 
 
50
  input_data.change(lambda x, model: model.generate_response(x), inputs=[input_data, model_choice],
51
  outputs=output_text)
52
  input_data.change(lambda x, model: model.speak(x[1]), inputs=[output_text, model_choice],
53
+ outputs=None)) # Speak the response