MuhammadFarhan67 committed on
Commit
d5b821a
1 Parent(s): 5de15bf

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +48 -0
app.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import torch
3
+ from transformers import pipeline
4
+ from gtts import gTTS
5
+ import tempfile
6
+ import os
7
+ from groq import Groq
8
+
9
# Load the Whisper model from Hugging Face.
# Use the GPU when available; `device` is passed straight to the pipeline.
device = "cuda" if torch.cuda.is_available() else "cpu"
whisper_model = pipeline("automatic-speech-recognition", model="openai/whisper-base", device=device)

# Initialize Groq client.
# SECURITY: the API key was previously hard-coded in this file. A key
# committed to source control is compromised and must be rotated; read the
# (new) key from the environment instead of embedding it in code.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
15
+
16
# Function to handle the voice-to-voice conversation
def voice_to_voice_conversation(audio):
    """Transcribe spoken audio, get an LLM reply, and speak it back.

    Args:
        audio: Filesystem path to the recorded clip (the Gradio input
            component is configured with ``type="filepath"``).

    Returns:
        tuple[str, str]: The Whisper transcription of the input and the
        path to a temporary MP3 file containing the synthesized response.
    """
    # Read and transcribe audio using Whisper.
    transcription = whisper_model(audio)["text"]

    # Get response from Groq API using Llama 8b.
    chat_completion = client.chat.completions.create(
        messages=[{"role": "user", "content": transcription}],
        model="llama3-8b-8192",
    )
    response_text = chat_completion.choices[0].message.content

    # Convert text to speech using gTTS and save to a temporary file.
    # Close the temp file BEFORE gTTS writes to it: saving into a path whose
    # handle is still open works only on POSIX and fails on Windows, which is
    # what the original `tts.save` inside the `with` block relied on.
    tts = gTTS(response_text)
    tmp_file = tempfile.NamedTemporaryFile(delete=False, suffix=".mp3")
    tmp_file.close()
    tts.save(tmp_file.name)

    # NOTE(review): the caller (Gradio) never deletes this file, so repeated
    # requests accumulate temp MP3s — acceptable for a demo Space.
    return transcription, tmp_file.name
36
+
37
# Gradio Interface: wire the conversation function into a simple
# microphone-in / transcript-plus-audio-out UI.
mic_input = gr.Audio(type="filepath")
transcript_box = gr.Textbox(label="Transcription")
reply_audio = gr.Audio(label="Response Audio")

interface = gr.Interface(
    fn=voice_to_voice_conversation,
    inputs=mic_input,
    outputs=[transcript_box, reply_audio],
    title="Voice-to-Voice Chatbot",
    description="Speak into the microphone, and the chatbot will respond with a generated voice message.",
    live=False,
)

# Launch the interface
interface.launch()