<!--
  Scraped file-viewer metadata (not part of the document):
  aai/playground/testapp/index.html
  barreloflube's picture
  Refactor code to update UI buttons in audio_tab()
  70eeaf7 | raw | history | blame | 2.52 kB
-->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Voice Assistant</title>
<style>
body {
font-family: Arial, sans-serif;
margin: 20px;
}
/* Scrollable box where transcription text received over the WebSocket is appended. */
#transcription {
margin-top: 20px;
padding: 10px;
border: 1px solid #ccc;
height: 150px;
overflow-y: auto;
}
/* Player for audio blobs pushed back from the server. */
#audio-player {
margin-top: 20px;
}
</style>
</head>
<body>
<h1>Voice Assistant</h1>
<!-- Recording controls: the script below toggles the disabled state of these buttons. -->
<button id="start-btn">Start Recording</button>
<button id="stop-btn" disabled>Stop Recording</button>
<div id="transcription"></div>
<audio id="audio-player" controls></audio>
<script>
// UI element handles for the recording controls and output areas.
const startBtn = document.getElementById('start-btn');
const stopBtn = document.getElementById('stop-btn');
const transcriptionDiv = document.getElementById('transcription');
const audioPlayer = document.getElementById('audio-player');
// Session state: created in the start handler, torn down in the stop handler.
let websocket;
let mediaRecorder;
// Recorded chunks accumulate here as MediaRecorder emits them.
// NOTE(review): never read back or reset in this file — presumably kept for
// future use; verify whether it is still needed.
let audioChunks = [];
// Start a recording session: open the WebSocket, request the microphone,
// and stream 1-second audio chunks to the server. Incoming text frames are
// appended to the transcription box; incoming binary frames are played back
// as WAV audio.
startBtn.addEventListener('click', async () => {
  startBtn.disabled = true;
  stopBtn.disabled = false;
  audioChunks = []; // reset chunks from any previous session (bug: they accumulated)

  websocket = new WebSocket('ws://localhost:8000/ws');
  websocket.binaryType = 'arraybuffer';
  websocket.onmessage = (event) => {
    if (event.data instanceof ArrayBuffer) {
      // Binary frame: audio from the server.
      const audioBlob = new Blob([event.data], { type: 'audio/wav' });
      // Revoke the previous object URL so repeated replies don't leak blob memory.
      if (audioPlayer.src) {
        URL.revokeObjectURL(audioPlayer.src);
      }
      audioPlayer.src = URL.createObjectURL(audioBlob);
      audioPlayer.play();
    } else {
      // Text frame: transcription text.
      transcriptionDiv.innerText += event.data + '\n';
    }
  };
  websocket.onerror = () => {
    transcriptionDiv.innerText += '[connection error]\n';
  };

  try {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    mediaRecorder = new MediaRecorder(stream);
    mediaRecorder.ondataavailable = (event) => {
      if (event.data.size > 0) {
        audioChunks.push(event.data);
        // The socket may still be CONNECTING when the first chunk arrives;
        // send() on a non-OPEN socket throws, so guard on readyState.
        if (websocket.readyState === WebSocket.OPEN) {
          websocket.send(event.data);
        }
      }
    };
    mediaRecorder.start(1000); // Send audio data every second
  } catch (err) {
    // Mic permission denied or no device: restore the UI instead of leaving
    // the buttons stuck in the "recording" state with a dangling socket.
    startBtn.disabled = false;
    stopBtn.disabled = true;
    websocket.close();
    transcriptionDiv.innerText += `[microphone error: ${err.message}]\n`;
  }
});
// End the recording session: stop the recorder, release the microphone,
// close the WebSocket, and restore the button states.
stopBtn.addEventListener('click', () => {
  startBtn.disabled = false;
  stopBtn.disabled = true;
  // Guard against double-clicks or a click before any recording started.
  if (mediaRecorder && mediaRecorder.state !== 'inactive') {
    mediaRecorder.stop();
    // Release the microphone; without this the browser keeps the mic
    // (and its recording indicator) live after "Stop".
    mediaRecorder.stream.getTracks().forEach((track) => track.stop());
  }
  if (websocket) {
    websocket.close();
  }
});
</script>
</body>
</html>