Commit
·
9939489
1
Parent(s):
da5176d
Create script.js
Browse files
script.js
ADDED
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
// Shared Web Audio plumbing: one AudioContext/analyser pair feeds the
// blob visualizer; sourceNode wraps the <audio id="audioResponse"> element.
let audioContext;
let analyser;
let sourceNode;
// Recorder state for capturing the user's question from the microphone.
let mediaRecorder;
let audioChunks = [];

document.addEventListener("DOMContentLoaded", () => {
  const audioResponseElement = document.getElementById("audioResponse");
  const micButton = document.getElementById("micButton");
  const blobs = document.querySelectorAll(".blob");

  // Handle of the active requestAnimationFrame loop, so repeated
  // visualize() calls do not stack concurrent draw loops (FIX: the
  // original started a new never-cancelled loop on every playback).
  let drawHandle = null;
  // The microphone MediaStream, kept so its tracks can be released
  // once recording stops (FIX: the original never stopped the tracks,
  // leaving the mic live between questions).
  let micStream = null;

  initAudioContext(audioResponseElement);

  /**
   * Create the shared AudioContext and route the response <audio>
   * element through an analyser so the visualizer can read
   * frequency data.
   * @param {HTMLAudioElement} audioElement - the playback element.
   */
  function initAudioContext(audioElement) {
    audioContext = new (window.AudioContext || window.webkitAudioContext)();
    analyser = audioContext.createAnalyser();
    analyser.fftSize = 64;

    // createMediaElementSource may only be called once per element,
    // hence the !sourceNode guard.
    if (audioElement && !sourceNode) {
      sourceNode = audioContext.createMediaElementSource(audioElement);
      sourceNode.connect(analyser);
      analyser.connect(audioContext.destination);
    }
  }

  // Show the "thinking" bubble and lock the mic toggle while the
  // request is in flight.
  function startWaitAnimation() {
    document.getElementById("thought-bubble").style.display = "block";

    micButton.classList.add("disabled");
    micButton.disabled = true;
  }

  // Hide the "thinking" bubble and unlock the mic toggle.
  function stopWaitAnimation() {
    document.getElementById("thought-bubble").style.display = "none";

    micButton.classList.remove("disabled");
    micButton.disabled = false;
  }

  // Re-enable the mic toggle without touching the thought bubble.
  function enableMicButton() {
    micButton.classList.remove("disabled");
    micButton.disabled = false;
  }

  function hideBlobs() {
    document.querySelector(".blob-container").style.display = "none";
  }

  function showBlobs() {
    document.querySelector(".blob-container").style.display = "flex";
  }

  /**
   * Request microphone access and begin capturing audio chunks.
   * On failure the toggle is reset and the wait animation cleared.
   */
  async function startRecording() {
    audioChunks = [];
    try {
      micStream = await navigator.mediaDevices.getUserMedia({ audio: true });
      mediaRecorder = new MediaRecorder(micStream);
      mediaRecorder.ondataavailable = (event) => audioChunks.push(event.data);
      mediaRecorder.start();
    } catch (error) {
      console.error("Error accessing audio devices:", error);
      micButton.checked = false; // Uncheck the button in case of an error
      stopWaitAnimation();
    }
  }

  /**
   * Stop recording, release the microphone, and send the captured
   * audio (base64-encoded) to the server.
   */
  function stopRecording() {
    // FIX: guard against stop before any recording ever started.
    if (!mediaRecorder || mediaRecorder.state === "inactive") {
      return;
    }
    // Attach the handler before calling stop() so it cannot be missed.
    mediaRecorder.onstop = async () => {
      // FIX(leak): stop the tracks so the browser releases the mic.
      if (micStream) {
        micStream.getTracks().forEach((track) => track.stop());
        micStream = null;
      }
      const audioBlob = new Blob(audioChunks, { type: "audio/wav" });
      const audioBase64 = await blobToBase64(audioBlob);
      sendAudioToServer(audioBase64);
    };
    mediaRecorder.stop();
  }

  /**
   * Convert a Blob to its base64 payload (data-URL prefix stripped).
   * @param {Blob} blob
   * @returns {Promise<string>} base64 data; rejects on read error
   *   (FIX: the original promise could never reject).
   */
  function blobToBase64(blob) {
    return new Promise((resolve, reject) => {
      const reader = new FileReader();
      reader.onloadend = () => resolve(reader.result.split(",")[1]);
      reader.onerror = () => reject(reader.error);
      reader.readAsDataURL(blob);
    });
  }

  /**
   * POST the base64 audio question to the backend and play the
   * base64 WAV answer it returns.
   * @param {string} audioBase64 - recorded question, base64-encoded.
   */
  function sendAudioToServer(audioBase64) {
    fetch(
      "https://h8v918qrvg.execute-api.eu-central-1.amazonaws.com/Prod/sts/question",
      {
        method: "POST",
        headers: { "Content-Type": "text/plain" },
        body: audioBase64,
      }
    )
      .then((response) => response.text())
      .then((data) => {
        // FIX: response.text() yields a string, so the original
        // `data.message` check was always undefined and the error
        // branch unreachable. Error responses arrive as a JSON body;
        // successful responses are plain base64 (which is not valid
        // JSON with a .message field).
        let errorPayload = null;
        try {
          errorPayload = JSON.parse(data);
        } catch {
          // Not JSON — treat the body as base64 audio.
        }
        if (
          errorPayload &&
          errorPayload.message === "Question not provided in POST body"
        ) {
          console.error("Question not provided in POST body");
          stopWaitAnimation();
          enableMicButton(); // Re-enable the microphone button
          return; // Stop further processing
        }

        const audioSrc = `data:audio/wav;base64,${data}`;
        audioResponseElement.src = audioSrc;
        audioResponseElement
          .play()
          .then(() => {
            visualize(); // Start the visualizer after the audio starts playing
          })
          .catch((error) => {
            // FIX: autoplay restrictions can reject play(); don't leave
            // the UI stuck in the waiting state.
            console.error("Playback failed:", error);
            stopWaitAnimation();
          });
      })
      .catch((error) => {
        console.error("Error:", error);
        stopWaitAnimation();
      });
  }

  // Checkbox-style toggle: checked = start recording,
  // unchecked = stop recording and wait for the answer.
  micButton.addEventListener("change", () => {
    hideBlobs();
    if (micButton.checked) {
      startRecording();
    } else {
      stopRecording();
      startWaitAnimation();
    }
  });

  /**
   * Drive the blob heights from the analyser's frequency data while
   * the response audio plays.
   */
  function visualize() {
    if (!audioContext) {
      console.error("AudioContext not initialized");
      return;
    }

    if (!sourceNode) {
      console.error("SourceNode not initialized");
      return;
    }
    const bufferLength = analyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);

    // FIX: cancel the previous loop so repeated playbacks don't stack
    // multiple concurrent animation loops.
    if (drawHandle !== null) {
      cancelAnimationFrame(drawHandle);
    }

    function draw() {
      drawHandle = requestAnimationFrame(draw);
      analyser.getByteFrequencyData(dataArray);

      const segmentLength = Math.floor(bufferLength / blobs.length);
      for (let i = 0; i < blobs.length; i++) {
        // Sample one bin per blob. FIX: the original index
        // (i * segmentLength) / 2 could be fractional, reading
        // undefined from the typed array; floor it to a real bin.
        const dataValue =
          dataArray[Math.floor((i * segmentLength) / 2)] || 0;
        // Map byte value (0-255) to a 50-150px blob height.
        const height = (dataValue / 128.0) * 50 + 50;

        blobs[i].style.height = `${height}px`;
      }
    }

    draw();
  }

  audioResponseElement.onended = () => {
    hideBlobs(); // Hide the blobs once the audio has finished playing
    stopWaitAnimation();
    enableMicButton(); // Ensure thought bubble is not showing
  };

  audioResponseElement.onplay = () => {
    showBlobs(); // Show the blobs when audio starts playing
    stopWaitAnimation(); // Hide the thought bubble when audio starts playing
  };

  // Browsers start AudioContexts suspended until a user gesture;
  // resume on the first click anywhere on the page.
  document.body.addEventListener("click", () => {
    if (audioContext && audioContext.state === "suspended") {
      audioContext.resume();
    }
  });
});
|