//webkitURL is deprecated but nevertheless
URL = window.URL || window.webkitURL;
var gumStream; //stream from getUserMedia()
var rec; //Recorder.js object
var input; //MediaStreamAudioSourceNode we'll be recording
// shim for AudioContext when it's not available
var AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext; //audio context to help us record
var recordButton = document.getElementById("recordButton");
var stopButton = document.getElementById("stopButton");
var recordingsList = document.getElementById("recordingsList"); //list that holds finished recordings; assumes an element with id "recordingsList" exists in the page markup
//var pauseButton = document.getElementById("pauseButton");
//add events to those 2 buttons
recordButton.addEventListener("click", startRecording);
stopButton.addEventListener("click", stopRecording);
//pauseButton.addEventListener("click", pauseRecording);
function startRecording() {
console.log("recordButton clicked");
/*
Simple constraints object, for more advanced audio features see
https://addpipe.com/blog/audio-constraints-getusermedia/
*/
var constraints = { audio: true, video: false };
/*
Disable the record button until we get a success or fail from getUserMedia()
*/
recordButton.disabled = true;
stopButton.disabled = false;
//pauseButton.disabled = false
/*
We're using the standard promise based getUserMedia()
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
*/
navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
console.log("getUserMedia() success, stream created, initializing Recorder.js ...");
/*
create an audio context after getUserMedia is called
sampleRate might change after getUserMedia is called, like it does on macOS when recording through AirPods
the sampleRate defaults to the one set in your OS for your playback device
*/
audioContext = new AudioContext();
//update the format
document.getElementById("formats").innerHTML = "Format: 1 channel pcm @ " + audioContext.sampleRate / 1000 + "kHz"
/* assign to gumStream for later use */
gumStream = stream;
/* use the stream */
input = audioContext.createMediaStreamSource(stream);
/*
Create the Recorder object and configure to record mono sound (1 channel)
Recording 2 channels will double the file size
*/
rec = new Recorder(input, { numChannels: 1 });
//start the recording process
rec.record();
console.log("Recording started");
}).catch(function (err) {
//log the error and re-enable the record button if getUserMedia() fails
console.log("getUserMedia() failed: ", err);
recordButton.disabled = false;
stopButton.disabled = true;
//pauseButton.disabled = true
});
}
//function pauseRecording() {
// console.log("pauseButton clicked rec.recording=", rec.recording);
// if (rec.recording) {
// //pause
// rec.stop();
// pauseButton.innerHTML = "Resume";
// } else {
// //resume
// rec.record()
// pauseButton.innerHTML = "Pause";
// }
//}
function stopRecording() {
console.log("stopButton clicked");
//disable the stop button and enable the record button to allow for new recordings
stopButton.disabled = true;
recordButton.disabled = false;
//pauseButton.disabled = true;
//reset button just in case the recording is stopped while paused
//pauseButton.innerHTML = "Pause";
//tell the recorder to stop the recording
rec.stop();
//stop microphone access
gumStream.getAudioTracks()[0].stop();
//create the wav blob and pass it to the callback below (replaces the original createDownloadLink helper)
//rec.exportWAV(createDownloadLink);
// Export the audio data as a Blob once the recording has finished
rec.exportWAV(function (blob) {
// The callback is invoked with the Blob containing the audio data in WAV format
// Create an object URL for the Blob so it can be played back or downloaded
var url = URL.createObjectURL(blob);
var au = document.createElement('audio');
var li = document.createElement('li');
var link = document.createElement('a');
//name of .wav file to use during upload and download (without extension)
var filename = new Date().toISOString();
//add controls to the <audio> element
au.controls = true;
au.src = url;
//save to disk link
link.href = url;
link.download = filename + ".wav"; //the download attribute forces the browser to download the file using this filename
link.innerHTML = "Save to disk";
//add the new audio element to li
li.appendChild(au);
//add the filename to the li
li.appendChild(document.createTextNode(filename + ".wav "));
//add the save to disk link to li
li.appendChild(link);
//upload link
var upload = document.createElement('a');
upload.href = "#";
upload.innerHTML = "Upload";
//upload.addEventListener("click", function (event) {
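// Upload the recording to the server immediately (the click-to-upload handler above is left commented out)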
var xhr = new XMLHttpRequest();
// Build the multipart form body for the upload
var formData = new FormData();
// "blob" is the recorded audio in WAV format; send it under the "audio_data" field
formData.append("audio_data", blob, "archivo.wav");
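// POST the WAV file to the /escuchar_trauma endpoint; the handlers below log and display whatever the server sends back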
xhr.open("POST", "/escuchar_trauma", true);
xhr.onreadystatechange = function () {
if (xhr.readyState === 4 && xhr.status === 200) {
// Handle the server's response
console.log("Server response:", xhr.responseText);
// Show the recognition result in the text box
//document.getElementById("responseTextBox").value = xhr.responseText;
// Extract the content inside the <p></p> tags
//var parser = new DOMParser();
//var responseHTML = parser.parseFromString(xhr.responseText, 'text/html');
/*var paragraphContent = responseHTML.querySelector('p').textContent;*/
// Show the recognition result in the text box
//document.getElementById("responseTextBox").value = paragraphContent;
// Show the recognition result as plain text
//var textElement = document.getElementById("textElement"); // Replace "textElement" with the appropriate element ID
//textElement.textContent = paragraphContent;
}
};
xhr.send(formData);
// This will be called after the response is received
xhr.onload = function () {
if (xhr.status != 200) {
// analyze HTTP status of the response
alert(`Error ${xhr.status}: ${xhr.statusText}`);
// e.g. 404: Not Found
} else { // show the result
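// $('body').html(...) assumes jQuery is loaded on the page; it replaces the page body with the server's HTML response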
$('body').html(xhr.response);
}
};
//});
//li.appendChild(document.createTextNode(" "))//add a space in between
//li.appendChild(upload)//add the upload link to li
//add the li element to the ol
recordingsList.appendChild(li);
});
}