import debug from './debug';

type AddAudioToBufferFunction = (
  samples: Array<number>,
  sampleRate: number,
) => void;

export type BufferedSpeechPlayer = {
  addAudioToBuffer: AddAudioToBufferFunction;
  setGain: (gain: number) => void;
  start: () => void;
  stop: () => void;
};

type Options = {
  onEnded?: () => void;
  onStarted?: () => void;
};

export default function createBufferedSpeechPlayer({
  onStarted,
  onEnded,
}: Options): BufferedSpeechPlayer {
  const audioContext = new AudioContext();
  const gainNode = audioContext.createGain();
  gainNode.connect(audioContext.destination);

  let unplayedAudioBuffers: Array<AudioBuffer> = [];
  let currentPlayingBufferSource: AudioBufferSourceNode | null = null;
  let isPlaying = false;

  // The player starts in the 'stopped' state; you need to call player.start() for it to start playing
  let shouldPlayWhenAudioAvailable = false;

  const setGain = (gain: number) => {
    gainNode.gain.setValueAtTime(gain, audioContext.currentTime);
  };

  const start = () => {
    shouldPlayWhenAudioAvailable = true;
    debug()?.start();
    playNextBufferIfNotAlreadyPlaying();
  };

  // Stop playback and clear the buffers
  const stop = () => {
    shouldPlayWhenAudioAvailable = false;

    // Stop the currently playing buffer source, if any
    currentPlayingBufferSource?.stop();
    currentPlayingBufferSource = null;

    unplayedAudioBuffers = [];

    onEnded != null && onEnded();
    isPlaying = false;
  };

  const playNextBufferIfNotAlreadyPlaying = () => {
    if (!isPlaying) {
      playNextBuffer();
    }
  };

  const playNextBuffer = () => {
    if (shouldPlayWhenAudioAvailable === false) {
      console.debug(
        '[BufferedSpeechPlayer][playNextBuffer] Not playing any more audio because shouldPlayWhenAudioAvailable is false.',
      );
      // NOTE: we do not need to set isPlaying = false or call onEnded here because that is handled in stop()
      return;
    }

    if (unplayedAudioBuffers.length === 0) {
      console.debug(
        '[BufferedSpeechPlayer][playNextBuffer] No buffers to play.',
      );
      if (isPlaying) {
        isPlaying = false;
        onEnded != null && onEnded();
      }
      return;
    }

    // If isPlaying is false, we are starting playback fresh rather than continuing it, and should call onStarted
    if (isPlaying === false) {
      isPlaying = true;
      onStarted != null && onStarted();
    }

    const source = audioContext.createBufferSource();

    // Take the first unplayed buffer from the front of the queue and remove it from the array
    const buffer = unplayedAudioBuffers.shift() ?? null;
    source.buffer = buffer;

    console.debug(
      `[BufferedSpeechPlayer] Playing buffer with ${source.buffer?.length} samples`,
    );

    source.connect(gainNode);

    const startTime = new Date().getTime();
    source.start();
    currentPlayingBufferSource = source;

    // This is probably not necessary, but it doesn't hurt
    isPlaying = true;

    // TODO: consider changing this to a while loop to avoid deep recursion
    const onThisBufferPlaybackEnded = () => {
      console.debug(
        `[BufferedSpeechPlayer] Buffer with ${source.buffer?.length} samples ended.`,
      );
      source.removeEventListener('ended', onThisBufferPlaybackEnded);
      const endTime = new Date().getTime();
      debug()?.playedAudio(startTime, endTime, buffer);
      currentPlayingBufferSource = null;

      // We don't set isPlaying = false here because we are attempting to continue playing. It gets set to false when there are no more buffers to play.
      playNextBuffer();
    };

    source.addEventListener('ended', onThisBufferPlaybackEnded);
  };

  const addAudioToBuffer: AddAudioToBufferFunction = (samples, sampleRate) => {
    const incomingArrayBufferChunk = audioContext.createBuffer(
      // 1 channel
      1,
      samples.length,
      sampleRate,
    );

    incomingArrayBufferChunk.copyToChannel(
      new Float32Array(samples),
      // first channel
      0,
    );

    console.debug(
      `[addAudioToBufferAndPlay] Adding buffer with ${incomingArrayBufferChunk.length} samples to queue.`,
    );

    unplayedAudioBuffers.push(incomingArrayBufferChunk);

    debug()?.receivedAudio(
      incomingArrayBufferChunk.length / incomingArrayBufferChunk.sampleRate,
    );

    const audioBuffersTableInfo = unplayedAudioBuffers.map((buffer, i) => {
      return {
        index: i,
        duration: buffer.length / buffer.sampleRate,
        samples: buffer.length,
      };
    });
    const totalUnplayedDuration = unplayedAudioBuffers.reduce((acc, buffer) => {
      return acc + buffer.length / buffer.sampleRate;
    }, 0);

    console.debug(
      `[addAudioToBufferAndPlay] Current state of incoming audio buffers (${totalUnplayedDuration.toFixed(
        1,
      )}s unplayed):`,
    );
    console.table(audioBuffersTableInfo);

    if (shouldPlayWhenAudioAvailable) {
      playNextBufferIfNotAlreadyPlaying();
    }
  };

  return {addAudioToBuffer, setGain, stop, start};
}
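
// A minimal usage sketch (not part of the module above), showing how a caller
// might drive the player: create it, start it, feed incoming chunks into the
// queue, then stop. `fetchSpeechChunks` is a hypothetical async source that
// yields `{samples, sampleRate}` objects; it is an assumption for illustration
// and does not exist in this codebase.
//
//   import createBufferedSpeechPlayer from './createBufferedSpeechPlayer';
//
//   const player = createBufferedSpeechPlayer({
//     onStarted: () => console.debug('[demo] playback started'),
//     onEnded: () => console.debug('[demo] playback ended'),
//   });
//
//   player.setGain(0.8); // slightly attenuate output
//   player.start();      // begin playing as soon as audio is buffered
//
//   // Each chunk is appended to the queue; the player drains it in order.
//   for await (const chunk of fetchSpeechChunks()) {
//     player.addAudioToBuffer(chunk.samples, chunk.sampleRate);
//   }
//
//   player.stop(); // stop immediately and discard any unplayed audio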