<!DOCTYPE html>
<html lang="zh-CN">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <title>TTS页面</title>
  </head>
  <body>
    <h1>TTS页面</h1>

    <label for="text">合成内容：</label>
    <input type="text" id="text" placeholder="输入要合成的内容" />

    <label for="voice">音色：</label>
    <select id="voice">
      <option value="aisbabyxu">音色1</option>
      <option value="voice2">音色2</option>
      <option value="voice3">音色3</option>
      <!-- 添加更多音色选项 -->
    </select>

    <button id="submitBtn">合成并播放</button>
    <div id="outputConcatone" class="output"></div>
    <div id="submitResult">合成并播放结果</div>
    <div id="outputConcat" class="output">eeeeeeeeeeeeeeeeeeeeee</div>
    <button id="audioConcat">拼接段落音频</button>
    <div id="outputsubmitBtn" class="output"></div>
    <script>
      // Shared page-level state for the TTS demo.
      window.dd = []; // debug: every parsed WebSocket payload is pushed here in onmessage
      var AudioSrcUrl = []; // object URLs, one per received audio segment, in arrival order
      var audioContext = new AudioContext(); // single context used for decoding and concatenation
      // Concatenate a list of decoded AudioBuffers into one AudioBuffer.
      // The result uses the widest channel count among the inputs; inputs with
      // fewer channels leave the extra channels silent over their span.
      // NOTE(review): assumes every buffer shares the first buffer's
      // sampleRate — confirm upstream (segments come from the same TTS voice).
      const concatAudio = (arrBufferList) => {
        // Fail loudly instead of letting createBuffer choke on -Infinity
        // channels when the list is empty.
        if (!arrBufferList || arrBufferList.length === 0) {
          throw new Error('concatAudio: need at least one AudioBuffer');
        }
        // Widest channel count among the inputs.
        const maxChannelNumber = Math.max(
          ...arrBufferList.map((audioBuffer) => audioBuffer.numberOfChannels)
        );
        // Total length in sample frames.
        const totalLength = arrBufferList.reduce(
          (sum, buffer) => sum + buffer.length,
          0
        );

        // Destination buffer sized to hold every segment back to back.
        const newAudioBuffer = audioContext.createBuffer(
          maxChannelNumber,
          totalLength,
          arrBufferList[0].sampleRate
        );

        // Copy each source buffer's channel data at its running frame offset.
        let offset = 0;
        arrBufferList.forEach((audioBuffer) => {
          for (
            let channel = 0;
            channel < audioBuffer.numberOfChannels;
            channel++
          ) {
            newAudioBuffer
              .getChannelData(channel)
              .set(audioBuffer.getChannelData(channel), offset);
          }
          offset += audioBuffer.length;
        });

        return newAudioBuffer;
      };

      // AudioBuffer 转 blob
      // Encode the first `len` sample frames of an AudioBuffer as a 16-bit
      // little-endian PCM WAV file and return it as a Blob ("audio/wav").
      //   abuffer — AudioBuffer (or any object exposing numberOfChannels,
      //             sampleRate and getChannelData(i))
      //   len     — number of sample frames to encode
      function bufferToWave(abuffer, len) {
        var numOfChan = abuffer.numberOfChannels,
          length = len * numOfChan * 2 + 44, // data bytes + 44-byte header
          buffer = new ArrayBuffer(length),
          view = new DataView(buffer),
          channels = [],
          i,
          sample,
          offset = 0, // source frame index
          pos = 0; // write cursor into `view`

        // ---- WAV header: RIFF container, "fmt " chunk, "data" chunk ----
        setUint32(0x46464952); // "RIFF"
        setUint32(length - 8); // file length - 8
        setUint32(0x45564157); // "WAVE"
        setUint32(0x20746d66); // "fmt " chunk id
        setUint32(16); // fmt chunk length
        setUint16(1); // format 1 = PCM (uncompressed)
        setUint16(numOfChan);
        setUint32(abuffer.sampleRate);
        setUint32(abuffer.sampleRate * 2 * numOfChan); // avg. bytes/sec
        setUint16(numOfChan * 2); // block align
        setUint16(16); // bits per sample (hardcoded 16-bit)
        setUint32(0x61746164); // "data" chunk id
        setUint32(length - pos - 4); // data chunk length

        // One Float32Array per channel.
        for (i = 0; i < numOfChan; i++) channels.push(abuffer.getChannelData(i));

        // ---- interleaved 16-bit samples ----
        while (pos < length) {
          for (i = 0; i < numOfChan; i++) {
            // Clamp to [-1, 1].
            sample = Math.max(-1, Math.min(1, channels[i][offset]));
            // Scale to signed 16-bit. BUG FIX: the original condition was
            // `0.5 + sample < 0`, which parses as `(0.5 + sample) < 0` and
            // moved the 32768/32767 scale switch to -0.5; the scale factor
            // must change sign at 0.
            sample = (sample < 0 ? sample * 0x8000 : sample * 0x7fff) | 0;
            view.setInt16(pos, sample, true);
            pos += 2;
          }
          // next source frame
          offset++;
        }

        return new Blob([buffer], { type: 'audio/wav' });

        // Little-endian writers that advance `pos`.
        function setUint16(data) {
          view.setUint16(pos, data, true);
          pos += 2;
        }

        function setUint32(data) {
          view.setUint32(pos, data, true);
          pos += 4;
        }
      }

      // Click handler for the #audioConcat button: decode every collected
      // segment URL, concatenate the AudioBuffers, and render one combined
      // <audio> player into #outputsubmitBtn.
      audioConcat.onclick = async function () {
        const arrBufferList = await Promise.all(
          AudioSrcUrl.map((src) => getnewAudioBuffer(src))
        );
        // BUG FIX: `concatAudioBuffer` was assigned without any declaration,
        // creating an implicit global (and a ReferenceError in strict mode).
        const concatAudioBuffer = concatAudio(arrBufferList);
        const newAudioSrc = URL.createObjectURL(
          bufferToWave(concatAudioBuffer, concatAudioBuffer.length)
        );
        outputsubmitBtn.innerHTML = `<audio id="outputConcataudio" src="${newAudioSrc}" controls></audio>`;
      };
      // Fetch an audio URL and decode it into an AudioBuffer.
      // BUG FIX: the original wrapped the chain in `new Promise` but never
      // called reject, so any fetch or decode failure left the promise
      // pending forever; returning the chain directly lets errors propagate
      // to callers (e.g. the Promise.all in audioConcat.onclick).
      const getnewAudioBuffer = (src) =>
        fetch(src)
          .then((response) => response.arrayBuffer())
          .then((arrayBuffer) => audioContext.decodeAudioData(arrayBuffer));
      /**
       * Decode a base64 string into an ArrayBuffer of its raw bytes.
       * Note: despite the name it returns the underlying ArrayBuffer, not the
       * Uint8Array view — callers wrap it in a Blob themselves.
       * BUG FIX: use the global `atob` instead of `window.atob` so the helper
       * also works in worker scopes where `window` is undefined.
       */
      const base64ToUint8Array = (base64String) => {
        var binaryString = atob(base64String);
        var bytes = new Uint8Array(binaryString.length);
        for (var i = 0; i < binaryString.length; i++) {
          bytes[i] = binaryString.charCodeAt(i);
        }
        return bytes.buffer;
      };
      document
        .getElementById('submitBtn')
        .addEventListener('click', function () {
          // Main entry point: read the text/voice inputs, open a WebSocket to
          // the TTS backend, collect streamed base64 WAV segments, and when the
          // final segment arrives (status != 1) concatenate everything into a
          // single playable <audio> element in #outputConcat.
          console.log('click');
          // Drop segments from any previous synthesis run.
          AudioSrcUrl = [];
          // outputConcatone.innerHTML = '';
          // outputConcat.innerHTML = '';
          // outputsubmitBtn.innerHTML = '';
          var text = document.getElementById('text').value;
          var voice = document.getElementById('voice').value;

          // var audioContext = new (window.AudioContext ||
          //   window.webkitAudioContext)();
          // NOTE(review): this MediaSource/audio element is created and
          // appended but never fed (sourceBuffer is never appended to, the
          // element never plays) — appears to be leftover streaming code; it
          // also leaks a DOM node per click. Confirm before removing.
          var mediaSource = new MediaSource();
          var audioElement = document.createElement('audio');
          audioElement.src = URL.createObjectURL(mediaSource);
          document.body.appendChild(audioElement);

          var sourceBuffer = null;
          var isPlaying = false; // NOTE(review): never read after init
          var startTime = 0; // NOTE(review): never read after init
          var buffer = [],
            size = 0;

          mediaSource.addEventListener('sourceopen', function () {
            sourceBuffer = mediaSource.addSourceBuffer('audio/mpeg');
          });

          // NOTE(review): hard-coded LAN address — should come from config.
          var socket = new WebSocket('ws://192.168.2.113:9002/jhl/tts');
          console.log('socket', socket);
          // socket.binaryType = 'arraybuffer';
          socket.onopen = function () {
            console.log('连接已建立');
            // Ask the server to synthesize `text` with the selected voice.
            var message = {
              text: text,
              voice: voice,
            };
            socket.send(JSON.stringify(message));
          };
          // Kept outside onmessage: needed later when merging background music.
          let concatAudioBuffer = null;
          const arrBufferList = [];

          socket.onmessage = async function (event) {
            // Each message is JSON shaped like { data: { status, audio } },
            // where `audio` is a base64 WAV segment and status == 1 means more
            // segments follow. NOTE(review): schema inferred from usage below —
            // confirm against the server protocol.
            // console.log('event.data', event.data);
            let reslut = JSON.parse(event.data);
            console.log('reslut', reslut.data);
            window.dd.push(reslut.data);
            if (reslut.data.status == 1) {
              // Intermediate segment: decode base64 and stash an object URL.
              // console.log(atob(reslut.data.audio));
              let arrayBuffer = base64ToUint8Array(reslut.data.audio);
              // console.log('arrayBuffer', arrayBuffer);
              let blob = new Blob([arrayBuffer], { type: 'audio/wav' });
              let AudioSrc = URL.createObjectURL(blob);
              AudioSrcUrl.push(AudioSrc);
            } else {
              // Final segment: stash it, decode every collected URL, then
              // concatenate the buffers and render one combined player.
              // console.log(atob(reslut.data.audio));
              let arrayBuffer = base64ToUint8Array(reslut.data.audio);
              // console.log('arrayBuffer', arrayBuffer);
              let blob = new Blob([arrayBuffer], { type: 'audio/wav' });
              let AudioSrc = URL.createObjectURL(blob);
              AudioSrcUrl.push(AudioSrc);
              console.log('AudioSrcUrl', AudioSrcUrl);
              const arrBufferList = await Promise.all(
                AudioSrcUrl.map((src) => getnewAudioBuffer(src))
              );
              concatAudioBuffer = concatAudio(arrBufferList);
              const newAudioSrc = URL.createObjectURL(
                bufferToWave(concatAudioBuffer, concatAudioBuffer.length)
              );
              outputConcat.innerHTML = `<audio id="outputConcataudio" src="${newAudioSrc}" controls></audio>`;
            }
            // let blob = new Blob([event.data]);
            // let AudioSrc = URL.createObjectURL(blob);
            // AudioSrcUrl.push(AudioSrc);
            // outputConcatone.innerHTML += `<audio id="outputConcataudioone" src="${AudioSrc}" controls></audio>`;
            // console.log(event.data);
            // let audioBuffer = await getAudioBuffer(event.data);
            // // 拼接音频
            // arrBufferList.push(audioBuffer);
            // concatAudioBuffer = concatAudio(arrBufferList);
            // const newAudioSrc = URL.createObjectURL(
            //   bufferToWave(concatAudioBuffer, concatAudioBuffer.length)
            // );
            // outputConcat.innerHTML = `<audio id="outputConcataudio" src="${newAudioSrc}" controls></audio>`;
            // outputConcataudio.play();
          };
          // Decode an already-fetched ArrayBuffer into an AudioBuffer.
          // NOTE(review): only referenced from the commented-out code above;
          // like getnewAudioBuffer, its reject path is never wired up.
          const getAudioBuffer = (src) => {
            return new Promise((resolve, reject) => {
              audioContext.decodeAudioData(src).then((buffer) => {
                resolve(buffer);
              });
            });
          };
          socket.onclose = function (event) {
            console.log('连接已关闭');
            // Reset the (unused) streaming accumulators.
            buffer = [];
            size = 0;
          };
        });
    </script>
  </body>
</html>
