<!DOCTYPE html>
<html lang="zh-CN">

<head>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1" />
  <title>屏幕录制演示</title>
</head>

<body>
  <button id="openCamera">打开摄像头</button>
  <button id="screenRecord">屏幕录制</button>
  <button id="start">开始录制</button>
  <button id="stop">停止录制</button>
  <button id="play">播放录制的视频</button>
  <button id="save">保存录制的视频</button> <!-- 添加保存按钮 -->
  <video src="" id="originVideo"></video>
  <video src="" id="playVideo" width="400" height="500"></video>

  <script>

    // Cached references to the control buttons and the two video elements.
    const openCamera = document.querySelector("#openCamera")
    const screenRecord = document.querySelector("#screenRecord")
    const start = document.querySelector("#start")
    const stop = document.querySelector("#stop")
    const play = document.querySelector("#play")
    const save = document.querySelector("#save")
    const originVideo = document.querySelector("#originVideo")
    const playVideo = document.querySelector("#playVideo")

    let systemStream;   // screen-capture stream from getDisplayMedia
    let audioStream;    // microphone stream from getUserMedia
    let blobData = []   // recorded chunks pushed by MediaRecorder's ondataavailable
    let recordInstance; // the active MediaRecorder, if any
    let mergedStream ;  // mixed audio + screen video stream fed to the recorder
    // Open the camera preview when the "open camera" button is clicked.
    openCamera.addEventListener("click", function () {
      handleOpenCamera()
    })

    // Start a screen capture when the "screen record" button is clicked.
    screenRecord.addEventListener("click", function () {
      handleScreenRecord()
    })

    // Begin recording the merged stream.
    start.addEventListener("click", () => startRecord())

    // Stop the recorder, but only if one has been created.
    stop.addEventListener("click", () => {
      if (recordInstance) {
        recordInstance.stop()
      }
    })

    // Play back the recorded chunks.
    // Bug fix: the recorder is created with mimeType 'video/webm' (see
    // startRecord), so the Blob must be typed 'video/webm', not 'video/mp4'.
    play.addEventListener("click", () => {
      const blob = new Blob(blobData, { type: 'video/webm' })
      // Release the previous object URL (if any) so repeated playback
      // does not leak blob URLs.
      if (playVideo.src) {
        URL.revokeObjectURL(playVideo.src)
      }
      playVideo.src = URL.createObjectURL(blob)
      playVideo.play()
    })
    // Download the recording as a file.
    // Bug fix: the recorded data is WebM (MediaRecorder mimeType 'video/webm'),
    // so both the Blob type and the file extension must be webm, not mp4.
    save.addEventListener("click", () => {
      if (blobData.length > 0) {
        const blob = new Blob(blobData, { type: 'video/webm' })
        const url = URL.createObjectURL(blob);
        const a = document.createElement('a');
        a.style.display = 'none';
        a.href = url;
        a.download = 'recording.webm'; // downloaded file name
        document.body.appendChild(a);
        a.click();
        document.body.removeChild(a);
        URL.revokeObjectURL(url); // release the object URL
      } else {
        console.error('No recorded data available.');
      }
    })
    // Open the camera (with microphone) and show the live preview.
    // Bug fix: `stream` was assigned without a declaration, creating an
    // implicit global; it is now a local const. Also handle rejection —
    // getUserMedia rejects when permission is denied or no device exists.
    const handleOpenCamera = async () => {
      try {
        const stream = await navigator.mediaDevices.getUserMedia({
          video: {
            width: 400, height: 500
          },
          audio: true
        })
        getAudioStream();
        console.log("handleOpenCamera stream", stream);
        originVideo.srcObject = stream
        originVideo.play()
      } catch (error) {
        console.error('handleOpenCamera failed:', error)
      }
    }

    // Capture the screen (with system audio where the browser supports it),
    // then grab the microphone so the two audio sources can be mixed.
    // Bug fix: getDisplayMedia rejects when the user cancels the share picker;
    // that rejection was previously unhandled.
    const handleScreenRecord = async () => {
      try {
        systemStream = await navigator.mediaDevices.getDisplayMedia({
          video: {
            width: 1920, height: 1080
          },
          audio: {
            echoCancellation: true,
            noiseSuppression: true
          }
        })
        console.log("handlescreenRecord stream", systemStream);
        getAudioStream();
      } catch (error) {
        console.error('handleScreenRecord failed:', error)
      }
    }
    // Acquire a microphone-only stream and trigger the merge once it is ready.
    // Bug fix: MediaStream has no "onended" event (it was removed from the
    // spec); end-of-stream is reported per track, so listen on each track.
    function getAudioStream() {
      navigator.mediaDevices.getUserMedia({ audio: true, video: false })
        .then(function (stream) {
          audioStream = stream;
          console.log('Micro audio started.', audioStream);
          mergeStreams(); // mix microphone and system audio
          stream.getAudioTracks().forEach(function (track) {
            track.onended = function () {
              console.log('Micro audio ended.');
            };
          });
        })
        .catch(function (error) {
          console.error('getUserMedia() failed:', error);
        });
    }
    // Mix the microphone and system audio into one track via the Web Audio
    // API, then build the final stream: mixed audio + the screen video track.
    // Bug fix: the original re-added the raw tracks of both source streams
    // after mixing (lines adding audioStream/systemStream tracks wholesale),
    // which put duplicate audio tracks into the merged stream. Only the
    // mixed destination audio and the screen-capture video belong here.
    function mergeStreams() {
      if (systemStream && audioStream) {
        mergedStream = new MediaStream();
        const audioContext = new AudioContext();
        const dest = audioContext.createMediaStreamDestination();
        // Route every microphone audio track into the mix destination.
        audioStream.getAudioTracks().forEach((track) => {
          const source = audioContext.createMediaStreamSource(new MediaStream([track]));
          source.connect(dest);
        });
        // Route system (tab/desktop) audio tracks into the same destination.
        systemStream.getAudioTracks().forEach((track) => {
          const source = audioContext.createMediaStreamSource(new MediaStream([track]));
          source.connect(dest);
        });
        // Final stream = mixed audio + screen video.
        dest.stream.getTracks().forEach((track) => mergedStream.addTrack(track));
        systemStream.getVideoTracks().forEach((track) => mergedStream.addTrack(track));
        console.log('Streams merged.', mergedStream);
      } else {
        console.error('Both streams are not available to merge.');
      }
    }
    // Start recording the merged stream with a MediaRecorder.
    // Bug fixes: guard against mergedStream being undefined (previously a
    // TypeError); clear chunks from any prior session so a second recording
    // does not concatenate onto the first; attach handlers before start()
    // so no early chunk can be missed.
    const startRecord = () => {
      if (!mergedStream) {
        console.error('No merged stream available; open the camera or screen first.')
        return
      }
      blobData = [] // drop chunks from any previous recording
      recordInstance = new MediaRecorder(mergedStream, { mimeType: 'video/webm' })
      console.log("startRecord stream", mergedStream);
      recordInstance.ondataavailable = function (e) {
        blobData.push(e.data)
      }
      recordInstance.onstop = function (e) {
        console.log("startRecord onstop");
      }
      recordInstance.start()
    }

  </script>

</body>

</html>
