<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <title>Camera capture, recording and audio waveform demo</title>
  </head>
  <body>
    <button id="button_take">拍照</button>
    <button id="button_record">录制</button>
    <button id="button_stop">停止录制</button><br />

    <!-- Live camera preview. Muted: the stream includes the microphone,
         so playing it unmuted causes an audio feedback loop. -->
    <div style="float: left; width: 50%"><video id="v" muted></video></div>
    <!-- Playback element for the finished recording (controls are enabled
         by script once a clip is available). -->
    <div style="float: right; width: 50%"><video id="c"></video></div>
    <!-- Shared canvas: audio waveform visualizer and snapshot target. -->
    <canvas id="canvas"></canvas><br />
    <!-- Snapshot output for the 拍照 button; the script replaces src with
         a captured frame. -->
    <img
      src="https://placehold.it/640&text=Your%20image%20here%20..."
      id="photo"
      alt="Captured camera snapshot"
      style="float: left"
    />
  </body>
  <script>
    // Recording control buttons.
    var record = document.getElementById("button_record");
    var stop = document.getElementById("button_stop");

    // Playback element for the recorded clip; controls stay hidden until
    // a recording is available (enabled in mediaRecorder.onstop below).
    var copyVideo = document.getElementById("c");
    copyVideo.controls = false;
    // Accumulates Blob chunks delivered by MediaRecorder.ondataavailable.
    var chunks = [];
		// Lazily created in visualize() (last script block on this page).
		let audioCtx;

		// Canvas + 2D context used by visualize() to draw the audio waveform.
		const canvas = document.querySelector('#canvas');
		const canvasCtx = canvas.getContext("2d");
		// Success callback for getUserMedia: starts the audio visualizer,
		// wires up the record/stop buttons, and feeds the live stream into
		// the preview <video id="v">.
    function onSuccess(stream) {
			visualize(stream)

      const options = {
        audioBitsPerSecond: 128000,
        videoBitsPerSecond: 2500000,
        mimeType: "video/webm",
      };
      const mediaRecorder = new MediaRecorder(stream, options);

      record.addEventListener("click", function () {
        // Ignore clicks while already recording; calling start() again
        // would throw an InvalidStateError.
        if (mediaRecorder.state !== "inactive") {
          return;
        }
        copyVideo.controls = false;
        // Emit a dataavailable event every 500 ms.
        mediaRecorder.start(500);
      });
      stop.addEventListener("click", function () {
        // stop() on an inactive recorder throws an InvalidStateError.
        if (mediaRecorder.state === "inactive") {
          return;
        }
        mediaRecorder.stop();
      });

      mediaRecorder.onstop = function (e) {
        var blob = new Blob(chunks, { type: "video/webm" });
        chunks = [];
        // Release the previous clip's object URL before replacing it;
        // otherwise every recording leaks its Blob until page unload.
        if (copyVideo.src) {
          window.URL.revokeObjectURL(copyVideo.src);
        }
        const videoURL = window.URL.createObjectURL(blob);
        copyVideo.src = videoURL;
        // The clip must be played manually via the native controls.
        copyVideo.controls = true;
      };

      mediaRecorder.ondataavailable = function (e) {
        chunks.push(e.data);
      };

      // Older browsers may not support srcObject.
      if ("srcObject" in v) {
        v.srcObject = stream;
      } else {
        // Legacy fallback only: createObjectURL(stream) is no longer
        // supported in modern browsers.
        v.src = window.URL.createObjectURL(stream);
      }
      v.onloadedmetadata = function (e) {
        v.play();
        videoPlaying = true;
      };
    }
		// Error callback for getUserMedia (permission denied, no device, ...).
    function onError(err) {
      console.error("The following error occurred: " + err);
    }
  </script>
  <script>
    // Polyfill: expose the promise-based navigator.mediaDevices.getUserMedia
    // on older browsers that only ship the vendor-prefixed callback APIs.
    if (navigator.mediaDevices === undefined) {
      navigator.mediaDevices = {};
    }
    if (navigator.mediaDevices.getUserMedia === undefined) {
      navigator.mediaDevices.getUserMedia = function (constraints) {
        // Pick whichever prefixed implementation this browser provides.
        var getUserMedia =
          navigator.webkitGetUserMedia ||
          navigator.mozGetUserMedia ||
          navigator.msGetUserMedia;
        if (!getUserMedia) {
          return Promise.reject(
            new Error("getUserMedia is not implemented in this browser")
          );
        }
        // Wrap the callback-style API in a promise to match the modern
        // signature.
        return new Promise(function (resolve, reject) {
          getUserMedia.call(navigator, constraints, resolve, reject);
        });
      };
    }
    // Request both camera and microphone.
    const constraints = {
      video: true,
      audio: true,
    };
    // Set to true once the preview starts playing (in onSuccess); gates
    // the snapshot button handler below.
    let videoPlaying = false;
    let v = document.getElementById("v");
    navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);

		// Snapshot button: draw the current preview frame onto the canvas and
		// export it as a data URL. (The canvas is shared with the audio
		// visualizer, which repaints it on the next animation frame; frames
		// could also be captured in a loop, or taken from the canvas itself.)
    document.getElementById("button_take").addEventListener(
      "click",
      function () {
        // Nothing to capture until the preview stream is playing.
        if (!videoPlaying) {
          return;
        }
        let canvas = document.getElementById("canvas");
        // Match the canvas to the video's intrinsic resolution so the
        // frame is captured without scaling.
        canvas.width = v.videoWidth;
        canvas.height = v.videoHeight;
        canvas.getContext("2d").drawImage(v, 0, 0);
        let data = canvas.toDataURL("image/webp");
        // The #photo <img> may be absent from the markup; guard instead
        // of throwing a TypeError on null.
        let photo = document.getElementById("photo");
        if (photo) {
          photo.setAttribute("src", data);
        }
      },
      false
    );
  </script>
	<script>
		// Draws a live time-domain waveform of the stream's audio onto the
		// shared canvas using a Web Audio AnalyserNode.
		function visualize(stream) {
			if(!audioCtx) {
				audioCtx = new AudioContext();
			}
			// Autoplay policies can create the context in a "suspended" state
			// when it is not constructed from a user gesture; resume it so the
			// analyser actually receives samples (otherwise the trace is a
			// frozen flat line at the midpoint).
			if (audioCtx.state === "suspended") {
				audioCtx.resume();
			}

			const source = audioCtx.createMediaStreamSource(stream);

			const analyser = audioCtx.createAnalyser();
			analyser.fftSize = 2048;
			// frequencyBinCount is fftSize / 2; one unsigned byte per sample.
			const bufferLength = analyser.frequencyBinCount;
			const dataArray = new Uint8Array(bufferLength);
			source.connect(analyser);
			// Deliberately NOT connected to audioCtx.destination: the input is
			// only analysed, not played back.
			//analyser.connect(audioCtx.destination);

			draw()

			// Repaints the waveform once per animation frame.
			function draw() {
				// Re-read each frame: the snapshot button may resize the canvas.
				const WIDTH = canvas.width
				const HEIGHT = canvas.height;

				requestAnimationFrame(draw);

				analyser.getByteTimeDomainData(dataArray);

				canvasCtx.fillStyle = 'rgb(200, 200, 200)';
				canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);

				canvasCtx.lineWidth = 2;
				canvasCtx.strokeStyle = 'rgb(0, 0, 0)';

				canvasCtx.beginPath();

				let sliceWidth = WIDTH * 1.0 / bufferLength;
				let x = 0;

				for(let i = 0; i < bufferLength; i++) {
					// Samples are unsigned bytes centred on 128; normalise to
					// roughly [0, 2] so the midline sits at HEIGHT / 2.
					// (Renamed from `v`, which shadowed the global video element.)
					let sample = dataArray[i] / 128.0;
					let y = sample * HEIGHT/2;

					if(i === 0) {
						canvasCtx.moveTo(x, y);
					} else {
						canvasCtx.lineTo(x, y);
					}

					x += sliceWidth;
				}

				canvasCtx.lineTo(canvas.width, canvas.height/2);
				canvasCtx.stroke();

			}
		}

	</script>
</html>
