<!DOCTYPE html>
<html lang="zh-CN">

<head>
  <meta charset="UTF-8">
  <title>media-recorder</title>
  <script src="https://unpkg.com/konva@4.0.0/konva.min.js"></script>
</head>

<body>
  <div>
    <button class="record-btn">视频录制</button>
    <button class="stop-btn" onclick="closeluzhi()">停止录制</button>
    <!-- <button class="record-audio-btn">音频录制</button> -->
  </div>
  <div style="height: 400px;">
    <select id="tool">
      <option value="brush">Brush</option>
      <option value="eraser">Eraser</option>
    </select>
    <div style="height: 350px; border: 1px solid #ccc; background-color:white;" id="container"></div>
    <canvas style="height: 350px; border: 1px solid #ccc; visibility: hidden;" id="canvasContainer"></canvas>
  </div>

  <video class="video" width="300" height="200" controls></video>
  <audio style="width: 400px; height:80px" controls></audio>
    <script>
      var width = window.innerWidth;
      var height = 400;

      // Konva core: a stage mounted on #container plus a single layer.
      var stage = new Konva.Stage({
        container: 'container',
        width: width,
        height: height
      });
      var layer = new Konva.Layer();
      stage.add(layer);

      // Off-screen canvas used as the drawing surface. The recording
      // script below captures this same canvas via captureStream().
      var canvas = document.createElement('canvas');
      canvas.width = stage.width();
      canvas.height = stage.height();

      // One shared 2d context for both the background image and strokes.
      // (The original created two separate contexts, ctx and context, for
      // the same canvas; they are the same object anyway.)
      var context = canvas.getContext('2d');

      // Background image for the drawing surface.
      var img11 = new Image();
      // Request CORS so drawing the image does not taint the canvas — a
      // tainted canvas cannot be recorded with captureStream().
      // (The remote host must send CORS headers for this to succeed.)
      img11.crossOrigin = 'anonymous';
      img11.onload = function () {
        context.drawImage(img11, 0, 0, canvas.width, canvas.height);
        // Repaint the Konva layer so the freshly drawn background shows up.
        layer.batchDraw();
      };
      // BUG FIX: the original assigned the URL to `img11.url`, which is not
      // an Image property — the image never started loading and onload never
      // fired. The correct property is `src`.
      img11.src = 'https://gimg2.baidu.com/image_search/src=http%3A%2F%2Fda.bandisoft.com%2Fhoneycam%2Fimgs%2Fhow-to-use.jpg&refer=http%3A%2F%2Fda.bandisoft.com&app=2002&size=f9999,10000&q=a80&n=0&g=0n&fmt=auto?sec=1669860485&t=4dc6829e47dc0f4e65d712caa2973065';

      // Wrap the raw canvas in a Konva.Image so Konva renders it on stage.
      var image = new Konva.Image({
        image: canvas,
        x: 0,
        y: 0
      });
      layer.add(image);
      stage.draw();

      // Brush styling.
      context.strokeStyle = '#df4b26';
      context.lineJoin = 'round';
      context.lineWidth = 5;

      var isPaint = false;
      var lastPointerPosition;
      var mode = 'brush';

      // Begin a stroke on pointer down over the image.
      image.on('mousedown touchstart', function () {
        isPaint = true;
        lastPointerPosition = stage.getPointerPosition();
      });

      // End the stroke when the pointer is released anywhere on the stage.
      stage.on('mouseup touchend', function () {
        isPaint = false;
      });

      // Core drawing: connect the previous pointer position to the current
      // one with a stroked segment.
      stage.on('mousemove touchmove', function () {
        if (!isPaint) {
          return;
        }

        // Brush paints over existing pixels; eraser punches them out.
        context.globalCompositeOperation =
          mode === 'eraser' ? 'destination-out' : 'source-over';

        context.beginPath();
        var localPos = {
          x: lastPointerPosition.x - image.x(),
          y: lastPointerPosition.y - image.y()
        };
        context.moveTo(localPos.x, localPos.y);

        var pos = stage.getPointerPosition();
        localPos = {
          x: pos.x - image.x(),
          y: pos.y - image.y()
        };
        context.lineTo(localPos.x, localPos.y);
        context.closePath();
        context.stroke();

        lastPointerPosition = pos;
        layer.batchDraw();
      });

      // Tool selector: switches between brush and eraser modes.
      var select = document.getElementById('tool');
      select.addEventListener('change', function () {
        mode = select.value;
      });
    </script>

    <script>
      var _audio_mediaRecorder;
      var _video_mediaRecorder;
      var _audiostream;

      // Stop both recorders and release the microphone.
      // Guarded so that clicking "停止录制" before recording has started
      // (or clicking it twice) does not throw on undefined recorders or
      // recorders that are already inactive.
      function closeluzhi() {
        if (_video_mediaRecorder && _video_mediaRecorder.state !== 'inactive') {
          _video_mediaRecorder.stop();
        }
        if (_audio_mediaRecorder && _audio_mediaRecorder.state !== 'inactive') {
          _audio_mediaRecorder.stop();
        }
        if (_audiostream) {
          // Stop every microphone track so the browser's recording
          // indicator goes away.
          _audiostream.getTracks().forEach(function (track) {
            track.stop();
          });
        }
      }
      const btn = document.querySelector('.record-btn');
      // const audiobtn = document.querySelector('.record-audio-btn');
      // Request microphone access and set up (but do not start) an audio
      // MediaRecorder. The stream is kept in the global _audiostream so
      // closeluzhi() can release the tracks later.
      const audiofn = async function () {
        _audiostream = await navigator.mediaDevices.getUserMedia({
          audio: true,
        });

        // Probe the browser for the best supported audio container/codec.
        const mime = MediaRecorder.isTypeSupported('audio/webm;codecs=opus')
          ? 'audio/webm;codecs=opus'
          : 'audio/webm';

        // BUG FIX: the original computed `mime` above but then hard-coded
        // 'audio/webm; codecs=pcm', which is unsupported in most browsers
        // and throws NotSupportedError. Use the probed type instead.
        _audio_mediaRecorder = new MediaRecorder(_audiostream, {
          mimeType: mime
        });

        // Blob parts collected while recording.
        const chunks = [];

        _audio_mediaRecorder.ondataavailable = function (e) {
          chunks.push(e.data);
        };

        _audio_mediaRecorder.onstop = function () {
          const blob = new Blob(chunks, {
            type: chunks[0].type,
          });
          const url = URL.createObjectURL(blob);

          // Let the user replay the recording in the <audio> element.
          const audioEl = document.querySelector('audio');
          audioEl.src = url;

          // Trigger a download of the recording.
          const a = document.createElement('a');
          a.href = url;
          a.download = 'audio.webm';
          a.click();

          // NOTE: the original called URL.revokeObjectURL(blob), which is a
          // no-op — revokeObjectURL expects the URL string, not the Blob.
          // The URL is deliberately left alive here because the <audio>
          // element above is still using it.
        };
      }
      //  audiobtn.addEventListener('click', audiofn);
      // Start recording: audio from the microphone (via audiofn) and video
      // from the drawing canvas, each with its own MediaRecorder.
      const btnvideofn = async function () {
        // Set up and start audio capture so both recordings run in parallel.
        await audiofn();
        _audio_mediaRecorder.start();

        // Capture the drawing canvas at 25 frames per second.
        var stream = canvas.captureStream(25);

        // Probe the browser for the best supported video container/codec.
        // (Original fallback was 'video/webm;' — the trailing semicolon makes
        // it an invalid MIME type.)
        const mime = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
          ? 'video/webm;codecs=vp9'
          : 'video/webm';

        // BUG FIX: the original probed for vp9 above but then hard-coded
        // 'video/webm;codecs=h264,opus'. The canvas stream has no audio
        // track (so "opus" is wrong) and h264-in-webm is unsupported in most
        // browsers, making the constructor throw. Use the probed type.
        _video_mediaRecorder = new MediaRecorder(stream, {
          mimeType: mime
        });

        // Blob parts collected while recording.
        const chunks = [];

        _video_mediaRecorder.ondataavailable = function (e) {
          chunks.push(e.data);
        };

        _video_mediaRecorder.onstop = function () {
          const blob = new Blob(chunks, {
            type: chunks[0].type,
          });
          const url = URL.createObjectURL(blob);

          // Let the user replay the recording in the <video> element.
          const video = document.querySelector('video');
          video.src = url;

          // Trigger a download of the recording.
          const a = document.createElement('a');
          a.href = url;
          a.download = 'video.webm';
          a.click();

          // NOTE: the original called URL.revokeObjectURL(blob), which is a
          // no-op — revokeObjectURL expects the URL string, not the Blob.
          // The URL is deliberately left alive here because the <video>
          // element above is still using it.
        };

        // Begin capturing canvas frames.
        _video_mediaRecorder.start();
      }
      btn.addEventListener('click', btnvideofn);
    </script>
</body>

</html>