<template>
  <view>
    <button @click="setVideo">视频画面，音频拆除</button>
    <button @click="videoDecoder">视频解码</button>
    <!--    <button @click="testMediaRecorderWithCanvas">testMediaRecorderWithCanvas</button>-->
    <video v-if="videoUrl" :src="videoUrl"></video>
    <canvas type="2d" id="target1" :style="'width: 100vw;height: ' + height + 'px;'"></canvas>
    <view style="width: 100%;display: flex;flex-wrap: wrap">
      <!-- :key added: v-for without a key breaks Vue's list diffing when `list` changes -->
      <image v-for="item in list" :key="item" mode="widthFix" :src="item" style="width: 100upx;height: 100upx;"></image>
    </view>
    <!--    <canvas type="webgl" id="target1" style="border:1px solid blue;box-sizing: content-box;width:300px;height:150px;"></canvas>-->
  </view>
</template>

<script>
// import {createScopedThreejs} from '../components/threejs-miniprogram'

export default {
  // Reactive state for the page.
  data() {
    return {
      MediaContainer: {},  // wx MediaContainer instance, created in init()
      VideoDecoder: {},    // wx VideoDecoder instance, created in init()
      canvas: {},          // 2d canvas node resolved from #target1
      context: {},         // 2d rendering context of `canvas`
      windowInfo: {},      // wx window metrics, used to size the canvas
      videoUrl: "",        // exported video path; shown by the <video> tag when set
      height: "",          // CSS pixel height bound to the canvas style
      list: [],            // image paths rendered by the v-for (not populated in visible code)
      ended: false         // NOTE(review): unused — videoDecoder() uses a local `ended` instead
    };
  },
  // Page lifecycle hook: (re)create the native media objects and re-resolve
  // the canvas node every time the page becomes visible.
  onShow() {
    this.init()
  },
  methods: {
    // Create the native media helpers and look up the 2d canvas node/context.
    // NOTE(review): wx.getWindowInfo() is documented as a synchronous call;
    // the `await` is harmless but likely unnecessary — confirm against the
    // targeted base library version.
    async init() {
      this.MediaContainer = wx.createMediaContainer();
      this.VideoDecoder = wx.createVideoDecoder();
      this.windowInfo = await wx.getWindowInfo()
      this.canvas = await this.getCanvasNode('target1')
      this.context = this.canvas.getContext('2d')

    },
    // 获取视频的画面，声音
    setVideo() {
      let _this = this;
      wx.chooseVideo({
        sourceType: ['album', 'camera'],
        maxDuration: 60,
        camera: 'back',
        success: async res => {
          let videoPath = res.tempFilePath
          _this.MediaContainer.extractDataSource({
            source: videoPath,
            success: function (res) {
              _this.MediaContainer.addTrack(res.tracks[0]);
              _this.MediaContainer.addTrack(res.tracks[1]);
              _this.MediaContainer.export({
                success: function (video) {
                  _this.videoUrl = video.tempFilePath
                }
              })
            },
            fail: function (res) {
              console.log(res, '视频解析失败')
            }
          })
        }
      })
    },
    // Video decoding demo: decode a user-chosen video frame-by-frame, paint
    // each frame onto the 2d canvas paced by its presentation timestamp, and
    // simultaneously record the canvas into a new video file.
    async videoDecoder() {
      let _this = this
      const canvas = await this.getCanvasNode('target1')
      const windowInfo = await wx.getWindowInfo()
      const context = canvas.getContext('2d')
      // const recorder = wx.createMediaRecorder(canvas, {
      //   fps: 30,
      // })
      // Let the user pick exactly one video.
      const {
        tempFiles: [{tempFilePath}],
      } = await wx.chooseMedia({mediaType: ['video'], count: 1});
      console.log(tempFilePath)
      // Create the decoder
      const decoder = wx.createVideoDecoder()

      await decoder.start({
        source: tempFilePath,
      })

      let ended = false

      // Flips when the decoder has delivered its last frame; terminates the
      // render loop below.
      decoder.on('ended', () => {
        ended = true
      })

      let frameData

      // Wait for decoding to produce the first frame (getFrameData yields
      // nothing until one is available).
      do {
        await new Promise(resolve => canvas.requestAnimationFrame(resolve))
        frameData = decoder.getFrameData()
      } while (!frameData)


      // Size the canvas: the CSS height keeps the on-screen aspect ratio,
      // while the backing store matches the source frame exactly.
      this.height = (windowInfo.windowWidth * frameData.height) / frameData.width
      canvas.height = frameData.height
      canvas.width = frameData.width
      const startTime = Date.now()
      const recorder = wx.createMediaRecorder(canvas, {
        fps: 30,
        videoBitsPerSecond: 3000,
      })
      // Start recording and wait for the 'start' event before drawing.
      await new Promise(resolve => {
        recorder.on('start', resolve)
        recorder.start()
      })
      // Blit one decoded frame's pixel data onto the canvas.
      // NOTE(review): createImageData is called on the canvas node here; on
      // the web this method lives on the 2d context — confirm this matches
      // the WeChat canvas node API for the targeted base library.
      const render = ({data, width, height}) => {
        const imageData = canvas.createImageData(data, width, height)
        context.putImageData(imageData, 0, 0)
      }

      // Draw loop: show a frame once its pkPts (compared against elapsed
      // wall-clock ms since startTime) has been reached; otherwise wait for
      // the next animation/recorder frame and try to fetch a new one.
      do {
        if (frameData && frameData.pkPts <= Date.now() - startTime) {
          // Display only after its pts has been reached
          render(frameData)
          frameData = null
        } else {
          await new Promise(resolve => canvas.requestAnimationFrame(resolve))
          await new Promise(resolve => recorder.requestFrame(resolve))
          if (!frameData) frameData = decoder.getFrameData()
        }
      } while (!ended)

      console.log('ended')
      // Stop recording and wait for the output file path.
      let res = await new Promise(resolve => {
        recorder.on('stop', resolve)
        recorder.stop()
      })
      console.log(res.tempFilePath, 'tempFilePath')
      recorder.destroy()
      // console.log(_this.list, '解码结果')
      // Decoding finished — release the decoder.
      await decoder.remove()
    },
    getCanvasNode(id) {
      return new Promise(resolve => {
        this.createSelectorQuery()
            .select('#' + id)
            .node(res => resolve(res.node))
            .exec()
      })
    },
    // async testMediaRecorderWithCanvas() {
    //   this.testWebglCanvas(
    //       await this.getCanvasNode('target1'),
    //       'videoSrc1'
    //   ).catch(console.error)
    // },
    // async testWebglCanvas(canvas, targetProps) {
    //   const render = await this.drawWebGLCanvas(canvas)
    //   const fps = 300
    //
    //   const recorder = wx.createMediaRecorder(canvas, {
    //     fps,
    //     videoBitsPerSecond: 3000,
    //   })
    //
    //   await new Promise(resolve => {
    //     recorder.on('start', resolve)
    //     recorder.start()
    //   })
    //
    //   let frames = fps * 5
    //   while (frames--) {
    //     await new Promise(resolve => recorder.requestFrame(resolve))
    //     console.log(frames)
    //     render()
    //   }
    //
    //   const {tempFilePath} = await new Promise(resolve => {
    //     recorder.on('stop', resolve)
    //     recorder.stop()
    //   })
    //   console.log(tempFilePath)
    //
    //   recorder.destroy()
    //
    //   console.log(tempFilePath, 'tempFilePath')
    // },
    // async drawWebGLCanvas(canvas) {
    //   const THREE = createScopedThreejs(canvas)
    //   var camera, scene, renderer;
    //   var mesh;
    //   camera = new THREE.PerspectiveCamera(70, canvas.width / canvas.height, 1, 1000);
    //   camera.position.z = 400;
    //   scene = new THREE.Scene();
    //   var texture = await new Promise(resolve => new THREE.TextureLoader().load('/static/image/nv.png', resolve));
    //   var geometry = new THREE.BoxBufferGeometry(200, 200, 200);
    //   var material = new THREE.MeshBasicMaterial({map: texture});
    //   mesh = new THREE.Mesh(geometry, material);
    //   scene.add(mesh);
    //   renderer = new THREE.WebGLRenderer({antialias: false});
    //   renderer.setPixelRatio(1);
    //   renderer.setSize(canvas.width, canvas.height);
    //
    //   return function render() {
    //     mesh.rotation.x += 0.005;
    //     mesh.rotation.y += 0.1;
    //     renderer.render(scene, camera);
    //   }
    // },


    initOffscreenCanvas(canvasWidth, canvasHeight) {
      const canvas = wx.createOffscreenCanvas({
        type: '2d',
        width: canvasWidth,
        height: canvasHeight
      })
      return {
        canvas,
        context: canvas.getContext('2d')
      }
    },
  },
}
</script>

<style lang="scss">

</style>
