<template>
  <div class="video-view-video">
    <!-- Header overlay: peer name + mic/camera toggle icons -->
    <div class="video-view-video-navigation">
      <!-- Peer identifier; full text shown via tooltip when truncated -->
      <div class="video-info" :title="peerId">
        {{ peerId }}
      </div>
      <!-- Toggle icons; a diagonal strike-through is drawn when closed -->
      <div class="video-view-video-icon">
        <div :class="openAudio ? 'video-icon' : 'video-icon video-icon-close'" @click="clickAudio">
          <el-icon :size="20">
            <Microphone/>
          </el-icon>
        </div>
        <div :class="openVideo ? 'video-icon' : 'video-icon video-icon-close'" @click="clickVideo">
          <el-icon :size="20">
            <VideoCamera/>
          </el-icon>
        </div>
      </div>
    </div>
    <!-- Spectrum canvas fills the whole tile when the camera is off -->
    <canvas ref="canvas" :style="openVideo ? '' : 'height:100%'" class="audio-canvas"></canvas>
    <video ref="video" muted autoplay :style="openVideo ? '' : 'display:none'"></video>
    <audio ref="audio" autoplay></audio>
  </div>
</template>

<script>
export default {
  name: "VideoView",
  props: {
    // Media tracks to render; may be undefined until the peer
    // connection delivers them.
    videoTrack: MediaStreamTrack,
    audioTrack: MediaStreamTrack,
    // Identifier of the peer this tile belongs to.
    peerId: String,
    // Whether camera / microphone are currently enabled for this peer.
    openVideo: Boolean,
    openAudio: Boolean,
    // True when this tile shows the local user; local audio playback is
    // muted to avoid feedback (spectrum is still drawn).
    localAudio: Boolean
  },
  watch: {
    // Re-attach the <video> element whenever a new video track arrives.
    videoTrack(newVideoTrack) {
      this.$nextTick(() => {
        this.$refs.video.srcObject = new MediaStream([newVideoTrack]);
      });
    },
    // Restart the spectrum renderer whenever a new audio track arrives.
    audioTrack(newAudioTrack) {
      this.$nextTick(() => {
        this.audioRender(newAudioTrack);
      });
    },
    // When the mic is re-enabled, resume rendering the current track
    // (the draw loop stops itself when openAudio goes false).
    openAudio(newOpenAudio) {
      if (newOpenAudio) {
        this.audioRender(this.audioTrack);
      }
    }
  },
  mounted() {
    this.$nextTick(() => {
      // Fix: guard against undefined props — the old code dereferenced
      // `.kind` on possibly-undefined tracks and threw on first mount.
      if (this.videoTrack && this.videoTrack.kind !== undefined)
        this.$refs.video.srcObject = new MediaStream([this.videoTrack]);
      if (this.audioTrack && this.audioTrack.kind !== undefined)
        this.audioRender(this.audioTrack);
    });
  },
  beforeUnmount() {
    // Release the WebAudio context when the tile is destroyed.
    this.closeAudioContext();
  },
  methods: {
    // Close and forget the AudioContext from a previous audioRender call.
    closeAudioContext() {
      if (this.audioContext) {
        this.audioContext.close().catch(() => {});
        this.audioContext = null;
      }
    },

    // Draw a live frequency spectrum of audioTrack onto the canvas and,
    // for remote peers, play the track through the <audio> element.
    audioRender(audioTrack) {
      // Fix: validate the track that was passed in (the old code checked
      // the prop instead, and threw when it was undefined).
      if (!this.openAudio || !audioTrack || audioTrack.kind !== 'audio') {
        return;
      }

      // Fix: close any context from a previous call before creating a new
      // one — AudioContexts were leaking, and browsers cap the number of
      // live contexts per page.
      this.closeAudioContext();
      const audioContext = new (window.AudioContext || window.webkitAudioContext)();
      this.audioContext = audioContext;

      const canvasElement = this.$refs.canvas;
      const canvasContext = canvasElement.getContext("2d");

      // Audio graph: track -> stream -> source -> analyser -> gain -> out.
      const analyserNode = audioContext.createAnalyser();
      const gainNode = audioContext.createGain();
      const audioStream = new MediaStream([audioTrack]);
      const audioSourceNode = audioContext.createMediaStreamSource(audioStream);
      audioSourceNode.connect(analyserNode);
      analyserNode.connect(gainNode);
      gainNode.connect(audioContext.destination);

      // Mute playback of our own microphone to prevent echo.
      if (this.localAudio)
        gainNode.gain.value = 0;

      // Spectrum parameters.
      analyserNode.fftSize = 2048;
      const bufferLength = analyserNode.frequencyBinCount;
      const dataArray = new Uint8Array(bufferLength);

      // rAF loop: redraw the bar spectrum until the mic is muted.
      const updateSpectrumData = () => {
        if (!this.openAudio) {
          // Leave a clean canvas behind when the loop stops.
          canvasContext.clearRect(0, 0, canvasElement.width, canvasElement.height);
          return;
        }
        requestAnimationFrame(updateSpectrumData);
        analyserNode.getByteFrequencyData(dataArray);
        canvasContext.clearRect(0, 0, canvasElement.width, canvasElement.height);
        const barWidth = canvasElement.width / bufferLength;
        let x = 0;
        for (let i = 0; i < bufferLength; i++) {
          const barHeight = dataArray[i];
          canvasContext.fillStyle = `rgba(0, 0, 0, 0.8)`;
          canvasContext.fillRect(x, canvasElement.height - barHeight / 2, barWidth, barHeight / 2);
          x += barWidth + 1;
        }
      };
      updateSpectrumData();

      // Remote audio is played through the <audio> element; local audio
      // is only visualized.
      if (!this.localAudio) {
        this.$refs.audio.srcObject = audioStream;
      }
    },

    // Tell the parent the camera toggle was clicked for this peer.
    clickVideo() {
      this.$emit('clickVideo', this.peerId);
    },

    // Tell the parent the microphone toggle was clicked for this peer.
    clickAudio() {
      this.$emit('clickAudio', this.peerId);
    }
  }
}
</script>

<style scoped>


/* Tile container for one participant; overlay children position
   against it via `position: relative`. */
.video-view-video {
  width: 95%;
  position: relative;
  left: 2.5%;
  box-shadow: var(--el-box-shadow-dark);
  box-sizing: border-box;
  bottom: 0;
  /*padding: 5px;*/
  height: 260px;
  border-radius: 20px;
}

/* Header bar floated above the video (z-index 2 beats the canvas). */
.video-view-video-navigation {
  width: 93%;
  left:5%;
  position: absolute;
  margin: 0.2em auto;
  z-index: 2;
}

/* Icon cluster pinned to the right edge of the header. */
.video-view-video-icon {
  float: right;
  margin-right: 0.3em;
}

/* Peer name, truncated with an ellipsis when too long. */
.video-info{
  float:left;
  width: 50%;
  overflow: hidden;   /* required for text-overflow to take effect */
  white-space: nowrap;  /* keep the name on a single line */
  text-overflow: ellipsis;  /* clip overflowing text with an ellipsis */
}

/* Square clickable mic/camera icon. */
.video-icon {
  cursor: pointer;
  color: #ffffff;
  width: 1.2em;
  box-sizing: border-box;
  height: 1.2em;
  text-align: center;
  margin: 0.1em;
  background: black;
  border-radius: 0.2em;
  float:left;
  font-size: 20px;
  position: relative;
}

/* Diagonal strike-through drawn over an icon when its feature is off. */
.video-icon-close::before {
  content: '';
  position: absolute;
  left: 0;
  right: 0;
  width: 100%;
  height: 0.6em;
  box-sizing: border-box;
  border-bottom: 2px solid #fff;
  transform-origin: bottom center;
  transform: rotateZ(45deg) scale(1.141);
}

/* Video fills the tile; cover-crop to avoid letterboxing. */
video {
  position: relative;
  width: 100%;
  height: 100%;
  border-radius: 20px;
  object-fit: cover;
}

/* Spectrum canvas anchored to the bottom, under the header (z-index 1). */
.audio-canvas{
  z-index:1;
  position: absolute;
  bottom:0;
  width:90%;
  left:4%;
  box-sizing: border-box;
  height:30%;
}


</style>
