<template>
  <q-page padding>
    <!-- Video preview; `src` is assigned programmatically from a blob URL in setup() -->
    <video ref="videoRef" class="video" controls></video>
    <div class="row">
      <q-btn color="positive" @click="startVoiceRecording" label="Start Recording"></q-btn>
      <!-- While enabled, the next click on a voice waveform layer deletes it -->
      <q-toggle color="negative" v-model="removeMode" label="Remove Mode" />
      <div style="width: 320px" class="q-ml-md">
        <!-- Timeline zoom factor (1–50, 0.1 steps); watched in setup() to re-render tracks -->
        <q-slider
          dark
          label
          :min="1"
          :max="50"
          :step="0.1"
          v-model="zoomValue"
          color="positive"
        ></q-slider>
      </div>
      <q-btn color="primary" @click="exportAudio" label="Export Audio"></q-btn>
    </div>
    <!-- Container for the recorded-voice track (voiceTrack in setup()) -->
    <div ref="audioWaveform" class="wave-container track"></div>
    <!-- Container for the video's audio waveform + time axis (videoTrack in setup()) -->
    <div ref="videoWaveform" class="wave-container track"></div>
  </q-page>
</template>
<script lang="ts">
import { defineComponent, ref, watch } from 'vue';
import { useQuasar, DialogChainObject } from 'quasar';
import VideoEditorDBHelper from 'src/utils/videodb';
import VoiceRecorder from 'src/utils/VoiceRecorder';
import { getVideoBlobAudioBufferArray } from 'src/utils/common';
import WavesUI from 'src/plugins/waves-ui/waves-ui';
import Timeline from 'src/plugins/waves-ui/core/timeline';
import Track from 'src/plugins/waves-ui/core/track';
import Layer from 'src/plugins/waves-ui/core/layer';
import WaveformLayer from 'src/plugins/waves-ui/helpers/waveform-layer';
import CursorLayer from 'src/plugins/waves-ui//helpers/cursor-layer';
import { AudioBufferLoader } from 'src/plugins/waves-loaders';
import LayerTimeContext from 'src/plugins/waves-ui/core/layer-time-context';
import PlayalbeWaveformLayer from 'src/plugins/waves-utils/PlayableWaveformLayer';
import AudioExporter from 'src/plugins/waves-utils/AudioExporter';
const DEFAULT_ZOOM_VALUE = 1;
const WAVE_THEME_COLOR = '#808080';
export default defineComponent({
  setup() {
    const $q = useQuasar();
    // Demo video descriptor; `id` is the key used by VideoEditorDBHelper.
    const videoInfo = ref({
      id: '2',
      url: 'http://localhost/videos/blender.mp4',
      name: 'blender',
    });
    // Template refs: the two waveform track containers and the <video> element.
    const videoWaveform = ref<InstanceType<typeof HTMLElement>>();
    const audioWaveform = ref<InstanceType<typeof HTMLElement>>();
    const videoRef = ref<InstanceType<typeof HTMLVideoElement>>();
    const db = new VideoEditorDBHelper();
    const zoomValue = ref(DEFAULT_ZOOM_VALUE);
    const voiceRecorder: VoiceRecorder = new VoiceRecorder();
    const removeMode = ref(false);
    const isPlaying = ref(false);
    const minPxPerSec = ref(0);
    let timeline: Timeline;
    let videoTrack: Track;
    let voiceTrack: Track;
    let cursors: CursorLayer[] = [];
    let recordingDialog: DialogChainObject | null = null;
    // Playhead position in seconds, shared by the cursors, the <video>
    // element and the recorded-voice layers.
    let currentPosition = 0;
    let waveformLayer: WaveformLayer | null = null;
    let voiceList: PlayalbeWaveformLayer[] = [];
    // Properties of the loaded/recorded audio, forwarded to AudioExporter.
    const micphoneInfo = {
      duration: 0,
      length: 0,
      numberOfChannels: 1,
      sampleRate: 44100,
    };
    // Show a persistent dialog while recording; pressing OK stops the
    // recorder, which in turn fires the 'onStop' handler below.
    voiceRecorder.on('onStart', () => {
      recordingDialog = $q
        .dialog({
          message: 'recording your voice',
          persistent: true,
          progress: true,
        })
        .onOk(() => {
          voiceRecorder.stop();
        });
    });
    voiceRecorder.on('onReady', (recorder) => {
      console.log(recorder);
    });
    // Decode the recorded blob into an AudioBuffer and add it as a new
    // editable layer on the voice track.
    voiceRecorder.on('onStop', (data) => {
      if (voiceTrack) {
        const audiourl = URL.createObjectURL(data);
        const loader = new AudioBufferLoader();
        void loader.load(audiourl).then((buffer: AudioBuffer) => {
          addVoice(buffer);
          // Release the temporary object URL now that it has been decoded.
          URL.revokeObjectURL(audiourl);
        });
      }
    });
    // Re-render both tracks whenever the zoom slider changes.
    watch(zoomValue, (newValue) => {
      if (timeline) {
        timeline.zoom = newValue;
        timeline.tracks.update();
      }
    });
    // Insert a recorded voice buffer as an editable waveform layer starting
    // at the current playhead position.
    const addVoice = (buffer: AudioBuffer) => {
      micphoneInfo.numberOfChannels = buffer.numberOfChannels;
      micphoneInfo.sampleRate = buffer.sampleRate;
      const layer = new PlayalbeWaveformLayer(buffer, {
        height: 128,
        color: WAVE_THEME_COLOR,
        id: `voiceWaveform-${voiceList.length}`,
      });
      layer.setTimeContext(
        new WavesUI.core.LayerTimeContext(timeline.timeContext)
      );
      layer.duration = buffer.duration;
      layer.start = currentPosition;
      layer.setContextEditable(true);
      voiceList.push(layer);
      voiceTrack.add(layer);
      voiceTrack.render();
      voiceTrack.update();
    };
    const loadingDialog = $q.dialog({
      message: 'Loading, Please Wait...',
      persistent: true,
      progress: true,
      ok: false,
    });
    // Load the video: restore it from IndexedDB when cached, otherwise
    // download it, extract its audio track, and cache both blobs.
    db.initDB()
      .then(async () => {
        const result = await db.get(videoInfo.value.id);
        if (result) {
          loadingDialog.update({ message: 'Restoring Video Content' });
          // Reuse the record we already fetched instead of querying again.
          const { video, audio } = result;
          if (video && audio) {
            await showVideo(video, audio);
          } else {
            loadingDialog.hide();
            $q.dialog({
              title: 'Error',
              message: 'you should not be here',
            });
          }
        } else {
          loadingDialog.update({ message: 'Downloading Video Content' });
          const response = await fetch(videoInfo.value.url).catch((err) => {
            loadingDialog.hide();
            console.error(err);
            $q.dialog({
              title: 'Error',
              // was `err.messag` — typo showed "undefined" in the dialog
              message: err.message,
            });
          });
          if (response) {
            loadingDialog.update({ message: 'Processing Audio Data' });
            const videoBlob = await response.blob();
            const audioData = await getVideoBlobAudioBufferArray(videoBlob);
            const audioBlob = new Blob([audioData], { type: 'audio/mp3' });
            await db.store(videoInfo.value.id, videoBlob, audioBlob);
            await showVideo(videoBlob, audioBlob);
          }
        }
      })
      .catch((error) => {
        console.error(error);
        loadingDialog.hide();
        $q.dialog({
          title: 'Error',
          message: error.message,
        });
      });
    // Point the <video> element at the blob and build the waveform timeline
    // from the decoded audio.
    const showVideo = async (blob: Blob, audioBlob: Blob) => {
      loadingDialog.update({ message: 'Processing Waveform Data' });
      const videoUrl = URL.createObjectURL(blob);
      if (videoRef.value) {
        videoRef.value.src = videoUrl;
        const audioUrl = URL.createObjectURL(audioBlob);
        const audioLoader = new AudioBufferLoader();
        const audioBuffer = await audioLoader.load(audioUrl);
        micphoneInfo.duration = audioBuffer.duration;
        micphoneInfo.length = audioBuffer.length;
        initTimeline(audioBuffer);
        handleVideoEvent();
        loadingDialog.hide();
      }
    };
    // Build the waves-ui timeline: one track for the video's audio waveform
    // (with a time axis) and one track for recorded voice layers, plus a
    // playhead cursor on each.
    const initTimeline = (audioBuffer: AudioBuffer) => {
      const width = videoWaveform.value!.getBoundingClientRect().width;
      const timeAxisHeight = 18;
      const layerHeight = 128;
      const duration = audioBuffer.duration;
      // Fit the whole duration into the container width at zoom level 1.
      const pixelsPerSecond = width / duration;
      timeline = new WavesUI.core.Timeline(pixelsPerSecond, width);
      timeline.zoom = zoomValue.value;
      timeline.state = new WavesUI.states.ContextEditionState(timeline);
      videoTrack = new WavesUI.core.Track(
        videoWaveform.value,
        layerHeight + timeAxisHeight
      );
      voiceTrack = new WavesUI.core.Track(audioWaveform.value, layerHeight);
      timeline.add(videoTrack, 'videoWaveform');
      timeline.add(voiceTrack, 'voiceWaveform');
      const timeAxis = new WavesUI.axis.AxisLayer(
        WavesUI.axis.timeAxisGenerator(),
        {
          height: timeAxisHeight,
          top: layerHeight,
          id: 'videoWaveform',
        }
      );
      timeAxis.setTimeContext(timeline.timeContext);
      timeAxis.configureShape(
        WavesUI.shapes.Ticks,
        {},
        { color: 'steelblue', textColor: 'white' }
      );
      timeAxis.update();
      const videoCursor = new CursorLayer({
        height: layerHeight * 2,
      });
      const audioCursor = new CursorLayer({
        height: layerHeight * 2,
      });
      timeline.addLayer(videoCursor, 'videoWaveform');
      timeline.addLayer(audioCursor, 'voiceWaveform');
      cursors = [videoCursor, audioCursor];
      waveformLayer = new WavesUI.helpers.WaveformLayer(audioBuffer, {
        id: 'videoWaveform',
        height: layerHeight,
        color: WAVE_THEME_COLOR,
      });
      waveformLayer.setTimeContext(
        new WavesUI.core.LayerTimeContext(timeline.timeContext)
      );
      videoTrack.add(timeAxis);
      videoTrack.add(waveformLayer);
      videoTrack.render();
      videoTrack.update();
      timeline.on('event', timelineEventHandle);
      timeline.tracks.render();
      timeline.tracks.update('default');
    };
    // Central timeline interaction handler:
    //  - click while in remove mode deletes the clicked voice layer;
    //  - plain click seeks the playhead (cursors + <video>);
    //  - hovering a layer selects the matching timeline state (zoom for the
    //    video waveform, context edition for voice layers).
    // NOTE(review): the waves-ui event shape is untyped upstream, hence `any`.
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const timelineEventHandle = (event: any, layers: Layer[]) => {
      if (layers && layers.length > 0) {
        if (event.type === 'click') {
          if (removeMode.value) {
            if (layers[0].params.id.indexOf('voiceWaveform') >= 0) {
              layers[0].destroy();
              voiceTrack.remove(layers[0]);
              removeMode.value = false;
            }
            return;
          }
          if (layers[0].timeContext) {
            const okLayers = layers.filter(
              (layer) => layer.timeContext instanceof LayerTimeContext
            );
            let cp: number = okLayers[0].timeContext.pixelToTime(event.x);
            // Compensate for the auto-scroll offset applied during playback.
            cp -= timeline.offset;
            cursors.forEach((cursor) => {
              cursor.currentPosition = cp;
            });
            currentPosition = cp;
            timeline.tracks.update();
            videoRef.value!.currentTime = cp;
            event.originalEvent.preventDefault();
            return;
          }
        }
        // Don't switch editing states while the video is playing.
        if (isPlaying.value) {
          return;
        }
        if (layers[0].params.id) {
          const currentId = layers[0].params.id;
          switch (event.type) {
            case 'mouseover':
              {
                if (currentId === 'videoWaveform') {
                  timeline.state = new WavesUI.states.CenteredZoomState(
                    timeline
                  );
                } else if (currentId.indexOf('voiceWaveform') >= 0) {
                  timeline.state = new WavesUI.states.ContextEditionState(
                    timeline
                  );
                } else {
                  timeline.state = null;
                }
              }
              break;
          }
        }
      }
    };
    // Wire the <video> element events to voice playback and cursor movement.
    const handleVideoEvent = () => {
      if (videoRef.value) {
        videoRef.value.addEventListener('play', () => {
          isPlaying.value = true;
          // Start every recorded voice layer from the current playhead.
          voiceList.forEach((voice) => {
            voice.play(currentPosition);
          });
          console.log('isPlaying');
        });
        videoRef.value.addEventListener('pause', () => {
          isPlaying.value = false;
          voiceList.forEach((voice) => {
            voice.stop();
          });
          console.log('pausing');
        });
        videoRef.value.addEventListener('timeupdate', () => {
          currentPosition =
            videoRef.value!.currentTime % videoRef.value!.duration;
          if (cursors.length > 0) {
            cursors.forEach((cursor) => {
              cursor.currentPosition = currentPosition;
            });
            timeline.tracks.update();
          }
          // Once the playhead passes the middle of the view, scroll the
          // timeline so the cursor stays centered.
          const middlePixel = videoWaveform.value!.clientWidth / 2;
          if (timeline.timeToPixel(currentPosition) > middlePixel) {
            const middleTime: number =
              waveformLayer!.timeContext.pixelToTime(middlePixel);
            timeline!.offset = -currentPosition + middleTime;
          }
        });
      }
    };
    const removeVoiceBlock = () => {
      removeMode.value = true;
    };
    // Mix all recorded voice layers down and export them as a single file.
    const exportAudio = () => {
      if (voiceList.length > 0) {
        const exporter = new AudioExporter(
          micphoneInfo.duration,
          micphoneInfo.length,
          micphoneInfo.numberOfChannels,
          micphoneInfo.sampleRate
        );
        exporter.export(voiceList);
      }
    };
    return {
      videoWaveform,
      audioWaveform,
      videoRef,
      videoInfo,
      minPxPerSec,
      zoomValue,
      removeMode,
      isPlaying,
      startVoiceRecording() {
        voiceRecorder.start();
      },
      stopVoiceRecording() {
        voiceRecorder.stop();
      },
      removeVoiceBlock,
      exportAudio,
    };
  },
});
</script>
<style lang="sass">
// Height matches the 128px layerHeight used by the waves-ui tracks
.wave-container
  height: 128px
// Fixed 16:9 size for the video preview element
.video
  width: 960px
  height: 540px
// Round the corners of the SVG that waves-ui renders inside each track
.track svg
  border-radius: 3px
</style>