import {
    computed,
    ComputedRef,
    nextTick,
    onMounted,
    reactive,
    Ref,
    ref
} from 'vue'
import { analyser, audioContext } from './audio'

// Candidate container/codec strings for MediaRecorder.
// Filtered at runtime against MediaRecorder.isTypeSupported
// (see detectionVideoTypes) to build the format <select> options.
const videoTypes = [
    'video/mp4',
    'video/webm',
    'video/webm;codecs=vp8',
    'video/webm;codecs=daala',
    'video/webm;codecs=h264',
    'video/mpeg'
]

/**
 * @description Mime types from videoTypes that this browser's MediaRecorder
 * actually supports; used to populate the recording-format selector.
 * Annotated directly instead of using a type assertion — `computed` already
 * infers ComputedRef<string[]>, so the `as` cast was redundant and unchecked.
 */
export const detectionVideoTypes: ComputedRef<string[]> = computed(() =>
    videoTypes.filter(item => MediaRecorder.isTypeSupported(item))
)

// Template refs for the two <video> elements:
// screenPlayer plays back finished recordings, cameraPlayer shows the
// live camera preview. Both are filled in by the get*PlayerElement setters.
const screenPlayer: Ref<HTMLVideoElement | null> = ref(null)
const cameraPlayer: Ref<HTMLVideoElement | null> = ref(null)

// Receive the playback <video> element (wrapped in a ref) from the component
// and store the unwrapped element for use in listenStop.
export const getScreenPlayerElement = (value: Ref<HTMLVideoElement>) => {
    const element = value.value
    screenPlayer.value = element
}
// Receive the camera-preview <video> element from the component.
// NOTE(review): takes a raw element while getScreenPlayerElement takes a
// Ref — confirm the asymmetry is intentional on the caller side.
export const getCameraPlayerElement = (value: HTMLVideoElement) => {
    cameraPlayer.value = value
}

// Currently selected recording mime type (bound to the format <select>).
// Falls back to '' — which lets the browser pick its default container —
// when no candidate type is supported, instead of passing `undefined`
// into the MediaRecorder options.
export const videoType: Ref<string> = ref(detectionVideoTypes.value[0] ?? '')

// Whether the camera is enabled
// Whether the microphone is enabled
export const isOpenMedia = reactive({
    camera: true,
    microphone: true
})
// Toggle the camera's video tracks on or off.
export const switchCamera = (value: boolean) => {
    if (value) {
        media.openCamera()
    } else {
        media.closeCamera()
    }
}

// Toggle the microphone's audio tracks on or off.
export const switchMicrophone = (value: boolean) => {
    if (value) {
        media.openMicrophone()
    } else {
        media.closeMicrophone()
    }
}

/**
 * @description Recording state holder.
 * - mediaStreamConstraints: audio:true makes the screen-picker dialog offer
 *   audio capture as well
 * - chunks: Blob pieces delivered by MediaRecorder.ondataavailable
 * - mediaRecord: the active MediaRecorder, null until startRecord runs
 * The redundant trailing `as IScreenState` assertion was dropped — the
 * declaration annotation already types (and checks) the literal.
 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder
 */
const screenState: IScreenState = reactive({
    mediaStreamConstraints: { audio: true, video: true },
    chunks: [],
    mediaRecord: null,
    videoAndDownloadUrl: []
})

// UI state for the recorder controls.
// NOTE(review): start/stop/pause/resume are presumably bound to the
// buttons' `disabled` attributes (true = disabled) and `radio` marks a
// recording in progress — confirm against the consuming component.
export const flags = reactive({
    radio: false,
    start: false,
    stop: true,
    pause: true,
    resume: true
})

/**
 * @description Start a screen-recording session: optionally open the camera
 * preview, prompt the user via getDisplayMedia, then record the chosen
 * screen/window with MediaRecorder using the selected mime type.
 * On cancellation (or any failure) the camera is closed and the start
 * button is re-enabled.
 */
export const startRecord = async () => {
    if (isOpenMedia.camera) {
        media.openMedia()
    }

    const { mediaStreamConstraints } = screenState
    try {
        const result = await navigator.mediaDevices.getDisplayMedia(
            mediaStreamConstraints
        )

        // Recording is live: enable stop/pause, disable start.
        flags.radio = true
        flags.start = true
        flags.pause = false
        flags.stop = false

        // Keep a non-null local so later uses need no `as MediaRecorder` cast.
        const mediaRecord = new MediaRecorder(result, {
            // videoType is pre-filtered through MediaRecorder.isTypeSupported
            // and offered to the user as a <select>.
            mimeType: videoType.value
        })
        screenState.mediaRecord = mediaRecord

        // The user may end the share from the browser's own UI; forward
        // that to the recorder. Guard against an empty track list.
        const [videoTrack] = result.getVideoTracks()
        if (videoTrack) {
            videoTrack.onended = () => {
                mediaRecord.stop()
            }
        }

        // Start the recorder first (original ordering preserved); data
        // events are only delivered while the recorder is active.
        mediaRecord.start()

        mediaRecord.ondataavailable = ({ data }) => {
            screenState.chunks.push(data)
        }

        mediaRecord.onstop = () => {
            listenStop(result)
        }
    } catch (err: unknown) {
        // Typically the user dismissed the screen-picker dialog.
        // Narrow before reading .message — the rejection value is not
        // guaranteed to be an Error (the old `catch ({ message }: any)`
        // destructuring would throw on null/undefined rejections).
        console.log(err instanceof Error ? err.message : err)
        media.closeMedia()
        flags.start = false
    }
}

// Stop the active recording. Optional chaining replaces the old blind
// `as MediaRecorder` cast, so calling this before any recording has
// started is a safe no-op instead of a crash on null.
export const stopRecord = () => {
    screenState.mediaRecord?.stop()
}

/**
 * @description Object URLs of finished recordings, used for playback and
 * as download links. Declared `const` instead of `let`: the binding is
 * never reassigned (only mutated via push), and exporting a mutable
 * binding invites accidental rebinding by importers.
 */
export const videoAndDownloadUrl: Array<string> = reactive([])

/**
 * @description Follow-up work for mediaRecord.onstop: reset the UI flags,
 * close the camera preview, wrap the recorded data in an object URL,
 * start playback, and release the captured display stream.
 * @param result the MediaStream returned by getDisplayMedia
 */
const listenStop = (result: MediaStream) => {
    // Back to idle: only "start" is actionable again.
    flags.radio = false
    flags.start = false
    flags.stop = true
    flags.pause = true
    flags.resume = true

    if (isOpenMedia.camera) {
        media.closeMedia()
    }

    // Guard against an empty chunk list (e.g. stop fired before any data
    // arrived) — the old unchecked index access would crash here.
    const { chunks } = screenState
    const lastChunk = chunks[chunks.length - 1]
    if (lastChunk) {
        const blob = new Blob([lastChunk], { type: lastChunk.type })
        // Drop consumed chunks so repeated recordings don't grow memory
        // unbounded (only the latest chunk was ever used anyway).
        screenState.chunks = []

        videoAndDownloadUrl.push(URL.createObjectURL(blob))
        nextTick(() => {
            screenPlayer.value!.src =
                videoAndDownloadUrl[videoAndDownloadUrl.length - 1]
            screenPlayer.value!.play()
        })
    }

    /**
     * mediaRecord.stop() only stops the recorder; the captured display
     * stream (result) keeps running, so stop each of its tracks to end
     * the browser's "sharing your screen" indicator.
     */
    result.getTracks().forEach(track => {
        track.stop()
    })
}

// Pause the active recording (no-op when none exists) and flip the
// pause/resume button states.
export const pauseRecord = () => {
    const recorder = screenState.mediaRecord
    if (recorder) {
        recorder.pause()
    }
    flags.pause = true
    flags.resume = false
}

// Resume a paused recording (no-op when none exists) and flip the
// pause/resume button states back.
export const resumeRecord = () => {
    const recorder = screenState.mediaRecord
    if (recorder) {
        recorder.resume()
    }
    flags.pause = false
    flags.resume = true
}

// Camera/microphone track handles, captured when openMedia succeeds;
// null until the first successful getUserMedia call.
let getTracks: MediaStreamTrack[] | null = null
let getAudioTracks: MediaStreamTrack[] | null = null
let getVideoTracks: MediaStreamTrack[] | null = null

// getUserMedia constraints for the camera preview.
// NOTE(review): `audio` reads isOpenMedia.microphone once at module load,
// so later toggles do not affect subsequent getUserMedia calls — confirm
// this snapshot behavior is intended.
const constraints: IConstraints = {
    // video: {
    //     width: { ideal: 300 },
    //     height: { ideal: 150 },
    //     // https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints/facingMode
    //     facingMode: 'user'
    // },
    video: true,
    audio: isOpenMedia.microphone
}

/**
 * @description Camera/microphone helpers. The track-list parameters are
 * typed `MediaStreamTrack[] | null` so callers can pass the nullable
 * module-level handles directly — the previous `as MediaStreamTrack[]`
 * casts hid the null case the runtime checks were already guarding.
 */
const media = {
    // Stop every track in the list (no-op when the list is null).
    utils(tracks: MediaStreamTrack[] | null) {
        tracks?.forEach(track => {
            track.stop()
        })
    },
    // Enable/disable every track in the list (no-op when the list is null).
    isEnabled(tracks: MediaStreamTrack[] | null, enabled: boolean) {
        tracks?.forEach(track => {
            track.enabled = enabled
        })
    },
    // Open the camera/microphone, remember the track handles, route the
    // audio into the shared AudioContext, and show the video preview.
    openMedia() {
        navigator.mediaDevices
            .getUserMedia(constraints)
            .then((mediaStream: MediaStream) => {
                getTracks = mediaStream.getTracks()
                getAudioTracks = mediaStream.getAudioTracks()
                getVideoTracks = mediaStream.getVideoTracks()

                // Audio part:
                // https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamAudioSourceNode
                // NOTE(review): connecting the microphone source straight to
                // audioContext.destination plays the mic through the speakers,
                // which can cause audible feedback — confirm this monitoring
                // path is intentional.
                const source = audioContext.createMediaStreamSource(mediaStream)
                source.connect(audioContext.destination)

                console.log('source =>', source)
                // Show the captured camera stream in the preview element.
                cameraPlayer.value!.srcObject = mediaStream
                cameraPlayer.value!.onloadedmetadata = () => {
                    cameraPlayer.value!.play()
                }
            })
            .catch(function (err) {
                console.log(err.name + ': ' + err.message)
            })
    },
    // Stop all tracks (camera + microphone).
    closeMedia() {
        this.utils(getTracks)
    },
    openMicrophone() {
        this.isEnabled(getAudioTracks, true)
    },
    closeMicrophone() {
        this.isEnabled(getAudioTracks, false)
    },
    openCamera() {
        this.isEnabled(getVideoTracks, true)
    },
    closeCamera() {
        this.isEnabled(getVideoTracks, false)
    }
}
