import { nextTick, reactive, Ref, ref, watch } from 'vue'
import { isEnabled, mediaStop } from '../../utils'

// <video> elements the captured streams are rendered into; the consuming
// component registers them through the exported getter helpers.
const screenPlayer = ref<HTMLVideoElement | null>(null)
const cameraPlayer = ref<HTMLVideoElement | null>(null)

/**
 * Registers the <video> element used to preview the screen recording.
 * The caller hands over a Ref; only its current element is stored.
 */
export const getScreenPlayerElement = (value: Ref<HTMLVideoElement>) => {
    const element = value.value
    screenPlayer.value = element
}
/**
 * Registers the <video> element used to preview the webcam stream.
 * NOTE(review): unlike getScreenPlayerElement this takes the raw element,
 * not a Ref — presumably the two call sites differ; confirm with callers.
 */
export const getCameraPlayerElement = (value: HTMLVideoElement) => {
    cameraPlayer.value = value
}

/**
 * Reactive on/off switches for the microphone and the camera.
 * Toggled by the UI and observed by the watchers further down this module.
 */
export const switchMediaValue = reactive<{
    microphone: boolean
    camera: boolean
}>({
    microphone: false,
    camera: false
})

/** One MediaRecorder slot per captured source; null until recording starts. */
interface MediaRecorders {
    screen: MediaRecorder | null
    audio: MediaRecorder | null
    camera: MediaRecorder | null
}

/**
 * Active MediaRecorder instances, keyed by source.
 * Declared through a generic type parameter instead of an `as` assertion so
 * the initializer is actually checked against the declared shape.
 */
export const mediaRecorder = reactive<MediaRecorders>({
    screen: null,
    audio: null,
    camera: null
})

/**
 * Media-device acquisition helpers.
 * - screen: capture the screen via getDisplayMedia
 * - audio:  capture the microphone (audio only)
 * - camera: capture the webcam at a small preview size
 * Each helper resolves with a MediaStream, or undefined when the request
 * fails or the user dismisses the browser prompt.
 */
// Set to false when the user cancels the screen-share prompt so that
// recordOperation.start() aborts. Reset on every new attempt — otherwise a
// single cancellation would permanently disable recording.
let open = true
export const mediaDeviceOperation = {
    async screen() {
        // A previous cancellation must not block later attempts.
        open = true
        try {
            return await navigator.mediaDevices.getDisplayMedia({
                audio: true,
                video: true
            })
        } catch (error) {
            console.log('点击取消')
            open = false
            mediaRecorder.camera && mediaRecorder.camera.stop()
        }
    },
    async audio() {
        try {
            return await navigator.mediaDevices.getUserMedia({
                video: false,
                audio: true
            })
        } catch (error) {
            console.log('audio error')
        }
    },
    async camera() {
        try {
            return await navigator.mediaDevices.getUserMedia({
                // Small stream: the camera is only a picture-in-picture preview.
                video: {
                    width: { ideal: 300 },
                    height: { ideal: 150 }
                },
                audio: false
            })
        } catch (error) {
            console.log('camera error')
        }
    }
}

/**
 * High-level recording control: start / stop / pause / resume.
 */
export const recordOperation = {
    /**
     * Starts a screen recording and, depending on the current switches, the
     * microphone and camera recorders as well. Does nothing when the user
     * dismissed the screen-share prompt or no stream was obtained.
     */
    async start() {
        const screenResult = await mediaDeviceOperation.screen()
        if (!open || !screenResult) return

        mediaRecorder.screen = new MediaRecorder(screenResult, {
            mimeType: 'video/webm'
        })

        const audioResult = await audio.start()

        if (switchMediaValue.camera) {
            await camera.start()
        }

        // Merge the screen video track(s) with the microphone audio track(s)
        // into a single stream for the preview player.
        const tracks = [
            ...screenResult.getVideoTracks(),
            ...(audioResult ? audioResult.getAudioTracks() : [])
        ]
        if (screenPlayer.value) {
            screenPlayer.value.srcObject = new MediaStream(tracks)
        }

        // The browser's own "stop sharing" button ends the video track
        // without going through our stop() — mirror the cleanup here.
        screenResult.getVideoTracks().forEach(track => {
            track.addEventListener('ended', (event: Event) => {
                console.log('event ended=>', event)
                listen.onstop(null, screenResult)
                // The audio/camera recorders may never have been created
                // (e.g. camera switch off), so guard instead of asserting.
                mediaRecorder.audio?.stop()
                mediaRecorder.camera?.stop()
            })
        })

        mediaRecorder.screen.start()

        mediaRecorder.screen.addEventListener(
            'dataavailable',
            listen.ondataavailable
        )

        // Release the screen tracks once the recorder itself stops.
        mediaRecorder.screen.addEventListener('stop', event => {
            listen.onstop(event, screenResult)
        })
    },
    /** Stops every recorder that is currently running. */
    stop() {
        mediaRecorder.screen?.stop()
        mediaRecorder.audio?.stop()
        mediaRecorder.camera?.stop()
    },
    /** Pauses the screen recorder; no-op when not recording. */
    pause() {
        mediaRecorder.screen?.pause()
    },
    /** Resumes the screen recorder; no-op when not recording. */
    resume() {
        mediaRecorder.screen?.resume()
    }
}

/**
 * Object URLs of the recorded chunks, in recording order; consumers use them
 * to download the captured media. Exported as `const` — the binding is never
 * reassigned in this module, only pushed to.
 */
export const videoAndDownloadUrl = reactive<string[]>([])

// Raw recorded Blob chunks, appended by the `dataavailable` listener.
let chunks: Array<Blob> = []

/**
 * Shared MediaRecorder event handlers.
 */
const listen = {
    /**
     * Collects a recorded chunk and publishes an object URL for it so the
     * UI can offer a per-chunk download.
     */
    ondataavailable({ data }: BlobEvent) {
        chunks.push(data)
        const latest = chunks[chunks.length - 1]
        const blob = new Blob([latest], {
            type: latest.type
        })

        videoAndDownloadUrl.push(URL.createObjectURL(blob))
    },
    /**
     * Releases a stream by stopping each of its tracks — stopping the
     * tracks (rather than the recorder) is what turns off the browser's
     * capture indicator.
     */
    onstop(event: Event | null, result: MediaStream) {
        result.getTracks().forEach(track => {
            track.stop()
        })
    },
    /** Convenience wrapper: treat a stream's "ended" like a stop. */
    onended(stream: MediaStream) {
        this.onstop(null, stream)
    }
}

/**
 * Microphone recorder management.
 */
const audio = {
    /**
     * Acquires the microphone, starts recording it and wires up cleanup.
     * When the microphone switch is off the track is kept but muted.
     * Returns the microphone stream, or undefined when access failed
     * (the original would crash constructing MediaRecorder(undefined)).
     */
    async start() {
        const audioResult = await mediaDeviceOperation.audio()
        if (!audioResult) return undefined

        const recorder = new MediaRecorder(audioResult, {
            mimeType: 'audio/webm'
        })
        mediaRecorder.audio = recorder

        recorder.start()
        recorder.addEventListener('stop', event => {
            listen.onstop(event, audioResult)
        })

        // Start muted unless the microphone toggle is already on.
        if (!switchMediaValue.microphone) {
            recorder.stream.getAudioTracks().forEach(track => {
                track.enabled = false
            })
        }
        return audioResult
    },
    /** Mutes the microphone tracks; recording continues silently. */
    stop() {
        mediaRecorder.audio &&
            mediaRecorder.audio.stream.getAudioTracks().forEach(track => {
                track.enabled = false
            })
    }
}

// Keep the microphone tracks' enabled state in sync with the UI toggle:
// on → re-enable the existing tracks; off → mute them via audio.stop().
watch(
    () => switchMediaValue.microphone,
    value => {
        console.log('value 麦克风=>', value)
        if (value) {
            if (mediaRecorder.audio) {
                isEnabled(mediaRecorder.audio.stream.getAudioTracks(), true)
            }
        } else {
            audio.stop()
        }
    }
)

/**
 * Webcam recorder management.
 */
const camera = {
    /**
     * Acquires the webcam, shows it in the preview element (when one has
     * been registered) and records it. Returns the camera stream, or
     * undefined when access failed.
     */
    async start() {
        const cameraResult = await mediaDeviceOperation.camera()
        if (!cameraResult) return undefined

        const recorder = new MediaRecorder(cameraResult, {
            mimeType: 'video/webm'
        })
        mediaRecorder.camera = recorder

        // Guard instead of `!`-asserting: the preview element may not have
        // been registered via getCameraPlayerElement yet.
        const player = cameraPlayer.value
        if (player) {
            player.srcObject = cameraResult
            player.onloadedmetadata = () => {
                player.play()
            }
        }

        recorder.start()

        recorder.addEventListener('stop', event => {
            listen.onstop(event, cameraResult)
        })

        // Start disabled unless the camera toggle is already on.
        if (!switchMediaValue.camera) {
            isEnabled(recorder.stream.getVideoTracks())
        }
        return cameraResult
    },
    /** Disables the camera tracks via the shared helper. */
    stop() {
        mediaRecorder.camera &&
            mediaStop(mediaRecorder.camera.stream.getVideoTracks())
    }
}

// Start or disable the camera when the UI toggle changes.
watch(
    () => switchMediaValue.camera,
    value => {
        console.log('value 摄像头=>', value)
        if (value) {
            // camera.start() is async; report failures instead of leaving
            // an unhandled promise rejection.
            camera.start().catch(error => console.log('camera error', error))
        } else {
            camera.stop()
        }
    }
)
