/**
 * Payload accepted by Speaker.play(): audio chunks plus word-level and
 * viseme-level timing tracks (each track is a set of parallel arrays).
 */
export type SpeakerPack = {
    // Raw audio chunks; Speaker.playAudio() concatenates these and decodes
    // them as signed 16-bit mono PCM (see Speaker.pcmToAudioBuffer).
    audio: ArrayBuffer[]

    // Parallel arrays: words[i] starts at wtimes[i] and lasts wdurations[i].
    // NOTE(review): units are presumably milliseconds (matches the ms-based
    // _visemeDelay usage in Speaker) — confirm against the producer.
    words: string[]
    wtimes: number[]
    wdurations: number[]

    // Parallel arrays: visemes[i] (e.g. 'PP', 'FF') at vtimes[i] for
    // vdurations[i]; consumed only by the commented-out lip-sync code.
    visemes: string[]
    vtimes: number[]
    vdurations: number[]

    // Animation names; currently unused by Speaker in this file.
    animations: string[]
}

export class Speaker {
    // Web Audio graph: the speech and background sources each feed their
    // own gain node; both gains run through a shared convolver (reverb)
    // into the context destination. All nodes are (re)created in
    // initAudioContext().
    private _audioCtx!: AudioContext
    private _audioSpeechSource!: AudioBufferSourceNode
    private _audioBackgroundSource!: AudioBufferSourceNode
    private _audioBackgroundGainNode!: GainNode
    private _audioSpeechGainNode!: GainNode
    private _audioReverbNode!: ConvolverNode

    // Options; callers may override via the constructor argument.
    // pcmSampleRate: sample rate (Hz) assumed for raw 16-bit PCM input.
    private _opt: { [key: string]: unknown } = {
        pcmSampleRate: 22050
    }

    // Viseme lead time (ms) — only referenced by the commented-out
    // lip-sync code in play().
    private _visemeDelay: number = 100

    // Speech state machine: 'idle' | 'talking'.
    private _stateName = 'idle'
    private _isSpeaking: boolean = false
    // Queue of speech lines: { audio: ... } items and { break: ms } pauses.
    private _speechQueue: Record<string, unknown>[] = []

    private _isAudioPlaying: boolean = false
    // Audio items currently scheduled for playback (drained by playAudio).
    private _audioPlaylist: Record<string, unknown>[] = []

    /**
     * Builds the audio graph, then overlays caller options on defaults.
     * @param opt Option overrides merged into this._opt (e.g. pcmSampleRate).
     */
    constructor(opt: { [key: string]: unknown } = {}) {
        this.initAudioContext()

        Object.assign(this._opt, opt)
    }

    /**
     * Creates the AudioContext and wires up the node graph:
     * speech gain + background gain -> convolver -> destination.
     * The convolver is initialized to a dry (unit-impulse) response.
     * NOTE: setReverb() is async but intentionally not awaited here; the
     * dry-impulse branch completes synchronously enough for startup.
     */
    initAudioContext = () => {
        this._audioCtx = new AudioContext()
        this._audioSpeechSource = this._audioCtx.createBufferSource()
        this._audioBackgroundSource = this._audioCtx.createBufferSource()

        this._audioBackgroundGainNode = this._audioCtx.createGain()
        this._audioSpeechGainNode = this._audioCtx.createGain()
        this._audioReverbNode = this._audioCtx.createConvolver()

        this.setReverb(null)
        this._audioBackgroundGainNode.connect(this._audioReverbNode)
        this._audioSpeechGainNode.connect(this._audioReverbNode)
        this._audioReverbNode.connect(this._audioCtx.destination)
    }

    /**
     * Enqueues a speech payload and kicks off playback.
     * Currently only the audio chunks are queued; the word/viseme tracks
     * are accepted but the lip-sync animation build is disabled (commented
     * out below). A 300 ms break is appended after each utterance.
     * @param payload Audio plus timing tracks; see SpeakerPack.
     */
    play = (payload: SpeakerPack) => {

        const o: { [key: string]: unknown } = {}
        if (payload.words) {

            if (payload.visemes) {
                // for (let idx = 0; idx < payload.visemes.length; ++idx) {
                //     const viseme = payload.visemes[idx]
                //     const vtime = payload.vtimes[idx]
                //     const vduration = payload.vdurations[idx]
                //     lipsyncAnim.push({
                //         template: { name: 'viseme' },
                //         ts: [
                //             vtime - vduration - this._visemeDelay,
                //             vtime,
                //             vtime + vduration
                //         ],
                //         vs: {
                //             ['viseme_' + viseme]: [
                //                 null,
                //                 viseme === 'PP' || viseme ==='FF' ? 0.9 : 1,
                //             ]
                //         }
                //     })
                // }
            }
        }

        if (payload.audio) {
            o.audio = payload.audio
        }

        if (Object.keys(o).length) {
            this._speechQueue.push(o)
            this._speechQueue.push({ break: 300 })
            this.startSpeaking()
        }
    }

    // Not implemented yet.
    pause = () => { }

    // Not implemented yet.
    stop = () => { }

    /**
     * Drains the speech queue one line at a time.
     * - { break: ms } lines schedule a delayed re-entry.
     * - { audio } lines are handed to playAudio(), which calls back into
     *   startSpeaking(true) when its playlist empties.
     * - Anything else is skipped.
     * @param force Re-enter even while already speaking (used internally
     *              by the break timer and playAudio callbacks).
     */
    startSpeaking = async (force: boolean = false) => {
        if (this._isSpeaking && !force) {
            return
        }

        this._stateName = 'talking'
        this._isSpeaking = true
        if (this._speechQueue.length) {
            let line: Record<string, unknown> = this._speechQueue.shift()!
            if (line.break) {
                // Pause between utterances, then continue draining.
                // (startSpeaking is an arrow property, so no bind is needed.)
                setTimeout(() => this.startSpeaking(true), line.break as number)
            } else if (line.audio) {
                this._audioPlaylist.push({
                    audio: line.audio
                })
                this.playAudio()
            } else {
                // Unknown line type — skip it and keep draining.
                this.startSpeaking(true)
            }
        } else {
            this._stateName = 'idle'
            this._isSpeaking = false
        }
    }

    /**
     * Stops current speech audio and clears the playlist, but leaves the
     * speech queue intact (unlike stopSpeaking).
     */
    pauseSpeaking = () => {
        try {
            this._audioSpeechSource.stop()
        } catch (error) {
            // stop() throws InvalidStateError if the source never started;
            // safe to ignore.
        }
        this._audioPlaylist.length = 0
        this._stateName = 'idle'
        this._isSpeaking = false
        this._isAudioPlaying = false
    }

    /**
     * Stops current speech audio and clears both the playlist and the
     * pending speech queue.
     */
    stopSpeaking = () => {
        try {
            this._audioSpeechSource.stop()
        } catch (error) {
            // stop() throws InvalidStateError if the source never started;
            // safe to ignore.
        }
        this._audioPlaylist.length = 0
        this._speechQueue.length = 0
        this._stateName = 'idle'
        this._isSpeaking = false
        this._isAudioPlaying = false
    }

    /**
     * Plays the next queued audio item. Arrays of ArrayBuffers are treated
     * as raw PCM chunks (concatenated + converted); anything else is used
     * as a decoded AudioBuffer. When the playlist empties, control returns
     * to startSpeaking(true).
     * @param force Re-enter even while audio is already playing (used by
     *              the 'ended' callback).
     */
    playAudio = async (force: boolean = false) => {
        if (this._isAudioPlaying && !force) {
            return
        }
        this._isAudioPlaying = true
        if (this._audioPlaylist.length) {
            const item = this._audioPlaylist.shift()!

            // The context starts suspended until a user gesture; try to
            // resume, but don't hang forever if the browser refuses.
            if (this._audioCtx.state === 'suspended') {
                const resume = this._audioCtx.resume()
                let timer: ReturnType<typeof setTimeout> | undefined
                const timeout = new Promise((_r, reject) => {
                    // FIX: reject with an Error (was a bare string) and keep
                    // the timer id so it can be cancelled — previously the
                    // timer always fired, leaving an unhandled rejection
                    // whenever resume() won the race.
                    timer = setTimeout(() => reject(new Error('p2')), 1000)
                })
                try {
                    await Promise.race([resume, timeout])
                } catch (error) {
                    console.log("Can't play audio. Web Audio API suspended. This is often due to calling some speak method before the first user action, which is typically prevented by the browser.")
                    // This item is dropped; try the next one.
                    this.playAudio(true)
                    return
                } finally {
                    clearTimeout(timer)
                }
            }

            this._audioSpeechSource = this._audioCtx.createBufferSource()
            if (Array.isArray(item.audio)) {
                // Raw PCM arrives as chunked ArrayBuffers; join and convert.
                const buf = this.concatArrayBuffers(item.audio)
                this._audioSpeechSource.buffer = this.pcmToAudioBuffer(buf)
            } else {
                this._audioSpeechSource.buffer = item.audio as AudioBuffer
            }
            this._audioSpeechSource.connect(this._audioSpeechGainNode)
            this._audioSpeechSource.addEventListener(
                "ended",
                () => {
                    this._audioSpeechSource.disconnect();
                    this.playAudio(true);
                },
                { once: true }
            )

            // Small scheduling margin (ms) before playback starts.
            const delay = 100
            this._audioBackgroundGainNode.gain.value = 1
            // FIX: start(when) takes an absolute AudioContext timestamp —
            // the old 'start(delay / 1000)' was a time already in the past
            // once currentTime exceeded 0.1 s, so the margin was ignored.
            this._audioSpeechSource.start(this._audioCtx.currentTime + delay / 1000);
        } else {
            this._isAudioPlaying = false
            this.startSpeaking(true)
        }
    }

    /**
     * Fetches, decodes, and loops an audio file through the background
     * gain node, replacing any background track already playing.
     * @param url Resource to fetch (any fetch()-compatible input).
     */
    playBackgroundAudio = async (url: string | URL | Request) => {
        // Fetch audio
        let response = await fetch(url)
        let arraybuffer = await response.arrayBuffer()

        // Play audio in a loop
        this.stopBackgroundAudio()
        this._audioBackgroundSource = this._audioCtx.createBufferSource()
        this._audioBackgroundSource.loop = true
        this._audioBackgroundSource.buffer = await this._audioCtx.decodeAudioData(
            arraybuffer
        );
        this._audioBackgroundSource.playbackRate.value = 1
        this._audioBackgroundSource.connect(this._audioBackgroundGainNode)
        this._audioBackgroundSource.start(0)
    }

    /**
     * Stops the looping background track. If stop() throws (source was
     * never started), the node is detached instead.
     */
    stopBackgroundAudio = () => {
        try {
            this._audioBackgroundSource.stop()
        } catch (error) {
            this._audioBackgroundSource.disconnect()
        }
    }

    /**
     * Loads an impulse response into the convolver, or installs a unit
     * impulse (dry passthrough — first sample 1, rest 0) when url is null.
     * @param url Impulse-response audio to fetch, or null for no reverb.
     */
    setReverb = async (url: string | URL | Request | null = null) => {
        if (url) {
            let response = await fetch(url)
            let arraybuffer = await response.arrayBuffer()
            this._audioReverbNode.buffer = await this._audioCtx.decodeAudioData(arraybuffer)
        } else {
            // 1-second stereo unit impulse: convolution with it is identity.
            const impulse = this._audioCtx.createBuffer(2, this._audioCtx.sampleRate, this._audioCtx.sampleRate)
            impulse.getChannelData(0)[0] = 1
            impulse.getChannelData(1)[0] = 1
            this._audioReverbNode.buffer = impulse
        }
    }

    /**
     * Concatenates ArrayBuffers into one contiguous buffer, in order.
     * @param bufs Chunks to join; an empty array yields an empty buffer.
     */
    concatArrayBuffers = (bufs: ArrayBuffer[]) => {
        let len = 0;
        for (let i = 0; i < bufs.length; i++) {
            len += bufs[i].byteLength;
        }
        let buf = new ArrayBuffer(len);
        let arr = new Uint8Array(buf);
        let p = 0;
        for (let i = 0; i < bufs.length; i++) {
            arr.set(new Uint8Array(bufs[i]), p);
            p += bufs[i].byteLength;
        }
        return buf;
    }

    /**
     * Converts signed 16-bit mono PCM into a 1-channel AudioBuffer at
     * this._opt.pcmSampleRate, scaling samples into [-1, 1].
     * @param buf Raw PCM bytes; byteLength should be a multiple of 2.
     * @returns The decoded AudioBuffer.
     */
    pcmToAudioBuffer(buf: ArrayBuffer) {
        const arr = new Int16Array(buf);
        const floats = new Float32Array(arr.length);
        for (let i = 0; i < arr.length; i++) {
            // FIX: Int16Array values are already signed, so the previous
            // 'arr[i] >= 0x8000' branch could never be taken; negative
            // samples were divided by 0x7fff, pushing -32768 slightly
            // below -1.0. Scale negatives by 0x8000, positives by 0x7fff.
            floats[i] = arr[i] < 0 ? arr[i] / 0x8000 : arr[i] / 0x7fff;
        }
        const audio = this._audioCtx.createBuffer(
            1,
            floats.length,
            this._opt.pcmSampleRate as number
        )
        audio.copyToChannel(floats, 0, 0);
        return audio;
    }
}