// Default application id used when none is passed to the IatRecorder constructor.
const APPID = '69a9b3dd';
// Endpoint of the speech-dictation (IAT) websocket relay service.
const URL = 'wss://yuanshu-aigc-api.hxgro.com/ws';
import fs from 'fs'
import { DemoServer } from './AIDemo';

// Module-wide singleton worker that transcodes Float32 microphone frames into
// the PCM chunks pushed onto IatRecorder.audioData. Created lazily by the
// first IatRecorder instance. (Typed `any` because the worker script is
// loaded dynamically through the editor asset database.)
let transWorker: any;

// Timestamps (ms since epoch) of the first frame sent / socket close,
// kept only for measuring session duration while debugging.
let startTime: number;
let endTime: number;

/**
 * Streams microphone audio to a speech-dictation (IAT) websocket service and
 * exposes the recognized text via callbacks.
 *
 * Lifecycle statuses: 'init' (socket connecting), 'ing' (recognizing),
 * 'end' (idle/stopped); onGetMediaFail additionally sets the literal 'null'.
 */
export class IatRecorder {
    private status: string;
    private language: string;
    private accent: string;
    private appId: string;
    // PCM chunks produced by the transcode worker, queued for upload.
    private audioData: any[];
    // Final dictation result.
    private resultText: string;
    // Intermediate dictation result used by the streaming (wpgs) mode; see result().
    private resultTextTemp: string;
    private webSocket: WebSocket;
    private audioContext: AudioContext;
    private scriptProcessor: ScriptProcessorNode;
    //private mediaSource: MediaStreamAudioSourceNode;
    private isRecorderAutoSend: boolean;
    private _isStop: boolean;
    // Fired whenever the recognized text changes.
    onTextChange: (str: string) => void;
    // Fired on status transitions: (previousStatus, newStatus).
    onWillStatusChange: (parStatus: string, status: string) => void;
    // Fired with each raw audio frame so the UI can draw a waveform.
    updateWaveform: (nums: Float32Array) => void;
    // Handle of the 40 ms frame-upload interval started by webSocketSend().
    private handlerInterval: any;


    /**
     * @param language recognition language, defaults to 'zh_cn'
     * @param accent   accent model, defaults to 'mandarin'
     * @param appId    service app id, defaults to the module-level APPID
     */
    constructor(language?: string, accent?: string, appId?: string) {
        let self = this;
        this._isStop = true;
        this.status = 'end';
        this.language = language || 'zh_cn';
        this.accent = accent || 'mandarin';
        this.appId = appId || APPID;
        // Recorded audio data awaiting upload.
        this.audioData = [];
        // Dictation result.
        this.resultText = '';
        // Streaming-mode dictation result; needs an intermediate buffer.
        this.resultTextTemp = '';
        // Lazily create the shared transcode worker; in either branch, route
        // its output into THIS instance's audio queue.
        if (null == transWorker) {
            Editor.assetDb.getAsset("editorResources/js/transcode.worker.js", true).then((info) => {
                let path = Editor.assetDb.getFullPath(info);
                transWorker = new Worker(path);
                transWorker.onmessage = function (event: any) {
                    self.audioData.push(...event.data)
                }
            });
        } else {
            transWorker.onmessage = function (event: any) {
                self.audioData.push(...event.data)
            }
        }
    }
    // Change the recording/dictation status, notifying onWillStatusChange first.
    setStatus(status: string) {
        this.onWillStatusChange && this.status !== status && this.onWillStatusChange(this.status, status)
        this.status = status
    }
    // Update the final and/or intermediate result text and notify onTextChange.
    setResultText(parm: { resultText?: string, resultTextTemp?: string }) {
        this.onTextChange && this.onTextChange(parm.resultTextTemp || parm.resultText || '')
        parm.resultText !== undefined && (this.resultText = parm.resultText)
        parm.resultTextTemp !== undefined && (this.resultTextTemp = parm.resultTextTemp)
    }
    // Change dictation parameters.
    setParams(language?: string, accent?: string) {
        language && (this.language = language)
        accent && (this.accent = accent)
    }
    // Open the websocket to the recognition service (no-op if already open).
    connectWebSocket() {
        if (this.webSocket) return;
        let iatWS = new WebSocket(URL)

        this.webSocket = iatWS
        this.setStatus('init')
        iatWS.onopen = () => {
            this.setStatus('ing')
            // Start streaming frames as soon as the socket is up.
            //setTimeout(() => {
            this.webSocketSend()
            //}, 100)
        }
        iatWS.onmessage = e => {
            this.result(e.data)
        }
        iatWS.onerror = () => {
            this.recorderStop()
        }
        iatWS.onclose = () => {
            endTime = new Date().getTime();
            //console.log("duration", endTime - startTime)
            this.recorderStop()
        }
    }
    // Stop recording: kill the upload loop, close the socket, mark idle.
    recorderStop() {
        // Clear the frame-upload interval BEFORE nulling the socket; otherwise
        // the interval dereferences the nulled webSocket every 40 ms, throws,
        // and its catch calls recorderStop() again in an endless error loop.
        clearInterval(this.handlerInterval)
        this.closeWebSocket()
        this.setStatus('end')
    }

    /**
     * Handle one recognition frame from the server.
     * With dynamic correction (wpgs) enabled, `pgs === 'apd'` appends the
     * previous intermediate result to the final text, while `'rpl'` replaces
     * part of it; intermediate text is tracked in resultTextTemp.
     */
    result(resultData: string) {
        let jsonData: any
        try {
            jsonData = JSON.parse(resultData)
        } catch (err) {
            // A malformed frame must not blow up the onmessage handler.
            console.log('invalid recognition frame:', err)
            return
        }
        if (jsonData.data && jsonData.data.result) {
            let data = jsonData.data.result
            let str = ''
            let ws = data.ws
            for (let i = 0; i < ws.length; i++) {
                str = str + ws[i].cw[0].w
            }
            // `pgs` is present when dynamic correction is enabled on the console:
            // "apd" appends this piece to the previous final result;
            // "rpl" replaces part of the previous result (range given by `rg`).
            if (data.pgs) {
                if ('apd' === data.pgs) {
                    // Promote the intermediate text to the final result.
                    this.setResultText({
                        resultText: this.resultTextTemp,
                    })
                }
                // Keep the running piece in the intermediate buffer.
                this.setResultText({
                    resultTextTemp: this.resultText + str,
                })
            } else {
                this.setResultText({
                    resultText: this.resultText + str,
                })
            }
            //console.log("full recognition result:", this.resultText)
        }
        // status === 2 marks the server-side end of recognition. Guard `data`:
        // a frame may carry code 0 with no payload.
        if (jsonData.code === 0 && jsonData.data && jsonData.data.status === 2) {
            this.webSocket.close()
        }
        if (jsonData.code !== 0) {
            this.webSocket.close()
            console.log(`${jsonData.code}:${jsonData.message}`)
        }
    }
    // Base64-encode processed audio data for transport.
    toBase64(buffer: any) {
        var binary = ''
        var bytes = new Uint8Array(buffer)
        var len = bytes.byteLength
        for (var i = 0; i < len; i++) {
            binary += String.fromCharCode(bytes[i])
        }
        return window.btoa(binary)
    }
    /**
     * Send the opening frame (status 0) and start a 40 ms loop that uploads
     * 1280-sample chunks (status 1) until the queue drains while the recorder
     * is 'end', at which point the closing frame (status 2) is sent.
     */
    webSocketSend() {
        if (this.webSocket.readyState !== 1) {
            return
        }
        let audioData = this.audioData.splice(0, 1280);
        let params = {
            common: {
                app_id: this.appId,
            },
            business: {
                language: this.language,
                domain: 'iat',
                accent: this.accent,
                vad_eos: 1000,
                dwa: 'wpgs'
            },
            data: {
                status: 0,
                format: 'audio/L16;rate=16000',
                encoding: 'raw',
                audio: this.toBase64(audioData),
            },
        }
        this.webSocket.send(JSON.stringify(params));
        startTime = new Date().getTime();
        clearInterval(this.handlerInterval);
        this.handlerInterval = setInterval(() => {
            try {
                // Socket gone or not open: drop queued audio and stop looping.
                // (recorderStop() nulls this.webSocket, hence the null guard.)
                if (!this.webSocket || this.webSocket.readyState !== 1) {
                    console.log("websocket not connected")
                    this.audioData = []
                    clearInterval(this.handlerInterval);
                    return;
                }
                if (0 === this.audioData.length) {
                    console.log("auto close", this.status)
                    if ('end' === this.status) {
                        // Queue drained and recording ended: send closing frame.
                        this.webSocket.send(
                            JSON.stringify({
                                data: {
                                    status: 2,
                                    format: 'audio/L16;rate=16000',
                                    encoding: 'raw',
                                    audio: '',
                                },
                            })
                        )
                        this.audioData = []
                        clearInterval(this.handlerInterval)
                    }
                    return;
                }
                audioData = this.audioData.splice(0, 1280);
                // Intermediate frame.
                this.webSocket.send(
                    JSON.stringify({
                        data: {
                            status: 1,
                            format: 'audio/L16;rate=16000',
                            encoding: 'raw',
                            audio: this.toBase64(audioData),
                        },
                    })
                )
            } catch (err) {
                this.recorderStop();
            }
        }, 40);

    }

    // Close and forget the websocket (safe to call when already closed).
    closeWebSocket() {
        if (this.webSocket)
            this.webSocket.close();

        this.webSocket = null;
    }

    // Create an audio source from a file url, used to test speech recognition.
    private async createSourceFromFile(ctx:AudioContext, url:string){
        let buff = fs.readFileSync(url);
        // Copy exactly this Buffer's bytes: Node pools small Buffers inside a
        // larger shared ArrayBuffer, so `buff.buffer` alone may include
        // unrelated data and break decodeAudioData.
        let bytes = buff.buffer.slice(buff.byteOffset, buff.byteOffset + buff.byteLength);
        let audioBuff = await ctx.decodeAudioData(bytes as ArrayBuffer)
        let source = ctx.createBufferSource();
        source.buffer = audioBuff;
        return source;
    }

    /**
     * Record microphone input and save it to `outfile` when stopped.
     * Returns the MediaRecorder; call `.stop()` on it to finish and write.
     */
    static async recordWav(outfile:string) {
        let stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        let mediaRecorder = new MediaRecorder(stream);
        // Accumulated recording chunks.
        let chunks: any[] = [];
        // Start recording.
        mediaRecorder.start();
        // Fired when data is available; stash the chunk.
        mediaRecorder.ondataavailable = (e) => {
            chunks.push(e.data);
        }
        // On stop, assemble the chunks into a blob and write it to disk.
        mediaRecorder.onstop = (e) => {
            let blob = new Blob(chunks, { 'type': 'audio/wav; codecs=opus' });
            chunks = []; // reset for the next recording
            let reader = new FileReader();
            reader.onload = function (event) {
                let buffer = Buffer.from((event as any).target.result);
                fs.writeFile(outfile, buffer, (err)=>{
                    if (err) {
                        console.log('Error: ', err);
                    } else {
                        console.log('Audio has been saved as '+outfile);
                    }
                });
            };
            reader.readAsArrayBuffer(blob);
        }
        // To stop, call mediaRecorder.stop().
        return mediaRecorder;
    }

    // Initialize browser recording: microphone -> script processor -> websocket.
    async recorderInit() {
        // AudioContext performs all audio processing.
        this.audioContext = new window.AudioContext();
        this.audioContext.resume();

        let stream :MediaStream=null;
        try{
            stream = await navigator.mediaDevices.getUserMedia({
                    audio: true,
                    video: false,
                });
        }catch(e){
            this.onGetMediaFail(e);
        }
        if(!stream){
            this.onGetMediaFail(null)
            return;
        }

        // Wrap the MediaStream so its audio can be routed and processed.
        let mediaSource:AudioBufferSourceNode|MediaStreamAudioSourceNode;
        mediaSource = this.audioContext.createMediaStreamSource(stream);
        
        // In demo mode, substitute a canned audio file for the microphone.
        if(DemoServer.isDemo){
            mediaSource = await this.createSourceFromFile(this.audioContext, DemoServer.getNextMicInput())
        }

        // Script processor lets us inspect/forward raw audio frames in JS.
        this.scriptProcessor = this.audioContext.createScriptProcessor(0, 1, 1)
        this.scriptProcessor.onaudioprocess = e => {
            // Process the frame only while recording or armed for auto-start.
            if ('end' !== this.status || this.isRecorderAutoSend) {
                let nums = e.inputBuffer.getChannelData(0);

                if ('end' === this.status && this.isRecorderAutoSend) {
                    this.checkAutoStart(nums);
                }


                if (this.updateWaveform) {
                    this.updateWaveform(nums);
                }
                if ('end' !== this.status) {
                    if (transWorker)
                        transWorker.postMessage(nums);
                }
            }
        }
        // Wire the graph together and open the socket.
        mediaSource.connect(this.scriptProcessor)
        this.scriptProcessor.connect(this.audioContext.destination)
        this.connectWebSocket();

        //test
        if(mediaSource instanceof AudioBufferSourceNode)
            mediaSource.start();
    }

    // Tear down after microphone access fails.
    private onGetMediaFail(e: any){
        this.audioContext && this.audioContext.close()
        this.audioContext = undefined
        // Close the websocket if it is open.
        if (this.webSocket && this.webSocket.readyState === 1) {
            this.webSocket.close()
            this.setStatus("end")
        }
        // NOTE(review): literal 'null' status bypasses setStatus(), so no
        // onWillStatusChange fires here — looks intentional; confirm callers.
        this.status = 'null'
    }    

    // Auto-start recording when the input level exceeds a fixed threshold.
    private checkAutoStart(data: Float32Array) {
        let maxNum = 0;
        for (let i = data.length - 1; i >= 0; i--) {
            if (maxNum < data[i]) {
                maxNum = data[i];
            }
        }
        if (maxNum > 0.15) {
            this.start(this.isRecorderAutoSend);
        }
    }
    // (Re)start recording, initializing the audio graph on first use.
    async recorderStart() {
        if (!this.audioContext) {
            await this.recorderInit();
        } else {
            this.audioContext.resume();
            //TEST
            if(DemoServer.isDemo){
                let source= await this.createSourceFromFile(this.audioContext,DemoServer.getNextMicInput())
                source.connect(this.scriptProcessor)
                source.start();
            }
            //TEST
            this.connectWebSocket();
        }
    }

    /**
     * Begin a dictation session, clearing any previous result text.
     * @param isRecorderAutoSend keep listening while idle and auto-restart
     *                           when the input level passes the threshold
     */
    start(isRecorderAutoSend: boolean) {
        this._isStop = false;
        this.isRecorderAutoSend = isRecorderAutoSend;
        this.recorderStart();
        this.setResultText({ resultText: '', resultTextTemp: '' })
    }
    // End the session; the upload loop drains the queue and closes the socket.
    stop() {
        if (!this._isStop) {
            this._isStop = true;
            this.isRecorderAutoSend = false;
            this.setStatus('end')
        }
        //this.recorderStop();
    }

}