import { useMicVAD } from "@ricky0123/vad-react"
import { useEffect, useRef } from "react";

// Props for the headless Vad (voice-activity-detection) component.
interface VadProps {
    onMessage: (message: string) => void; // invoked with the transcribed text of each detected utterance
    recording?: boolean; // true = listen for speech; false/omitted = paused
}

const Vad = (props: VadProps) => {
    const { onMessage, recording } = props;

    // Encode one captured utterance (Float32 PCM) as a WAV blob and hand it
    // off to the ASR upload pipeline.
    const saveToWav = (audioData: Float32Array) => {
        const wavBlob = new Blob([encodeWAV(audioData)], { type: 'audio/wav' });
        downloadRecording(wavBlob);
    }

    // Encode mono Float32 PCM samples as a 16-bit little-endian WAV (RIFF) buffer.
    // Assumes the stream is 16 kHz mono — presumably what the VAD model emits;
    // TODO confirm against the useMicVAD configuration.
    const encodeWAV = (samples: Float32Array) => {
        // 44-byte RIFF/WAVE header followed by 2 bytes per sample (PCM16).
        const buffer = new ArrayBuffer(44 + samples.length * 2);
        const view = new DataView(buffer);

        writeString(view, 0, 'RIFF');
        view.setUint32(4, 36 + samples.length * 2, true); // RIFF chunk size
        writeString(view, 8, 'WAVE');
        writeString(view, 12, 'fmt ');
        view.setUint32(16, 16, true);        // fmt sub-chunk size
        view.setUint16(20, 1, true);         // audio format: PCM
        view.setUint16(22, 1, true);         // channels: mono
        view.setUint32(24, 16000, true);     // sample rate
        view.setUint32(28, 16000 * 2, true); // byte rate = sampleRate * blockAlign
        view.setUint16(32, 2, true);         // block align (bytes per frame)
        view.setUint16(34, 16, true);        // bits per sample
        writeString(view, 36, 'data');
        view.setUint32(40, samples.length * 2, true); // data sub-chunk size

        // Scale [-1, 1] floats to int16. Clamp first: a sample outside the
        // nominal range would otherwise wrap around in setInt16 (mod 2^16)
        // and produce a loud click in the output.
        let index = 44;
        for (let i = 0; i < samples.length; i++) {
            const s = Math.max(-1, Math.min(1, samples[i]));
            view.setInt16(index, s * 0x7FFF, true);
            index += 2;
        }

        return buffer;
    }

    // Upload the WAV blob to the ASR endpoint and forward the recognized text
    // to the onMessage callback. Fire-and-forget: failures are only logged.
    // NOTE(review): despite its name this function uploads, it does not
    // download — kept as-is because the sibling saveToWav calls it by name.
    const downloadRecording = (blob: Blob): void => {
        const formData = new FormData();
        formData.append("audio_file", blob, `recording-${new Date().getTime()}.wav`);

        console.log("开始语音转文字处理...");

        fetch("/api/asr", {
            method: "POST",
            body: formData
        })
            .then(response => {
                if (!response.ok) {
                    throw new Error(`HTTP error! status: ${response.status}`);
                }
                return response.json();
            })
            .then(data => {
                console.log("语音转文字结果:", data);
                // Guard every level: a payload whose `data` exists but lacks a
                // `result` array previously threw a TypeError on `result[0]`.
                const text = data.data?.result?.[0]?.text;
                if (text) {
                    console.log("识别到文字:", text);
                    onMessage(text);
                } else {
                    console.log("没有识别到文字");
                }
            })
            .catch(error => {
                console.error("语音转文字失败:", error);
            });
    }

    // Write an ASCII tag into the DataView, one byte per UTF-16 code unit.
    const writeString = (view: DataView, offset: number, string: string) => {
        string.split('').forEach((ch, idx) => {
            view.setUint8(offset + idx, ch.charCodeAt(0));
        });
    }

    // Microphone voice-activity detection: each completed utterance is encoded
    // to WAV and sent to the ASR endpoint via saveToWav.
    const vad = useMicVAD({
        // NOTE(review): "/modal/" looks like a possible misspelling of "/model/" —
        // confirm the VAD/onnx assets are actually served from this path.
        baseAssetPath: "/modal/",
        positiveSpeechThreshold: 0.9, // raised detection threshold: louder speech is required before recording starts
        onnxWASMBasePath: "/modal/",
        startOnLoad: false, // start/pause is driven by the `recording` prop via the effect below
        onSpeechStart: () => {
            console.log("开始说话！");
        },
        onSpeechEnd: (audio) => {
            saveToWav(audio)
            console.log("结束说话")
        }
    })

    // Start or pause VAD listening whenever the `recording` prop changes.
    // NOTE(review): `vad` is omitted from the dependency array; eslint's
    // react-hooks/exhaustive-deps would flag this — confirm the vad handle is
    // stable across renders before adding it, otherwise the effect may churn.
    useEffect(() => {
        if (recording) {
            vad.start();
        } else {
            vad.pause();
        }
    }, [recording])

    // Headless component: renders nothing.
    return null
}

export default Vad