<!DOCTYPE html>
<html lang="zh-CN">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>实时语音识别</title>
</head>
<body>
<button onclick="startRecording()">开始录音</button>
<button onclick="stopRecording()">停止录音</button>
<div id="results" style="margin-top:20px; border:1px solid #ccc; padding:10px;"></div>

<script>
    let mediaStream;  // MediaStream from getUserMedia; set in startRecording(), released in stopRecording()
    let audioContext; // 16 kHz AudioContext driving the capture/processing graph
    let websocket;    // WebSocket to the /speech recognition endpoint; undefined until startRecording() runs

    /**
     * Start recording: open the recognition WebSocket, request the
     * microphone, and stream 16 kHz mono 16-bit PCM audio frames to
     * the server. Errors are reported through log().
     */
    async function startRecording() {
        try {
            // 1. Open the recognition WebSocket (same host, /speech endpoint).
            websocket = new WebSocket('ws://' + window.location.host + '/speech');
            websocket.binaryType = 'arraybuffer';

            // Receive recognition results; the server prefixes interim and
            // final hypotheses with "INTERIM:" / "FINAL:".
            websocket.onmessage = function(event) {
                const msg = event.data.toString();
                if (msg.startsWith('INTERIM:')) {
                    log('中间结果: ' + msg.substring(8));
                } else if (msg.startsWith('FINAL:')) {
                    log('最终结果: ' + msg.substring(6));
                } else {
                    log('识别结果: ' + msg);
                }
            };

            // Connection-state logging for debugging.
            websocket.onopen = function(event) {
                log('WebSocket 连接已打开');
            };

            websocket.onclose = function(event) {
                log('WebSocket 连接已关闭');
            };

            websocket.onerror = function(event) {
                log('WebSocket 错误: ' + event);
            };

            // 2. Request microphone access. The 16 kHz / mono constraints are
            //    advisory — the browser may ignore sampleRate; the
            //    AudioContext below fixes the final rate at 16 kHz.
            mediaStream = await navigator.mediaDevices.getUserMedia({
                audio: {
                    sampleRate: 16000,
                    channelCount: 1
                }
            });

            // 3. Build the audio processing graph at a fixed 16 kHz.
            //    NOTE(review): ScriptProcessorNode is deprecated; migrate to
            //    AudioWorkletNode when convenient.
            audioContext = new AudioContext({ sampleRate: 16000 });
            const source = audioContext.createMediaStreamSource(mediaStream);
            const processor = audioContext.createScriptProcessor(4096, 1, 1);

            // 4. Convert each 4096-sample buffer to 16-bit PCM and send it.
            //    Guard on readyState: send() throws while the socket is still
            //    CONNECTING and after it has been closed, and audio callbacks
            //    can fire in both windows.
            processor.onaudioprocess = e => {
                const float32Data = e.inputBuffer.getChannelData(0);
                const int16Data = convertFloat32ToInt16(float32Data);
                if (websocket && websocket.readyState === WebSocket.OPEN) {
                    websocket.send(int16Data.buffer); // send binary frame
                }
            };

            source.connect(processor);
            processor.connect(audioContext.destination);
            log('录音已开始...');
        } catch (err) {
            log('错误: ' + err.message);
        }
    }

    /**
     * Stop recording: release the microphone tracks, tear down the
     * audio graph, and close the WebSocket. Clears each global after
     * closing it so stale handles are never reused by a subsequent
     * startRecording() or a late audio callback.
     */
    function stopRecording() {
        if (mediaStream) {
            mediaStream.getTracks().forEach(track => track.stop());
            mediaStream = null;
        }
        if (audioContext) {
            audioContext.close();
            audioContext = null;
        }
        if (websocket) {
            websocket.close();
            websocket = null;
        }
        log('录音已停止');
    }

    /**
     * Convert Float32 audio samples (nominal range [-1, 1]) to Int16 PCM.
     *
     * @param {Float32Array} buffer - input samples
     * @returns {Int16Array} samples scaled to [-32767, 32767]
     */
    function convertFloat32ToInt16(buffer) {
        const int16Buffer = new Int16Array(buffer.length);
        for (let i = 0; i < buffer.length; i++) {
            // Clamp BOTH sides. The original clamped only the positive side,
            // so samples below -1 scaled past the Int16 range and wrapped
            // around in the Int16Array (e.g. -2 became +2), corrupting audio.
            const sample = Math.max(-1, Math.min(1, buffer[i]));
            int16Buffer[i] = sample * 0x7FFF;
        }
        return int16Buffer;
    }



    // Append one line of text to the results panel and keep the panel
    // scrolled to the newest entry.
    function log(text) {
        const panel = document.getElementById('results');
        panel.innerHTML = panel.innerHTML + text + '<br>';
        panel.scrollTop = panel.scrollHeight;
    }

    // NOTE(review): a duplicated set of top-level WebSocket handler
    // assignments used to live here. They ran at page-load time, when
    // `websocket` was still undefined, so the very first assignment threw
    // "TypeError: Cannot set properties of undefined" and aborted the
    // whole script. The identical handlers are registered inside
    // startRecording() after the socket is created, so the duplicates
    // were removed.

</script>
</body>
</html>