package com.example.voskdemo.handler;

import com.example.voskdemo.recorder.AudioRecorder;
import com.example.voskdemo.service.VoskService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.web.socket.BinaryMessage;
import org.springframework.web.socket.CloseStatus;
import org.springframework.web.socket.TextMessage;
import org.springframework.web.socket.WebSocketSession;
import org.springframework.web.socket.handler.TextWebSocketHandler;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

@Component
@Slf4j
public class VoskWebSocketHandler extends TextWebSocketHandler {

    @Autowired
    private AudioRecorder audioRecorder;
    @Autowired
    private VoskService voskService;

    @Value("${recorder.enable}")
    private boolean recorderEnable;

    @Value("${vosk.recognize-buffer-size}")
    private int recognizeBufferSize;

    private final static String TEXT_KEYWORD = "\"text\"";
    private final static String EMPTY_KEYWORD = "\"text\" : \"\"";



    private final Map<String, ByteArrayOutputStream> sessionBuffers = new ConcurrentHashMap<>();

    @Override
    public void afterConnectionEstablished(WebSocketSession session) {
        log.info("客户端 {} 连接建立", session.getId());
    }

    @Override
    public void handleTextMessage(WebSocketSession session, TextMessage message) throws IOException {
        String payload = message.getPayload();
        if ("start".equals(payload)) {
            voskService.startRecognition();
            session.sendMessage(new TextMessage("{\"status\": \"started\"}"));
        } else if ("end".equals(payload)) {
            String result = voskService.getFinalResult();
            session.sendMessage(new TextMessage(result));
            voskService.stopRecognition();
        }
    }

    @Override
    public void handleBinaryMessage(WebSocketSession session, BinaryMessage message) {
        byte[] audioChunk = message.getPayload().array();
        
        // 获取或创建会话缓冲区
        ByteArrayOutputStream buffer = sessionBuffers.computeIfAbsent(
            session.getId(), k -> new ByteArrayOutputStream());
        
        try {
            buffer.write(audioChunk);
            
            if (recorderEnable) {
                // 保存音频到WAV文件
                String filename = "audio_" + session.getId() + ".wav";
                // 替换原来的保存代码
                audioRecorder.appendToWav(audioChunk, filename);
            }
            
            // 当缓冲区达到阈值时进行识别
            if (buffer.size() >= recognizeBufferSize) {
                byte[] audioData = buffer.toByteArray();
                buffer.reset();
                
                // 添加去重逻辑
                String recognized = voskService.recognizePartial(audioData);
                if (recognized.contains(TEXT_KEYWORD) && !recognized.contains(EMPTY_KEYWORD)) {
                    session.sendMessage(new TextMessage(recognized));
                }
            }
        } catch (Exception e) {
            log.error("识别失败", e);
            try {
                session.sendMessage(new TextMessage("{\"error\": \"" + e.getMessage() + "\"}"));
            } catch (IOException ex) {
                log.error("发送错误消息失败", ex);
            }
        }
    }

    @Override
    public void afterConnectionClosed(WebSocketSession session, CloseStatus status) {
        voskService.stopRecognition();
        // 清理会话缓冲区
        sessionBuffers.remove(session.getId());
        log.info("客户端 {} 连接关闭", session.getId());
    }
}