package com.hjzd.ai.smartpractice.websocket;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonObject;
import com.google.gson.LongSerializationPolicy;
import com.hjzd.ai.smartpractice.config.WebClientConfig;
import com.hjzd.ai.smartpractice.constant.LLMRole;
import com.hjzd.ai.smartpractice.constant.ServiceConstant;
import com.hjzd.ai.smartpractice.entity.DTO.chat.ChatCompletionMessage;
import com.hjzd.ai.smartpractice.service.ApiService;
import com.hjzd.ai.smartpractice.service.RedisService;
import com.hjzd.ai.smartpractice.util.SilenceDetectionUtil;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.socket.BinaryMessage;
import org.springframework.web.socket.CloseStatus;
import org.springframework.web.socket.TextMessage;
import org.springframework.web.socket.WebSocketSession;
import org.springframework.web.socket.handler.BinaryWebSocketHandler;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.nio.ByteBuffer;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;

import static com.hjzd.ai.smartpractice.constant.ServiceConstant.*;
import static com.hjzd.ai.smartpractice.websocket.SessionManager.*;

/**
 * WebSocket endpoint that streams microphone audio from the browser to the
 * Aliyun ASR service and maintains the per-session conversation state
 * (Redis-backed LLM context, TTS speaker id, buffered audio).
 *
 * <p>Protocol (as implemented here): the client sends a text control message to
 * start/stop capture and binary frames carrying raw audio in between. Audio is
 * accumulated in the session's {@code AUDIO_CACHE} and forwarded to the ASR
 * websocket in fixed {@code sendChunkSize} chunks.
 *
 * <p>Thread-safety: all mutation of the buffered audio is serialized by
 * {@code synchronized (session)} in {@link #handleAudioStop} and
 * {@link #processAudioStream}.
 */
@Component
public class AudioStreamHandler extends BinaryWebSocketHandler {

    private static final Logger logger = LoggerFactory.getLogger(AudioStreamHandler.class);

    // Longs serialized as strings (avoids precision loss in JS clients); HTML
    // escaping disabled so non-ASCII text round-trips unchanged.
    private static final Gson GSON = new GsonBuilder().setDateFormat("yyyy-MM-dd HH:mm:ss")
            .setLongSerializationPolicy(LongSerializationPolicy.STRING).disableHtmlEscaping().create();

    @Resource
    private RedisService redisService;
    @Resource
    private WebClientConfig webClientConfig;
    @Value("${ai-model.chattts.url}")
    private String ttsUrl;
    @Resource
    private ApiService apiService;
    @Value("${ai-model.llm.savaVoicePath}")
    private String saveVoicePath;
    @Value("${app.llm.type}")
    private String llmFlag;
    @Value("${app.tts.type}")
    private String ttsType;
    @Value("${conversation.service.url}")
    private String llmUrl;

    // Aliyun ASR credentials/configuration
    @Value("${aliyun.asr.appKey}")
    private String aliyunAppKey;
    @Value("${aliyun.asr.akId}")
    private String aliyunAkId;
    @Value("${aliyun.asr.akSecret}")
    private String aliyunAkSecret;
    @Value("${aliyun.asr.url}")
    private String aliyunUrl;

    // NOTE(review): currently unreferenced in this class — confirm whether the
    // blocked-audio trimming logic that used it was intentionally removed.
    private static final int BLOCKED_AUDIO_CACHE_MAX_SIZE = 19200;
    private static final int ASR_CONNECTION_RETRY_COUNT = 10;
    private static final int ASR_CONNECTION_RETRY_DELAY_MS = 500;

    /**
     * Registers the new session, initializes its practice cache (which sets up
     * the Aliyun ASR client) and seeds the LLM conversation context in Redis.
     */
    @Override
    public void afterConnectionEstablished(@NotNull WebSocketSession session) {
        logger.info(">>> WebSocket connection established. session: {}", session);
        try {
            String uuid = extractUuidFromQuery(session.getUri());

            AuthUserDTO authUserDTO = new AuthUserDTO();
            SessionManager.saveSession(session, authUserDTO);

            // Initialize per-session caches, including the Aliyun ASR client.
            SessionManager.initSessionPracticeCache(aliyunAppKey, session, redisService,
                    webClientConfig, audioStatusStop, ttsUrl, saveVoicePath, ttsType, llmFlag, llmUrl);

            // Seed the conversation context (system prompt + random speaker id).
            initializeConversationContext(uuid, authUserDTO);

        } catch (Exception ex) {
            // Deliberately swallowed: a failed setup must not propagate out of
            // the container callback; the client will observe a dead session.
            logger.error("WebSocket connection error", ex);
        }
    }

    /**
     * Extracts the {@code uuid} query parameter from the handshake URI.
     *
     * @param uri handshake URI; may be {@code null} ({@code WebSocketSession#getUri()} is nullable)
     * @return the uuid value, or an empty string when absent
     */
    private String extractUuidFromQuery(URI uri) {
        if (uri == null || uri.getQuery() == null) {
            return "";
        }
        for (String pair : uri.getQuery().split("&")) {
            // Limit of 2 keeps values that themselves contain '=' intact.
            String[] kv = pair.split("=", 2);
            if (kv.length == 2 && "uuid".equals(kv[0])) {
                return kv[1];
            }
        }
        return "";
    }

    /**
     * Polls the ASR client until it reports an open connection or the retry
     * budget (10 x 500 ms) is exhausted.
     *
     * <p>NOTE(review): not called from this class — confirm it is still needed.
     *
     * @return {@code true} when the connection is open
     * @throws InterruptedException if the polling thread is interrupted
     */
    private boolean waitForAsrConnection(AliyunAsrClient asrClient) throws InterruptedException {
        for (int retry = 0; !asrClient.isOpen() && retry < ASR_CONNECTION_RETRY_COUNT; retry++) {
            Thread.sleep(ASR_CONNECTION_RETRY_DELAY_MS);
        }
        return asrClient.isOpen();
    }

    /**
     * Stores the initial message list (system prompt only) in Redis with a
     * 7-day TTL and assigns a random speaker id plus the uuid to the session user.
     */
    private void initializeConversationContext(String uuid, AuthUserDTO authUserDTO) {
        ChatCompletionMessage systemMessage = new ChatCompletionMessage();
        systemMessage.setRole(LLMRole.SYSTEM.getRole());
        systemMessage.setContent(apiService.getLlmRole());

        List<ChatCompletionMessage> messageList = new ArrayList<>();
        messageList.add(systemMessage);

        redisService.set(String.format(ServiceConstant.MOCK_CHAT_UNIQUE_KEY, uuid),
                GSON.toJson(messageList), 7, TimeUnit.DAYS);

        // Pick a random speaker id; ThreadLocalRandom avoids allocating a new
        // Random instance per connection.
        long[] speakers = {2, 3, 6, 7, 9};
        long speaker = speakers[ThreadLocalRandom.current().nextInt(speakers.length)];
        authUserDTO.setId(speaker);
        authUserDTO.setNick(uuid);

        logger.info("Initialized conversation context for user: {}", uuid);
    }

    /**
     * Routes text control messages: the "start" token begins audio capture and
     * the "stop" token flushes buffered audio and finalizes the ASR request.
     * Any other payload is ignored.
     */
    @Override
    protected void handleTextMessage(@NotNull WebSocketSession session, @NotNull TextMessage message) {
        try {
            String payload = message.getPayload();
            logger.info("Received text message: {}", payload);

            if (audioStatusStart.equals(payload)) {
                handleAudioStart(session);
            } else if (audioStatusStop.equals(payload)) {
                handleAudioStop(session);
            }
        } catch (Exception ex) {
            logger.error("Text message handling error", ex);
        }
    }

    /** Marks the session as capturing and clears any stale ASR transcript. */
    private void handleAudioStart(WebSocketSession session) {
        SessionManager.getPracticeCache(session).put(AUDIO_STATUS_CACHE, audioStatusStart);
        ((StringBuilder) SessionManager.getPracticeCache(session).get(ASR_CACHE)).setLength(0);
        logger.info("Audio capture started");
    }

    /**
     * Stops capture, drains the remaining buffered audio to the ASR service and
     * sends EOF so the recognizer emits its final result.
     */
    private void handleAudioStop(WebSocketSession session) {
        SessionManager.getPracticeCache(session).put(AUDIO_STATUS_CACHE, audioStatusStop);
        logger.info("Audio capture stopped");

        // Same lock as processAudioStream, so stop cannot interleave with an
        // in-flight binary frame.
        synchronized (session) {
            byte[] bufferedAudio = (byte[]) SessionManager.getPracticeCache(session).get(AUDIO_CACHE);
            processRemainingAudio(session, bufferedAudio);

            AliyunAsrClient asrClient = (AliyunAsrClient) SessionManager.getPracticeCache(session).get(FUNASR_CACHE);
            if (asrClient != null) {
                asrClient.sendEof();
            } else {
                // Mirrors the null-guard in afterConnectionClosed.
                logger.error("ASR client missing from session cache; cannot send EOF");
            }

            logger.info("Final ASR result: {}", SessionManager.getPracticeCache(session).get(ASR_CACHE));
        }
    }

    /**
     * Drains the whole buffer at end of capture: full chunks are sent as-is and
     * the final partial chunk, if any, is zero-padded up to the fixed chunk size.
     */
    private void processRemainingAudio(WebSocketSession session, byte[] audioData) {
        while (audioData.length >= sendChunkSize) {
            sendAudioChunk(session, Arrays.copyOfRange(audioData, 0, sendChunkSize));
            audioData = Arrays.copyOfRange(audioData, sendChunkSize, audioData.length);
            SessionManager.getPracticeCache(session).put(AUDIO_CACHE, audioData);
        }

        if (audioData.length > 0) {
            // Arrays.copyOf zero-pads the tail to exactly sendChunkSize bytes,
            // matching the original fixed-size-chunk behavior.
            sendAudioChunk(session, Arrays.copyOf(audioData, sendChunkSize));
            SessionManager.getPracticeCache(session).put(AUDIO_CACHE, new byte[0]);
        }
    }

    /** Forwards one fixed-size chunk to the ASR connection, if it is open. */
    private void sendAudioChunk(WebSocketSession session, byte[] chunk) {
        AliyunAsrClient asrClient = (AliyunAsrClient) SessionManager.getPracticeCache(session).get(FUNASR_CACHE);
        if (asrClient == null || !asrClient.isOpen()) {
            logger.error("ASR connection not ready");
            return;
        }
        asrClient.sendBytes(chunk);
    }

    /**
     * Appends the incoming audio frame to the session buffer and, while capture
     * is active, streams complete chunks to the ASR service. Frames received
     * while capture is inactive are discarded.
     */
    @Override
    protected void handleBinaryMessage(@NotNull WebSocketSession session, @NotNull BinaryMessage message) {
        try {
            // Read the payload via position/limit. ByteBuffer.array() would
            // expose the entire backing array (and throw for non-array-backed
            // buffers), which is wrong for sliced or partially-filled buffers.
            ByteBuffer buffer = message.getPayload();
            byte[] payload = new byte[buffer.remaining()];
            buffer.get(payload);

            byte[] queuedAudio = (byte[]) SessionManager.getPracticeCache(session).get(AUDIO_CACHE);
            byte[] mergedAudio = new byte[queuedAudio.length + payload.length];
            System.arraycopy(queuedAudio, 0, mergedAudio, 0, queuedAudio.length);
            System.arraycopy(payload, 0, mergedAudio, queuedAudio.length, payload.length);

            if (audioStatusStart.equals(SessionManager.getPracticeCache(session).get(AUDIO_STATUS_CACHE))) {
                processAudioStream(session, mergedAudio);
            }
        } catch (Exception e) {
            logger.error("Audio processing error", e);
        }
    }

    /**
     * Sends every complete chunk of {@code audioData} to the ASR service and
     * leaves the sub-chunk remainder queued in the session cache.
     */
    private void processAudioStream(WebSocketSession session, byte[] audioData) {
        synchronized (session) {
            while (audioData.length >= sendChunkSize) {
                sendAudioChunk(session, Arrays.copyOfRange(audioData, 0, sendChunkSize));
                audioData = Arrays.copyOfRange(audioData, sendChunkSize, audioData.length);
            }
            // BUGFIX: always persist the remainder. Previously the cache was
            // only updated inside the loop, so frames arriving while the merged
            // buffer was smaller than one chunk were silently dropped.
            SessionManager.getPracticeCache(session).put(AUDIO_CACHE, audioData);
        }
    }

    /**
     * Compounds the user's saved audio files and closes the ASR connection once
     * the WebSocket is gone. All lookups are null-guarded because the session
     * may never have completed afterConnectionEstablished.
     */
    @Override
    public void afterConnectionClosed(@NotNull WebSocketSession session, @NotNull CloseStatus status) throws IOException {
        logger.info("WebSocket connection closed: {}", session);

        AuthUserDTO authUserDTO = getSession(session);
        if (authUserDTO != null && StringUtils.isNotBlank(authUserDTO.getNick())) {
            apiService.compoundSavedFile(authUserDTO.getNick());
        }

        if (SessionManager.getPracticeCache(session) != null) {
            AliyunAsrClient asrClient = (AliyunAsrClient) SessionManager.getPracticeCache(session).get(FUNASR_CACHE);
            if (asrClient != null) {
                asrClient.close();
            }
        }
    }

    /**
     * Persists one audio buffer to {@code <saveVoicePath>/voice/<nick>/<millis>.wav}.
     *
     * @param session       session whose user's nick names the target directory
     * @param saveVoicePath configured root directory for saved voice files
     * @param audioBytes    raw bytes to write (written verbatim, no header added here)
     */
    public static void saveAudioCacheToDisk(@NotNull WebSocketSession session, String saveVoicePath, byte[] audioBytes) {
        AuthUserDTO user = SessionManager.getSession(session);
        if (user == null || user.getNick() == null) {
            logger.error("No session user registered; skipping audio save");
            return;
        }

        File saveDir = Paths.get(saveVoicePath, "voice", user.getNick()).toFile();
        if (!saveDir.exists() && !saveDir.mkdirs()) {
            logger.error("Failed to create audio directory: {}", saveDir.getAbsolutePath());
            return;
        }

        // Millisecond timestamp keeps files chronologically ordered per user.
        File targetFile = new File(saveDir, System.currentTimeMillis() + ".wav");
        try (FileOutputStream fos = new FileOutputStream(targetFile)) {
            fos.write(audioBytes);
            logger.info("Audio saved to: {}", targetFile.getAbsolutePath());
        } catch (IOException e) {
            logger.error("Audio save error", e);
        }
    }
}
