package cn.iocoder.yudao.module.infra.llm.impl.selfbuilt;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.StrUtil;
import cn.iocoder.yudao.framework.common.util.json.JsonUtils;
import cn.iocoder.yudao.module.infra.llm.ILLMService;
import cn.iocoder.yudao.module.infra.llm.LLMConfig;
import cn.iocoder.yudao.module.infra.llm.model.LLMChatRes;
import cn.iocoder.yudao.module.infra.llm.model.LLMMessage;
import cn.iocoder.yudao.module.infra.llm.model.LLMReq;
import cn.iocoder.yudao.module.infra.llm.model.LLMRes;
import cn.iocoder.yudao.module.infra.okhttp.OkHttpUtils;
import cn.iocoder.yudao.module.infra.prometheus.LLMConnectionMetrics;
import cn.iocoder.yudao.module.infra.prometheus.LLMRequestMetrics;
import cn.iocoder.yudao.module.infra.prometheus.LLMRequestTimeMetrics;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import okhttp3.*;
import okio.BufferedSource;
import org.springframework.stereotype.Service;

import javax.security.auth.callback.Callback;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// https://github.com/ollama/ollama/blob/main/docs/api.md
// https://platform.moonshot.cn/docs/api/chat#%E5%85%AC%E5%BC%80%E7%9A%84%E6%9C%8D%E5%8A%A1%E5%9C%B0%E5%9D%80
@Service("llm-self-built")
@Slf4j
public class SimpleOpenAILLMServiceImpl implements ILLMService {
    // Punctuation that ends (or pauses) a sentence; used to split streamed text into fragments
    private static final Pattern ENDING_FIRST_PUNCTUATION = Pattern.compile("[!?。！？,，~]"); // 匹配结束符号 (sentence-ending punctuation)
    // Maximum total length of a streamed reply; streaming is cut off once this budget is exceeded
    private static final int MAX_LENGTH = 60;
    // Configuration for the "self-built" LLM backend, resolved once in init()
    private LLMConfig.Config simpleOpenAIConfig;

    @Resource
    private LLMConfig llmConfig;
    @Resource
    private LLMConnectionMetrics llmConnectionMetrics;
    @Resource
    private LLMRequestMetrics llmRequestMetrics;
    @Resource
    private LLMRequestTimeMetrics llmRequestTimeMetrics;

    @PostConstruct
    public void init() {
        // Resolve the "self-built" backend configuration once at startup.
        simpleOpenAIConfig = llmConfig.getService("self-built");
    }

    /**
     * Streams a chat completion from the configured OpenAI-compatible (or Ollama) endpoint.
     *
     * <p>The HTTP response is read line by line, each chunk's text is extracted, split into
     * sentence fragments via {@link #splitIntoSentences(String)}, and pushed to {@code onMessage}
     * as it arrives. Once the accumulated reply would exceed {@link #MAX_LENGTH} and the next
     * fragment ends with punctuation, streaming stops early and the overflowing fragment is
     * dropped (original truncation behavior, preserved). A final {@code done=true} message marks
     * the end of the stream.
     *
     * @param toneId          tone identifier (not used by this implementation)
     * @param prompt          optional system prompt; skipped when empty
     * @param userMessage     the new user turn
     * @param historyMessages prior user/assistant exchanges, oldest first (may be null/empty)
     * @param onMessage       consumer receiving partial replies and the terminal done-message
     * @throws RuntimeException wrapping the {@link IOException} on HTTP or network failure
     */
    @Override
    public void streamChatResponse(Integer toneId, String prompt, String userMessage,
                                   List<LLMMessage> historyMessages,
                                   java.util.function.Consumer<LLMRes> onMessage) {
        final String apiUrl = simpleOpenAIConfig.getApiUrl();
        final String apiKey = simpleOpenAIConfig.getApiKey();
        final String model = simpleOpenAIConfig.getModel();

        // Build the OpenAI-style message list: system prompt, history pairs, then the new user turn.
        final List<LLMReq.Message> messages = new ArrayList<>();
        if (StrUtil.isNotEmpty(prompt)) {
            messages.add(new LLMReq.Message("system", prompt));
        }
        if (CollUtil.isNotEmpty(historyMessages)) {
            for (LLMMessage historyMessage : historyMessages) {
                messages.add(new LLMReq.Message("user", historyMessage.getUserText()));
                messages.add(new LLMReq.Message("assistant", historyMessage.getReplyText()));
            }
        }
        messages.add(new LLMReq.Message("user", userMessage));

        final LLMReq req = new LLMReq();
        req.setStream(true);
        req.setModel(model);
        req.setMessages(messages);

        final String body = JsonUtils.toJsonString(req);
        final RequestBody requestBody = RequestBody.create(
                MediaType.parse("application/json"),
                body
        );
        final Request request = new Request.Builder()
                .url(apiUrl)
                .header("Authorization", "Bearer " + apiKey)
                .post(requestBody)
                .build();
        log.info("请求LLM开始, apiUrl = {}, body = {}", apiUrl, body);

        final StringBuilder finalResult = new StringBuilder();
        String remainingText = "";

        // Track connection/request metrics for the lifetime of the call.
        llmConnectionMetrics.incrementConnections();
        llmRequestMetrics.recordProcessingRequest();

        Instant start = Instant.now();
        try (Response response = OkHttpUtils.getLlmClient().newCall(request).execute()) {
            // Synchronous call; the streaming is in reading the body incrementally.
            if (response.isSuccessful() && response.body() != null) {
                try (BufferedSource source = response.body().source()) {
                    boolean isEnd = false;
                    while (!isEnd && !source.exhausted()) {
                        final String chunk = source.readUtf8Line(); // stream is delivered line by line
                        if (chunk == null || chunk.isEmpty()) {
                            continue;
                        }
                        String text = simpleOpenAIConfig.getOllama()
                                ? extractTextFromChunkForOllama(chunk)
                                : extractTextFromChunk(chunk);
                        // FIX: extractTextFromChunk may return null/empty for the SSE "[DONE]"
                        // marker; the old code concatenated it and streamed a literal "null".
                        if (StrUtil.isEmpty(text)) {
                            continue;
                        }
                        log.info("请求LLM成功, text = {}", text);

                        // remainingText is "" until the budget is exhausted, at which point we stop.
                        text = remainingText + text;
                        for (String sentence : splitIntoSentences(text)) {
                            if ((finalResult.length() + sentence.length() <= MAX_LENGTH)
                                    || !ENDING_FIRST_PUNCTUATION.matcher(sentence).find()) {
                                // Within the length budget, or an unfinished fragment: forward it.
                                finalResult.append(sentence);
                                onMessage.accept(LLMRes.builder().done(false).replyText(sentence).build());
                            } else {
                                // Budget exhausted on a complete sentence: stop streaming here.
                                remainingText = sentence;
                                isEnd = true;
                                break;
                            }
                        }
                        if (isEnd) {
                            log.info("请求LLM结束，超出最大长度");
                        }
                    }
                }

                log.info("请求LLM结束");
                onMessage.accept(LLMRes.builder().done(true).replyText(null).build());
                llmRequestMetrics.recordSuccessRequest();
                final String path = request.url().encodedPath();
                llmRequestTimeMetrics.recordSuccessRequestDuration(path, Duration.between(start, Instant.now()));
            } else {
                // FIX: response.body() can be null on a failed call; guard before reading it for the log.
                final String errorBody = response.body() != null ? response.body().string() : null;
                log.warn("请求LLM失败, code = {}, apiUrl = {}, response = {}, body = {}", response.code(), apiUrl, errorBody, body);
                llmRequestMetrics.recordExceptionRequest();
                throw new IOException("Unexpected code " + response.code());
            }
        } catch (IOException e) {
            log.warn(StrUtil.format("请求LLM异常, apiUrl = {}, body = {}", apiUrl, body), e);
            llmRequestMetrics.recordExceptionRequest();
            throw new RuntimeException(e);
        } finally {
            // Request finished (success or failure): release the connection metric.
            llmConnectionMetrics.decrementConnections();
        }
    }

    /**
     * Performs a blocking (non-streaming) chat completion and returns the full reply text.
     *
     * @param prompt          optional system prompt; skipped when empty
     * @param userMessage     the new user turn
     * @param historyMessages prior user/assistant exchanges, oldest first (may be null/empty)
     * @return the extracted reply content (Ollama or OpenAI format, per configuration)
     * @throws RuntimeException wrapping the {@link IOException} on HTTP or network failure
     */
    @Override
    public String textChatResponse(String prompt, String userMessage, List<LLMMessage> historyMessages) {
        final String apiUrl = simpleOpenAIConfig.getApiUrl();
        final String apiKey = simpleOpenAIConfig.getApiKey();
        final String model = simpleOpenAIConfig.getModel();

        // Build the OpenAI-style message list: system prompt, history pairs, then the new user turn.
        final List<LLMReq.Message> messages = new ArrayList<>();
        if (StrUtil.isNotEmpty(prompt)) {
            messages.add(new LLMReq.Message("system", prompt));
        }
        if (CollUtil.isNotEmpty(historyMessages)) {
            for (LLMMessage historyMessage : historyMessages) {
                messages.add(new LLMReq.Message("user", historyMessage.getUserText()));
                messages.add(new LLMReq.Message("assistant", historyMessage.getReplyText()));
            }
        }
        messages.add(new LLMReq.Message("user", userMessage));

        final LLMReq req = new LLMReq();
        req.setStream(false);
        req.setModel(model);
        req.setMessages(messages);

        final String body = JsonUtils.toJsonString(req);
        final RequestBody requestBody = RequestBody.create(
                MediaType.parse("application/json"),
                body
        );
        final Request request = new Request.Builder()
                .url(apiUrl)
                .header("Authorization", "Bearer " + apiKey)
                .post(requestBody)
                .build();
        log.info("请求LLM开始, apiUrl = {}, body = {}", apiUrl, body);

        // NOTE: removed unused finalResult/remainingText locals (copy-paste leftovers
        // from the streaming path).

        // Track connection/request metrics for the lifetime of the call.
        llmConnectionMetrics.incrementConnections();
        llmRequestMetrics.recordProcessingRequest();

        Instant start = Instant.now();
        try (Response response = OkHttpUtils.getLlmClient().newCall(request).execute()) {
            // Synchronous call.
            if (response.isSuccessful() && response.body() != null) {
                String str = response.body().string();
                log.info("请求LLM成功, text = {}", str);
                String text;
                if (simpleOpenAIConfig.getOllama()) {
                    text = extractTextFromChunkForOllama(str);
                } else {
                    text = extractTextNoStream(str); // extract the actual content from the JSON
                }
                log.info("请求LLM成功, 解析结果 = {}", text);
                llmRequestMetrics.recordSuccessRequest();
                final String path = request.url().encodedPath();
                llmRequestTimeMetrics.recordSuccessRequestDuration(path, Duration.between(start, Instant.now()));
                return text;
            } else {
                // FIX: log the response body string (as the streaming path does) instead of
                // Response#toString(); guard against a null body.
                final String errorBody = response.body() != null ? response.body().string() : null;
                log.warn("请求LLM失败, code = {}, apiUrl = {}, response = {}, body = {}", response.code(), apiUrl, errorBody, body);
                llmRequestMetrics.recordExceptionRequest();
                throw new IOException("Unexpected code " + response.code());
            }
        } catch (IOException e) {
            log.warn(StrUtil.format("请求LLM异常, apiUrl = {}, body = {}", apiUrl, body), e);
            llmRequestMetrics.recordExceptionRequest();
            throw new RuntimeException(e);
        } finally {
            // Request finished (success or failure): release the connection metric.
            llmConnectionMetrics.decrementConnections();
        }
    }

    /**
     * Extracts the reply text from an Ollama response object, i.e. the
     * {@code message.content} field. Works for both a single streamed chunk and a full
     * non-streaming response, since both carry the same {@code message} envelope.
     *
     * @param chunk one JSON object from the Ollama response
     * @return the content text; "" when the field is absent
     * @throws RuntimeException wrapping any parse failure
     */
    private String extractTextFromChunkForOllama(String chunk) {
        try {
            // NOTE(review): ObjectMapper is thread-safe; consider hoisting to a static final field.
            ObjectMapper mapper = new ObjectMapper();
            JsonNode rootNode = mapper.readTree(chunk);
            // path() never returns null, so missing fields yield "" rather than an NPE.
            return rootNode.path("message").path("content").asText();
        } catch (Exception e) {
            log.warn(StrUtil.format("解析 LLM 响应异常, chunk = {}", chunk), e);
            throw new RuntimeException(e);
        }
    }

    // Removed: commented-out extractTextFromJson (dead code; superseded by extractTextNoStream).

    /**
     * Extracts the reply from a non-streaming OpenAI-style response, i.e.
     * {@code choices[0].message.content}, with any {@code <think>...</think>} reasoning
     * section stripped from the returned text.
     *
     * @param chunk the full JSON response body
     * @return the content with think-blocks removed, trimmed
     * @throws RuntimeException wrapping any parse failure
     */
    private String extractTextNoStream(String chunk) {
        try {
            // Parse the JSON payload and walk down to choices[0].message.content.
            ObjectMapper mapper = new ObjectMapper();
            JsonNode root = mapper.readTree(chunk);
            JsonNode messageNode = root.path("choices").get(0).path("message");
            String content = messageNode.path("content").asText();
            // Drop the model's chain-of-thought block(s) before returning the answer.
            Matcher thinkMatcher = Pattern.compile("<think>.*?</think>", Pattern.DOTALL).matcher(content);
            return thinkMatcher.replaceAll("").trim();
        } catch (JsonParseException e) {
            // JSON-specific failure: most likely a malformed payload.
            log.warn(StrUtil.format("JSON 解析失败，可能是非法数据，chunk = {}", chunk), e);
            throw new RuntimeException(e);
        } catch (Exception e) {
            // Any other failure (missing nodes, etc.).
            log.warn(StrUtil.format("解析 LLM 响应异常, chunk = {}", chunk), e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Extracts the delta text from one OpenAI-style SSE streaming chunk: strips the
     * leading {@code data:} prefix, recognizes the terminating {@code [DONE]} marker,
     * and reads {@code choices[0].delta.content}.
     *
     * @param chunk one line of the SSE stream
     * @return the delta text; "" for the {@code [DONE]} marker or an absent field
     *         (FIX: previously returned null for [DONE], which the streaming caller
     *         concatenated into a literal "null")
     * @throws RuntimeException wrapping any parse failure
     */
    private String extractTextFromChunk(String chunk) {
        try {
            // FIX: the SSE spec allows both "data: " and "data:"; strip either prefix.
            if (chunk.startsWith("data:")) {
                chunk = chunk.substring(5).trim();
            }

            // End-of-stream marker: nothing to emit.
            if ("[DONE]".equalsIgnoreCase(chunk.trim())) {
                log.info("接收到流式响应结束标志: [DONE]");
                return "";
            }

            // Parse the JSON payload and walk down to choices[0].delta.content.
            ObjectMapper mapper = new ObjectMapper();
            JsonNode rootNode = mapper.readTree(chunk);
            JsonNode choicesNode = rootNode.path("choices").get(0);
            JsonNode deltaNode = choicesNode.path("delta");
            return deltaNode.path("content").asText();
        } catch (JsonParseException e) {
            // JSON-specific failure: most likely a malformed payload.
            log.warn(StrUtil.format("JSON 解析失败，可能是非法数据，chunk = {}", chunk), e);
            throw new RuntimeException(e);
        } catch (Exception e) {
            // Any other failure (missing nodes, etc.).
            log.warn(StrUtil.format("解析 LLM 响应异常, chunk = {}", chunk), e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Splits {@code text} into fragments, cutting immediately AFTER each punctuation mark
     * matched by {@link #ENDING_FIRST_PUNCTUATION}; a trailing run without punctuation is
     * kept as the final fragment. An empty input yields an empty list.
     */
    private List<String> splitIntoSentences(String text) {
        final List<String> fragments = new ArrayList<>();
        final Matcher punctuation = ENDING_FIRST_PUNCTUATION.matcher(text);
        int cursor = 0;
        // Each match closes a fragment spanning [cursor, end-of-match).
        while (punctuation.find()) {
            final int cut = punctuation.end();
            fragments.add(text.substring(cursor, cut));
            cursor = cut;
        }
        // Leftover tail with no terminating punctuation.
        if (cursor < text.length()) {
            fragments.add(text.substring(cursor));
        }
        return fragments;
    }

}