package com.medical.utils;



import com.medical.constants.APIConstant;
import com.medical.constants.CacheConstants;
import com.medical.enums.TimeEnum;
import com.medical.mapper.ChatMessagesMapper;
import com.medical.mapper.KnowledgeMapper;
import com.medical.model.entity.Knowledge;
import com.medical.model.param.*;
import com.medical.service.RedisService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.ResponseEntity;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;

import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static cn.hutool.json.XMLTokener.entity;

/**
 * @author TISNIW
 * @version 1.0
 * @date 2025/6/14 15:49
 * @desc
 */
@Slf4j
@Component
public class ChatUtil {
    private final WebClient webClient;
    @Autowired
    private RedisService redisService;
    @Autowired
    private ChatMessagesMapper messagesMapper;
    @Autowired
    private KnowledgeMapper knowledgeMapper;

    /** Minimum cosine similarity for a knowledge entry to count as relevant. */
    private static final float SIMILARITY_THRESHOLD = 0.5f;

    /** Embedding model used when the caller does not specify one. */
    private static final String DEFAULT_EMBEDDING_MODEL = "bge-m3:567m";

    public ChatUtil() {
        // NOTE(review): the constant is named OLLAMA_MODEL_LIST_URL but is used as the
        // base URL for /api/chat and /api/embed as well — presumably it is the Ollama
        // server root; confirm and consider renaming the constant.
        this.webClient = WebClient.builder()
                .baseUrl(APIConstant.OLLAMA_MODEL_LIST_URL)
                .build();
    }

    /**
     * Streams a chat completion from the LLM to the client identified by {@code sessionId},
     * accumulating the full reply, persisting it, and refreshing the session cache on completion.
     *
     * @param model     model name passed through to the LLM backend
     * @param prompt    the user's question (used as the sole message when {@code message} is empty)
     * @param message   prior conversation history; may be null/empty
     * @param sessionId SSE session key used by {@link Emitters} and as the Redis cache suffix
     * @param isThick   whether to request the model's "thinking" output
     *                  (NOTE(review): likely a typo for "isThink" — matches the "think" field)
     */
    @Async
    public void chatStream(String model, String prompt, List<Messages> message, String sessionId, boolean isThick) {
        if (message == null || message.isEmpty()) {
            // No history supplied: seed the conversation with the prompt as a user message.
            message = new ArrayList<>();
            Messages m = new Messages();
            m.setRole("user");
            m.setContent(prompt);
            message.add(m);
        }
        log.info("用户问题: {}", prompt);
        ChatParameters chatParameters = ChatParameters.builder()
                .model(model)
                .messages(message)
                .think(isThick)
                .stream(true)
                .build();

        // Accumulators for the full reply, appended chunk-by-chunk from the stream.
        AtomicReference<String> aiReplyContent = new AtomicReference<>("");
        AtomicReference<String> aiReplyThinking = new AtomicReference<>("");

        webClient.post()
                .uri("/api/chat")
                .bodyValue(chatParameters)
                .retrieve()
                .bodyToFlux(String.class)
                .map(json -> {
                    try {
                        return JsonUtils.parseObject(json, MReply.class);
                    } catch (Exception e) {
                        // Skip malformed chunks instead of failing the whole stream.
                        log.error("解析LLM返回JSON失败: {}", json, e);
                        return null;
                    }
                })
                .filter(Objects::nonNull)
                .doOnNext(mReply -> {
                    // BUGFIX: getMessage() may be null on some chunks; an NPE here would
                    // abort the reactive pipeline mid-stream.
                    Map<String, Object> msg = mReply.getMessage();
                    if (msg != null) {
                        Object content = msg.get("content");
                        Object thinking = msg.get("thinking");
                        if (content != null) {
                            aiReplyContent.updateAndGet(s -> s + content);
                        }
                        if (thinking != null) {
                            aiReplyThinking.updateAndGet(s -> s + thinking);
                        }
                    }
                    Emitters.sendMessage(sessionId, JsonUtils.writeValueAsString(mReply));
                })
                .doOnError(error -> {
                    log.error("调用LLM模型出错", error);
                    Emitters.sendMessage(sessionId, "[ERROR] " + error.getMessage());
                    Emitters.removeEmitter(sessionId);
                })
                .doOnComplete(() -> {
                    // Stream finished: persist the assembled reply and refresh the cached session.
                    String finalReply = aiReplyContent.get();
                    String finalThinking = aiReplyThinking.get();
                    if (finalReply != null && !finalReply.isEmpty()) {
                        messagesMapper.insertMessageAI(sessionId, "assistant", finalReply, finalThinking);
                        Messages m = new Messages();
                        m.setRole("assistant");
                        m.setContent(finalReply);
                        m.setThinking(finalThinking);
                        // BUGFIX: previously an empty Messages (null role/content) was appended
                        // even when the model produced no reply, polluting the cached history.
                        chatParameters.getMessages().add(m);
                    }
                    redisService.setCacheObject(CacheConstants.AI_CHAT_SESSION_KEY + sessionId, chatParameters, TimeEnum.THIRTY_MINUTE);
                    Emitters.removeEmitter(sessionId);
                })
                .subscribe();
    }

    /**
     * Generates embedding vectors for the given texts via the Ollama /api/embed endpoint.
     *
     * @param inputs texts to embed (order of the returned vectors matches this list)
     * @param model  embedding model name
     * @return one embedding per input, or an empty list if the backend returned nothing
     */
    public List<List<Double>> generateEmbeddings(List<String> inputs, String model) {
        Map<String, Object> requestBody = new HashMap<>();
        requestBody.put("model", model);
        requestBody.put("input", inputs);

        Mono<EmbeddingResponse> responseMono = webClient.post()
                .uri("/api/embed")
                .header("Content-Type", "application/json")
                .bodyValue(requestBody)
                .retrieve()
                .bodyToMono(EmbeddingResponse.class);

        // Blocking is acceptable here: callers expect a synchronous result.
        EmbeddingResponse response = responseMono.block();
        // Guard both a null response and a null embeddings field.
        return (response != null && response.getEmbeddings() != null)
                ? response.getEmbeddings()
                : Collections.emptyList();
    }

    /**
     * Retrieves knowledge entries most similar to {@code query} using the default
     * embedding model, sorted by similarity descending.
     */
    public List<Knowledge> retrieveSimilarKnowledge(String query) {
        return retrieveSimilarKnowledge(query, DEFAULT_EMBEDDING_MODEL);
    }

    /**
     * Retrieves knowledge entries whose embedding cosine similarity with {@code query}
     * exceeds {@link #SIMILARITY_THRESHOLD}, sorted by similarity descending.
     *
     * @param query          the user's question; null/empty yields an empty result
     * @param embeddingModel embedding model to use for both query and entries
     * @return matching entries with {@code similarityScore} populated, best first
     */
    public List<Knowledge> retrieveSimilarKnowledge(String query, String embeddingModel) {
        if (query == null || query.isEmpty()) {
            return Collections.emptyList();
        }

        List<Knowledge> allEntries = knowledgeMapper.selectList();
        if (allEntries.isEmpty()) {
            return Collections.emptyList();
        }

        // Embed the query together with every entry in one request; the query goes first.
        List<String> contents = new ArrayList<>(allEntries.size() + 1);
        contents.add(query);
        for (Knowledge entry : allEntries) {
            contents.add(entry.getContent());
        }

        List<List<Double>> embeddings = generateEmbeddings(contents, embeddingModel);
        if (embeddings.isEmpty() || embeddings.size() != contents.size()) {
            // Backend failure or partial result: fail soft with no matches.
            return Collections.emptyList();
        }

        List<Double> queryEmbedding = embeddings.get(0);
        List<List<Double>> entryEmbeddings = embeddings.subList(1, embeddings.size());

        // BUGFIX/perf: similarity was previously computed twice per entry (filter + map),
        // and the stream ran in parallel while mutating shared entities. Compute once,
        // store on the entity, then filter and sort sequentially.
        return IntStream.range(0, allEntries.size())
                .mapToObj(i -> {
                    Knowledge knowledge = allEntries.get(i);
                    knowledge.setSimilarityScore(
                            computeCosineSimilarity(queryEmbedding, entryEmbeddings.get(i)));
                    return knowledge;
                })
                .filter(k -> k.getSimilarityScore() > SIMILARITY_THRESHOLD)
                .sorted(Comparator.comparingDouble(Knowledge::getSimilarityScore).reversed())
                .collect(Collectors.toList());
    }

    /**
     * Computes cosine similarity between two vectors.
     * Extra trailing components of the longer vector are ignored; a zero vector
     * yields 0.0 instead of NaN (BUGFIX: previously 0/0 produced NaN, which would
     * silently break the similarity sort upstream).
     */
    private double computeCosineSimilarity(List<Double> vecA, List<Double> vecB) {
        int len = Math.min(vecA.size(), vecB.size());
        double dotProduct = 0.0, normA = 0.0, normB = 0.0;

        for (int i = 0; i < len; i++) {
            double a = vecA.get(i);
            double b = vecB.get(i);
            dotProduct += a * b;
            normA += a * a;
            normB += b * b;
        }
        double denominator = Math.sqrt(normA) * Math.sqrt(normB);
        if (denominator == 0.0) {
            return 0.0;
        }
        double similarity = dotProduct / denominator;
        // Downgraded from info: this runs once per knowledge entry per query.
        log.debug("dotProduct: {}, normA: {}, normB: {}, similarity: {}", dotProduct, normA, normB, similarity);
        return similarity;
    }
}
