package com.sys.ai.service.abstracts;

import com.alibaba.fastjson2.JSON;
import com.gy.ai.agent.core.BaseResponse;
import com.gy.ai.agent.core.ReqContext;
import com.gy.ai.agent.core.ReqOptions;
import com.gy.ai.agent.listener.StreamRespListener;
import com.gy.ai.agent.platform.dashscope.DashscopePlatform;
import com.gy.ai.agent.platform.dashscope.DashscopePlatformConfig;
import com.gy.ai.agent.platform.dashscope.entity.DashscopePlatformChatResp;
import com.gy.ai.agent.platform.ollama.OllamaPlatform;
import com.gy.ai.agent.platform.ollama.OllamaPlatformConfig;
import com.gy.ai.agent.platform.ollama.entity.OllamaChatResp;
import com.gy.ai.agent.platform.siliconflow.SiliconflowPlatform;
import com.gy.ai.agent.platform.siliconflow.SiliconflowPlatformConfig;
import com.gy.ai.agent.platform.siliconflow.entity.SiliconflowChatResp;
import com.gy.ai.agent.platform.siliconflow.entity.SiliconflowGenerateImageResp;
import com.sys.ai.builder.AIMessageBuilder;
import com.sys.ai.domain.*;
import com.sys.ai.enums.LLMModelTypeEnums;
import com.sys.ai.enums.PlatformEnums;
import com.sys.ai.fo.SendMessageFO;
import com.sys.ai.service.*;
import com.sys.ai.vo.EmbeddingsVO;
import com.sys.ai.vo.KnowledgeFragmentationVO;
import com.sys.ai.vo.MessageVO;
import com.sys.ai.vo.ReRankVO;
import com.sys.common.constant.CacheConstants;
import com.sys.common.constant.SSEMessageConstants;
import com.sys.common.core.domain.model.LoginUser;
import com.sys.common.core.redis.RedisCache;
import com.sys.common.exception.ServiceException;
import com.sys.common.sse.SseComponent;
import com.sys.common.utils.EnumUtil;
import com.sys.common.utils.file.FileUploadUtils;
import com.sys.common.utils.uuid.UUID;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;

import java.text.MessageFormat;
import java.util.*;
import java.util.concurrent.ThreadPoolExecutor;

/**
 * Abstract base class for AI chat services.
 * <p>
 * Dispatches a user message either through a configured workflow ("flow mode") or to an
 * LLM platform ("normal mode"), optionally augmenting the answer with knowledge-base
 * fragments (RAG). Responses are pushed to the client over SSE, intermediate stream
 * chunks are buffered in Redis under the LLM message id, and the completed answer is
 * persisted as send history (unless the request is a preview).
 *
 * @author LGY
 * Create by 2025/6/3 14:37
 */
@Slf4j
public abstract class AiAbstract implements AiService {

    // SSE lifecycle statuses pushed to the client.
    protected static final String START_STATUS = "start";
    protected static final String DOING_STATUS = "doing";
    protected static final String DONE_STATUS = "done";

    @Autowired
    protected IAiSendHistoryService sendHistoryService;
    @Autowired
    protected IAiAppsService aiAppsService;
    @Autowired
    protected RedisCache redisCache;
    @Autowired
    protected IDbOptionService dbOptionService;
    @Autowired
    protected IAiModelService aiModelService;
    @Autowired
    protected IAiPlatformService aiPlatformService;
    @Autowired
    protected IAiKnowledgeService knowledgeService;
    @Autowired
    protected IAiWorkflowService workflowService;
    @Autowired
    protected SseComponent sseComponent;
    @Resource(name = "myExecutor")
    protected ThreadPoolExecutor executor;

    /**
     * Flow mode: executes the workflow bound to the request and pushes its output to the
     * client as a single (non-streamed) message.
     *
     * @param sendMessageFO request payload (workflow id + user message)
     * @param currentUser   user the SSE message is addressed to
     * @param messageVO     response carrier that is filled, pushed and persisted
     */
    protected void executeFlowMode(SendMessageFO sendMessageFO, LoginUser currentUser, MessageVO messageVO) {
        AiWorkflow aiWorkflow = workflowService.getById(sendMessageFO.getWorkflowId());
        Assert.notNull(aiWorkflow, "流程不存在或已被删除");
        Map<String, Object> params = new HashMap<>();
        params.put("input", sendMessageFO.getMessage());
        Map<String, Object> resMap = workflowService.executeWorkFlow(aiWorkflow.getGraph(), params, null);
        messageVO.setContent((String) resMap.get("output"))
                .setLlmMessageId(String.format("flow-%s", UUID.fastUUID()));
        // Buffer the message chunk in Redis
        onCurrentMessage(messageVO);
        // Push to the client over SSE
        sendSimpleSSE(messageVO.getContent(), messageVO, currentUser);
        // Persist once the message is complete (no model involved in flow mode)
        onCurrentCompleteMessage(messageVO, sendMessageFO, currentUser, null);
    }

    /**
     * Normal mode: resolves model/platform/app, retrieves knowledge-base fragments
     * (when a knowledge base is mounted) and forwards the request to the LLM.
     *
     * @param sendMessageFO request payload
     * @param messageVO     response carrier
     * @param currentUser   requesting user
     */
    protected void executeNormalMode(SendMessageFO sendMessageFO, MessageVO messageVO, LoginUser currentUser) {
        AiModel aiModel = aiModelService.getById(sendMessageFO.getModelId());
        Assert.notNull(aiModel, "模型不存在或已被删除");
        AiPlatform aiPlatform = aiPlatformService.getById(aiModel.getPlatformId());
        Assert.notNull(aiPlatform, "LLM提供平台不存在或已被删除");
        AiApps aiApps = aiAppsService.getById(sendMessageFO.getAiAppsId());
        // Knowledge-base fragments (empty when no knowledge base is mounted)
        List<KnowledgeFragmentationVO> knowledgeQAVOList = this.getKnowledgeQAVOList(sendMessageFO, aiApps);
        // Build the LLM request options
        ReqOptions reqOptions = AIMessageBuilder.buildRequestParam(sendHistoryService, messageVO, sendMessageFO, aiApps,
                knowledgeQAVOList, aiModel, aiPlatform, currentUser);
        this.sendRequestToLLM(reqOptions, aiPlatform, messageVO, sendMessageFO, currentUser, knowledgeQAVOList, aiModel, aiApps);
    }

    /**
     * Decides how the answer is produced: plain LLM call, LLM-rendered knowledge
     * fragments, or raw knowledge fragments streamed directly to the client.
     */
    private void sendRequestToLLM(ReqOptions reqOptions, AiPlatform aiPlatform, MessageVO messageVO, SendMessageFO sendMessageFO,
                                  LoginUser currentUser, List<KnowledgeFragmentationVO> knowledgeQAVOList, AiModel aiModel, AiApps aiApps) {
        if (!CollectionUtils.isEmpty(knowledgeQAVOList)) {
            // Knowledge-base (RAG) mode; null-safe preview check (getHasPreview may be null)
            boolean isPreview = Boolean.TRUE.equals(sendMessageFO.getHasPreview());
            // Should the retrieved fragments be re-rendered by the LLM?
            boolean hasLlmRender = isPreview
                    ? Objects.equals("Y", sendMessageFO.getPreviewParam().getHasLlmRender())
                    : Objects.equals("Y", aiApps.getHasLlmRender());
            if (hasLlmRender) {
                // Hand the fragments to the LLM for re-rendering (prompt adjusted upstream)
                sendRequestToLLMByClient(reqOptions, aiPlatform, messageVO, sendMessageFO, currentUser, aiModel);
            } else {
                String llmMessageId = String.format("knowledge-%s", UUID.randomUUID());
                MessageVO startDoneMessage = new MessageVO();
                startDoneMessage.setLlmMessageId(llmMessageId);
                // Opening frame
                sendStartDoneStatusSSE(startDoneMessage, currentUser, START_STATUS);

                knowledgeQAVOList.forEach(knowledgeQAVO -> {
                    MessageVO respMessageVo = new MessageVO();
                    BeanUtils.copyProperties(messageVO, respMessageVo);
                    String mt = knowledgeQAVO.getFragmentation();
                    // messageVO accumulates the full answer; respMessageVo carries only this fragment
                    messageVO.setContent(StringUtils.isBlank(messageVO.getContent()) ? mt : messageVO.getContent() + mt)
                            .setLlmMessageId(llmMessageId);
                    respMessageVo.setContent(mt);
                    onCurrentMessage(messageVO);
                    sseComponent.sendMessage(currentUser.getUsername(), SSEMessageConstants.SERVICE_TYPE.AI, SSEMessageConstants.CUSTOM.ADMIN, JSON.toJSONString(respMessageVo));
                    // NOTE(review): completing per fragment AND calling saveSendHistory below
                    // looks like it may persist the answer twice for non-preview requests — confirm intent.
                    onCurrentCompleteMessage(messageVO, sendMessageFO, currentUser, aiModel);
                });
                // Persist this answer separately; uses the null-safe flag computed above
                // (the previous direct unboxing of getHasPreview() could NPE)
                if (!isPreview) {
                    // Only non-preview requests are recorded
                    this.saveSendHistory(messageVO, sendMessageFO, currentUser, aiModel);
                }
                sendStartDoneStatusSSE(startDoneMessage, currentUser, DONE_STATUS);
            }
        } else {
            // No knowledge base mounted — plain LLM call
            sendRequestToLLMByClient(reqOptions, aiPlatform, messageVO, sendMessageFO, currentUser, aiModel);
        }
    }

    /**
     * Routes the request to the concrete platform client based on the platform code.
     */
    private void sendRequestToLLMByClient(ReqOptions reqOptions, AiPlatform aiPlatform, MessageVO messageVO, SendMessageFO sendMessageFO,
                                          LoginUser currentUser, AiModel aiModel) {
        PlatformEnums platformEnums = EnumUtil.getEnumByCode(aiPlatform.getCode(), PlatformEnums.class);
        Assert.notNull(platformEnums, "暂不支持的LLM平台");
        switch (platformEnums) {
            case SILICON_FLOW:
                sendSiliconflowRequest(reqOptions, aiPlatform, messageVO, sendMessageFO, currentUser, aiModel);
                break;
            case OLLAMA:
                sendOllamaRequest(reqOptions, aiPlatform, messageVO, sendMessageFO, currentUser, aiModel);
                break;
            case DASHSCOPE:
                sendDashscopeRequest(reqOptions, aiPlatform, messageVO, sendMessageFO, currentUser, aiModel);
                break;
            default:
                // ServiceException for consistency with every other unsupported-case branch
                throw new ServiceException("暂不支持的LLM平台");
        }
    }

    /**
     * Sends a chat request to the Dashscope (Bailian) platform, streaming or not
     * depending on the request. Image generation is not supported on this platform.
     */
    private void sendDashscopeRequest(ReqOptions reqOptions, AiPlatform aiPlatform, MessageVO messageVO, SendMessageFO sendMessageFO,
                                      LoginUser currentUser, AiModel aiModel) {
        DashscopePlatformConfig dashscopePlatformConfig = new DashscopePlatformConfig(aiModel.getCode(), aiPlatform.getApiKey(), aiPlatform.getBaseUrl());
        DashscopePlatform dashscopePlatform = new DashscopePlatform(dashscopePlatformConfig);
        LLMModelTypeEnums llmModelTypeEnums = EnumUtil.getEnumByCode(aiModel.getType(), LLMModelTypeEnums.class);
        // Guard against NPE: switching on a null enum throws a bare NullPointerException
        Assert.notNull(llmModelTypeEnums, "暂不支持的LLM类型");
        switch (llmModelTypeEnums) {
            case CHAT:
                if (!sendMessageFO.isStream()) {
                    // Non-streaming call: single response message
                    DashscopePlatformChatResp dashscopePlatformChatResp = dashscopePlatform.chat(reqOptions);
                    DashscopePlatformChatResp.Message delta = dashscopePlatformChatResp.getChoices().get(0).getMessage();
                    messageVO.setContent(delta.getContent())
                            .setReasoningContent(null)
                            .setLlmMessageId(dashscopePlatformChatResp.getId());
                    onCurrentMessage(messageVO);
                    // Push the whole answer in one SSE exchange
                    sendSimpleSSE(messageVO.getContent(), messageVO, currentUser);
                    onCurrentCompleteMessage(messageVO, sendMessageFO, currentUser, aiModel);
                } else {
                    // Streaming call: forward each delta chunk over SSE as it arrives
                    dashscopePlatform.chatStream(reqOptions, new StreamRespListener() {
                        private final MessageVO currentMessageVo = new MessageVO();

                        @Override
                        public void onStart(ReqContext context) {
                            // Opening frame
                            sendStartDoneStatusSSE(currentMessageVo, currentUser, START_STATUS);
                        }

                        @Override
                        public void onMessage(ReqContext reqContext, BaseResponse baseResponse) {
                            log.info("收到消息: {}", JSON.toJSONString(baseResponse.getSourceData()));
                            DashscopePlatformChatResp dashscopePlatformChatResp = baseResponse.getResponse(DashscopePlatformChatResp.class);
                            DashscopePlatformChatResp.Delta delta = dashscopePlatformChatResp.getChoices().get(0).getDelta();
                            messageVO.setContent(delta.getContent())
                                    .setReasoningContent(delta.getReasoning_content())
                                    .setLlmMessageId(dashscopePlatformChatResp.getId());
                            onCurrentMessage(messageVO);
                            BeanUtils.copyProperties(messageVO, currentMessageVo);
                            sseComponent.sendMessage(currentUser.getUsername(), SSEMessageConstants.SERVICE_TYPE.AI, SSEMessageConstants.CUSTOM.ADMIN, JSON.toJSONString(messageVO));
                        }

                        @Override
                        public void onStop(ReqContext context) {
                            onCurrentCompleteMessage(currentMessageVo, sendMessageFO, currentUser, aiModel);
                            // Closing frame
                            sendStartDoneStatusSSE(currentMessageVo, currentUser, DONE_STATUS);
                        }
                    });
                }
                break;
            case IMAGE:
                throw new ServiceException("百炼平台未支持文生图同步API能力");
            default:
                throw new ServiceException("暂不支持的LLM类型");
        }
    }

    /**
     * Sends a chat request to an Ollama instance, streaming or not depending on the
     * request. Ollama does not expose chat message ids, so a local one is generated.
     * Image generation is not supported.
     */
    private void sendOllamaRequest(ReqOptions reqOptions, AiPlatform aiPlatform, MessageVO messageVO, SendMessageFO sendMessageFO,
                                   LoginUser currentUser, AiModel aiModel) {
        OllamaPlatformConfig ollamaPlatformConfig = new OllamaPlatformConfig(aiModel.getCode(), aiPlatform.getApiKey(), aiPlatform.getBaseUrl());
        OllamaPlatform ollamaPlatform = new OllamaPlatform(ollamaPlatformConfig);
        LLMModelTypeEnums llmModelTypeEnums = EnumUtil.getEnumByCode(aiModel.getType(), LLMModelTypeEnums.class);
        // Guard against NPE: switching on a null enum throws a bare NullPointerException
        Assert.notNull(llmModelTypeEnums, "暂不支持的LLM模型类型");
        final String llmMessageId = String.format("ollama-%s", UUID.randomUUID());
        switch (llmModelTypeEnums) {
            case CHAT:
                if (!sendMessageFO.isStream()) {
                    // Non-streaming call: single response message
                    OllamaChatResp ollamaChatResp = ollamaPlatform.chat(reqOptions);
                    messageVO.setContent(ollamaChatResp.getMessage().getContent())
                            .setReasoningContent(null)
                            .setLlmMessageId(llmMessageId);
                    onCurrentMessage(messageVO);
                    // Push the whole answer in one SSE exchange
                    sendSimpleSSE(messageVO.getContent(), messageVO, currentUser);
                    onCurrentCompleteMessage(messageVO, sendMessageFO, currentUser, aiModel);
                } else {
                    // Streaming call: forward each chunk over SSE as it arrives
                    ollamaPlatform.chatStream(reqOptions, new StreamRespListener() {
                        private final MessageVO currentMessageVo = new MessageVO();

                        @Override
                        public void onStart(ReqContext context) {
                            // Opening frame
                            sendStartDoneStatusSSE(currentMessageVo, currentUser, START_STATUS);
                        }

                        @Override
                        public void onMessage(ReqContext reqContext, BaseResponse baseResponse) {
                            log.info("收到消息: {}", JSON.toJSONString(baseResponse.getSourceData()));
                            OllamaChatResp ollamaChatResp = baseResponse.getResponse(OllamaChatResp.class);
                            messageVO.setContent(ollamaChatResp.getMessage().getContent())
                                    .setReasoningContent(null)
                                    .setLlmMessageId(llmMessageId);
                            onCurrentMessage(messageVO);
                            BeanUtils.copyProperties(messageVO, currentMessageVo);
                            sseComponent.sendMessage(currentUser.getUsername(), SSEMessageConstants.SERVICE_TYPE.AI, SSEMessageConstants.CUSTOM.ADMIN, JSON.toJSONString(messageVO));
                        }

                        @Override
                        public void onStop(ReqContext context) {
                            onCurrentCompleteMessage(currentMessageVo, sendMessageFO, currentUser, aiModel);
                            // Closing frame
                            sendStartDoneStatusSSE(currentMessageVo, currentUser, DONE_STATUS);
                        }
                    });
                }
                break;
            case IMAGE:
                throw new ServiceException("Ollama未支持文生图API能力");
            default:
                throw new ServiceException("暂不支持的LLM模型类型");
        }
    }

    /**
     * Sends a request to the Siliconflow platform: chat (streaming or not) or
     * text-to-image generation.
     *
     * @param reqOptions    request options
     * @param aiPlatform    platform record (api key / base url)
     * @param messageVO     response carrier
     * @param sendMessageFO request payload
     * @param currentUser   requesting user
     * @param aiModel       model record (code / type)
     */
    private void sendSiliconflowRequest(ReqOptions reqOptions, AiPlatform aiPlatform, MessageVO messageVO, SendMessageFO sendMessageFO,
                                        LoginUser currentUser, AiModel aiModel) {
        SiliconflowPlatformConfig siliconflowPlatformConfig = new SiliconflowPlatformConfig(aiModel.getCode(), aiPlatform.getApiKey(), aiPlatform.getBaseUrl());
        SiliconflowPlatform siliconflowPlatform = new SiliconflowPlatform(siliconflowPlatformConfig);
        LLMModelTypeEnums llmModelTypeEnums = EnumUtil.getEnumByCode(aiModel.getType(), LLMModelTypeEnums.class);
        // Guard against NPE: switching on a null enum throws a bare NullPointerException
        Assert.notNull(llmModelTypeEnums, "暂不支持的LLM类型");
        switch (llmModelTypeEnums) {
            case CHAT:
                if (!sendMessageFO.isStream()) {
                    // Non-streaming call: single response message
                    SiliconflowChatResp siliconflowChatResp = siliconflowPlatform.chat(reqOptions);
                    SiliconflowChatResp.Delta delta = siliconflowChatResp.getChoices().get(0).getMessage();
                    messageVO.setContent(delta.getContent())
                            .setReasoningContent(delta.getReasoning_content())
                            .setLlmMessageId(siliconflowChatResp.getId());
                    onCurrentMessage(messageVO);
                    // Push the whole answer in one SSE exchange
                    sendSimpleSSE(messageVO.getContent(), messageVO, currentUser);
                    onCurrentCompleteMessage(messageVO, sendMessageFO, currentUser, aiModel);
                } else {
                    // Streaming call: forward each delta chunk over SSE as it arrives
                    siliconflowPlatform.chatStream(reqOptions, new StreamRespListener() {
                        private final MessageVO currentMessageVo = new MessageVO();

                        @Override
                        public void onStart(ReqContext context) {
                            // Opening frame
                            sendStartDoneStatusSSE(currentMessageVo, currentUser, START_STATUS);
                        }

                        @Override
                        public void onMessage(ReqContext reqContext, BaseResponse baseResponse) {
                            log.info("收到消息: {}", JSON.toJSONString(baseResponse.getSourceData()));
                            SiliconflowChatResp siliconflowChatResp = baseResponse.getResponse(SiliconflowChatResp.class);
                            SiliconflowChatResp.Delta delta = siliconflowChatResp.getChoices().get(0).getDelta();
                            messageVO.setContent(delta.getContent())
                                    .setReasoningContent(delta.getReasoning_content())
                                    .setLlmMessageId(siliconflowChatResp.getId());
                            onCurrentMessage(messageVO);
                            BeanUtils.copyProperties(messageVO, currentMessageVo);
                            // Push the chunk
                            sseComponent.sendMessage(currentUser.getUsername(), SSEMessageConstants.SERVICE_TYPE.AI, SSEMessageConstants.CUSTOM.ADMIN, JSON.toJSONString(messageVO));
                        }

                        @Override
                        public void onStop(ReqContext context) {
                            onCurrentCompleteMessage(currentMessageVo, sendMessageFO, currentUser, aiModel);
                            // Closing frame
                            sendStartDoneStatusSSE(currentMessageVo, currentUser, DONE_STATUS);
                        }
                    });
                }
                break;
            case IMAGE:
                SiliconflowGenerateImageResp siliconflowGenerateImageResp = siliconflowPlatform.generateImage(reqOptions);
                List<String> urls = siliconflowGenerateImageResp.getImages().stream().map(SiliconflowGenerateImageResp.Images::getUrl).toList();
                dealWithTextToImageData(urls, messageVO);
                onCurrentMessage(messageVO);
                sendSimpleSSE(messageVO.getContent(), messageVO, currentUser);
                onCurrentCompleteMessage(messageVO, sendMessageFO, currentUser, aiModel);
                break;
            default:
                // Fixed copy-paste message: this switch is on the model TYPE, not the platform
                throw new ServiceException("暂不支持的LLM类型");
        }
    }

    /**
     * Re-uploads the generated images to MinIO and wraps them into an HTML gallery
     * that becomes the message content.
     *
     * @param urls      image urls returned by the platform
     * @param messageVO response carrier whose content / id are overwritten
     */
    private void dealWithTextToImageData(List<String> urls, MessageVO messageVO) {
        StringBuilder content = new StringBuilder();
        content.append("<div style=\"display: flex;gap: 5%; margin: 20px 0;padding: 0 50px;flex-wrap: wrap;\">");
        for (String imageUrl : urls) {
            // Persist the (typically short-lived) platform url into MinIO
            imageUrl = FileUploadUtils.uploadMinio(imageUrl);
            String div = """
                    <div style="margin: 15px 0;width: 45%">
                       <img src="{0}" style="width: 100%;border-radius: 5px;"/>
                    </div>
                    """;
            content.append(MessageFormat.format(div, imageUrl))
                    .append("\n");
        }
        content.append("</div>");
        messageVO.setContent(content.toString())
                .setReasoningContent(null)
                .setLlmMessageId("text-to-image-" + UUID.fastUUID());
    }

    /**
     * Appends the current chunk to the Redis buffer keyed by the LLM message id.
     * The list is rewritten wholesale (delete + set) on every chunk.
     */
    private void onCurrentMessage(MessageVO messageVO) {
        log.info("将数据暂时缓存到redis中: {}", JSON.toJSONString(messageVO));
        // Buffer the chunk in Redis until the message completes
        List<MessageVO> messageVOList = redisCache.getCacheList(CacheConstants.LLM_MESSAGE_CACHE + messageVO.getLlmMessageId());
        if (CollectionUtils.isEmpty(messageVOList)) {
            messageVOList = new ArrayList<>();
        }
        messageVOList.add(messageVO);
        redisCache.deleteObject(CacheConstants.LLM_MESSAGE_CACHE + messageVO.getLlmMessageId());
        redisCache.setCacheList(CacheConstants.LLM_MESSAGE_CACHE + messageVO.getLlmMessageId(), messageVOList);
    }

    /**
     * Message completed: drains the Redis buffer, concatenates content / reasoning
     * chunks and persists the answer (skipped for preview requests).
     *
     * @param messageVO     message carrier (provides the id and group info)
     * @param sendMessageFO request payload
     * @param currentUser   requesting user
     * @param aiModel       model used (null in flow mode)
     */
    private void onCurrentCompleteMessage(MessageVO messageVO, SendMessageFO sendMessageFO, LoginUser currentUser, AiModel aiModel) {
        // Drain the Redis buffer
        List<MessageVO> messageVOList = redisCache.getCacheList(CacheConstants.LLM_MESSAGE_CACHE + messageVO.getLlmMessageId());
        redisCache.deleteObject(CacheConstants.LLM_MESSAGE_CACHE + messageVO.getLlmMessageId());
        // Null-safe: getHasPreview() is a Boolean and may be null (direct unboxing could NPE)
        if (Boolean.TRUE.equals(sendMessageFO.getHasPreview())) {
            // Preview requests are never persisted
            return;
        }
        if (!CollectionUtils.isEmpty(messageVOList)) {
            MessageVO saveMessageVo = new MessageVO();
            // Rebuild the full answer from the buffered chunks
            StringBuilder content = new StringBuilder();
            StringBuilder reasonContent = new StringBuilder();
            messageVOList.forEach(item -> {
                if (!StringUtils.isEmpty(item.getContent())) {
                    content.append(item.getContent());
                }
                if (!StringUtils.isEmpty(item.getReasoningContent())) {
                    reasonContent.append(item.getReasoningContent());
                }
            });
            saveMessageVo.setContent(content.toString())
                    .setReasoningContent(reasonContent.toString())
                    .setRequestAnswerGroupName(messageVO.getRequestAnswerGroupName())
                    .setRequestAnswerGroupCode(messageVO.getRequestAnswerGroupCode());
            this.saveSendHistory(saveMessageVo, sendMessageFO, currentUser, aiModel);
        }
    }

    /**
     * Retrieves knowledge-base fragments relevant to the question: embeds the
     * question, runs a vector search, then re-ranks the hits.
     *
     * @param sendMessageFO request payload (question + preview parameters)
     * @param aiApps        app the knowledge base is mounted on (may be null)
     * @return relevant fragments, re-ranked; an empty list when no knowledge base
     *         applies (never null — callers use {@code CollectionUtils.isEmpty})
     */
    protected List<KnowledgeFragmentationVO> getKnowledgeQAVOList(SendMessageFO sendMessageFO, AiApps aiApps) {
        String knowledgeId;
        if (Boolean.TRUE.equals(sendMessageFO.getHasPreview())) {
            // Preview mode: knowledge base comes from the preview parameters
            knowledgeId = sendMessageFO.getPreviewParam().getKnowledgeId();
        } else {
            // Normal mode: knowledge base is the one mounted on the app
            if (aiApps == null) {
                // App does not exist — nothing to retrieve
                return Collections.emptyList();
            }
            knowledgeId = aiApps.getKnowledgeId();
        }
        if (StringUtils.isEmpty(knowledgeId)) {
            // No knowledge base mounted
            return Collections.emptyList();
        }
        // A knowledge base is mounted:
        // 1. Embed the question
        AiKnowledge aiKnowledge = knowledgeService.getById(knowledgeId);
        Assert.notNull(aiKnowledge, "知识库不存在或已被删除");
        EmbeddingsVO embeddingsVO = generateQAToEmbeddings(sendMessageFO.getMessage(), aiKnowledge.getRankModelId());
        // 2. Vector search over the knowledge base
        List<KnowledgeFragmentationVO> knowledgeQAVOList =
                dbOptionService.query(embeddingsVO.getData().get(0).getEmbedding(), List.of(knowledgeId));

        // No hits, a single hit, or an error-tip marker: re-ranking is pointless — return directly.
        // (Boolean.TRUE.equals also null-guards getHasErrorTip.)
        if (CollectionUtils.isEmpty(knowledgeQAVOList)
                || knowledgeQAVOList.size() == 1
                || Boolean.TRUE.equals(knowledgeQAVOList.get(0).getHasErrorTip())) {
            return knowledgeQAVOList;
        }
        // 3. Re-rank the hits against the question
        List<String> contentList = knowledgeQAVOList.stream().map(KnowledgeFragmentationVO::getFragmentation).toList();
        ReRankVO contentReRankVO = textToReRank(contentList, sendMessageFO.getMessage(), aiKnowledge.getReRankModelId());

        List<KnowledgeFragmentationVO> res = new ArrayList<>();
        for (ReRankVO.ReRankResults item : contentReRankVO.getResults()) {
            KnowledgeFragmentationVO knowledgeFragmentationVO = new KnowledgeFragmentationVO();
            Integer index = item.getIndex();
            // Re-rank results reference the original hits by index
            KnowledgeFragmentationVO kv = knowledgeQAVOList.get(index);
            knowledgeFragmentationVO.setFragmentation(kv.getFragmentation());
            res.add(knowledgeFragmentationVO);
        }
        return res;
    }

    /**
     * Persists one question/answer exchange as send history.
     *
     * @param messageVO     answer carrier (content + reasoning)
     * @param sendMessageFO request payload (question)
     * @param currentUser   sender
     * @param aiModel       model used (null in flow mode)
     */
    private void saveSendHistory(MessageVO messageVO, SendMessageFO sendMessageFO, LoginUser currentUser, AiModel aiModel) {
        log.info("保存发送记录: {}", JSON.toJSONString(messageVO));
        String result = messageVO.getContent();
        if (sendMessageFO.isUseDeepThing() && StringUtils.isNotBlank(messageVO.getReasoningContent())) {
            // Deep-thinking mode stores reasoning + content as the full answer
            result = messageVO.getReasoningContent() + messageVO.getContent();
        }
        AiSendHistory aiSendHistory = new AiSendHistory();
        aiSendHistory.setCode(aiModel != null ? aiModel.getCode() : null)
                .setGroupCode(messageVO.getRequestAnswerGroupCode())
                .setGroupName(messageVO.getRequestAnswerGroupName())
                .setQuestion(sendMessageFO.getMessage())
                .setAnswer(result)
                .setContent(messageVO.getContent())
                .setReasoningContent(messageVO.getReasoningContent())
                .setAppId(sendMessageFO.getAiAppsId())
                .setCreateBy(currentUser.getId());
        sendHistoryService.save(aiSendHistory);
    }

    /**
     * Pushes an empty-content lifecycle frame (start/done) over SSE.
     *
     * @param messageVO   carrier; its content is cleared and its status overwritten
     * @param currentUser addressee
     * @param status      one of {@link #START_STATUS}/{@link #DOING_STATUS}/{@link #DONE_STATUS}
     */
    protected void sendStartDoneStatusSSE(MessageVO messageVO, LoginUser currentUser, String status) {
        messageVO.setContent("")
                .setStatus(status);
        sseComponent.sendMessage(currentUser.getUsername(), SSEMessageConstants.SERVICE_TYPE.AI, SSEMessageConstants.CUSTOM.ADMIN, JSON.toJSONString(messageVO));
    }

    /**
     * Pushes a complete non-streamed message as the start / content / done frame
     * sequence expected by the client.
     *
     * @param content     message body
     * @param messageVO   carrier reused for all three frames
     * @param currentUser addressee
     */
    protected void sendSimpleSSE(String content, MessageVO messageVO, LoginUser currentUser) {
        // Opening frame
        sendStartDoneStatusSSE(messageVO, currentUser, START_STATUS);
        // Content frame
        messageVO.setStatus(DOING_STATUS)
                .setContent(content);
        sseComponent.sendMessage(currentUser.getUsername(), SSEMessageConstants.SERVICE_TYPE.AI, SSEMessageConstants.CUSTOM.ADMIN, JSON.toJSONString(messageVO));

        // Closing frame
        sendStartDoneStatusSSE(messageVO, currentUser, DONE_STATUS);
    }
}
