package com.sys.ai.service.impl;

import com.gy.ai.agent.core.BaseResponse;
import com.gy.ai.agent.core.ReqOptions;
import com.gy.ai.agent.platform.dashscope.DashscopePlatform;
import com.gy.ai.agent.platform.dashscope.DashscopePlatformConfig;
import com.gy.ai.agent.platform.dashscope.entity.DashscopePlatformEmbeddingResp;
import com.gy.ai.agent.platform.dashscope.entity.DashscopePlatformRerankReq;
import com.gy.ai.agent.platform.dashscope.entity.DashscopePlatformRerankResp;
import com.gy.ai.agent.platform.ollama.OllamaPlatform;
import com.gy.ai.agent.platform.ollama.OllamaPlatformConfig;
import com.gy.ai.agent.platform.ollama.entity.OllamaEmbeddingResp;
import com.gy.ai.agent.platform.siliconflow.SiliconflowPlatform;
import com.gy.ai.agent.platform.siliconflow.SiliconflowPlatformConfig;
import com.gy.ai.agent.platform.siliconflow.entity.SiliconflowEmbeddingResp;
import com.gy.ai.agent.platform.siliconflow.entity.SiliconflowRerankReq;
import com.gy.ai.agent.platform.siliconflow.entity.SiliconflowRerankResp;
import com.sys.ai.domain.AiModel;
import com.sys.ai.domain.AiPlatform;
import com.sys.ai.enums.PlatformEnums;
import com.sys.ai.fo.SendMessageFO;
import com.sys.ai.service.abstracts.AiAbstract;
import com.sys.ai.vo.EmbeddingsVO;
import com.sys.ai.vo.MessageVO;
import com.sys.ai.vo.ReRankVO;
import com.sys.common.core.domain.model.LoginUser;
import com.sys.common.exception.ServiceException;
import com.sys.common.utils.EnumUtil;
import com.sys.common.utils.SecurityUtils;
import com.sys.common.utils.bean.BeanUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;

import java.util.List;

/**
 * @author LGY
 * Create by 2025/5/8 10:04
 */
@Slf4j
@Service
public class AiServiceImpl extends AiAbstract {


    /**
     * Asynchronously streams an LLM answer (or workflow result) for the given request.
     * <p>
     * The heavy lifting runs on {@code executor}; any failure is logged and pushed back
     * to the client via SSE rather than propagated, so this method never throws to its caller.
     *
     * @param sendMessageFO user request; must carry either a model id or a workflow id
     */
    @Override
    public void sendStreamMessage(SendMessageFO sendMessageFO) {
        // Capture the caller's identity BEFORE hopping threads: the security context
        // is request-bound and not visible inside the executor's worker thread.
        LoginUser currentUser = SecurityUtils.getLoginUser();
        executor.execute(() -> {
            MessageVO messageVO = new MessageVO();
            try {
                // A request must target either a model or a workflow.
                if (StringUtils.isBlank(sendMessageFO.getModelId()) && StringUtils.isBlank(sendMessageFO.getWorkflowId())) {
                    throw new ServiceException("未选择模型或流程");
                }
                // New conversations get a timestamp-based group code.
                String requestAnswerGroupCode = StringUtils.isBlank(sendMessageFO.getRequestAnswerGroupCode()) ?
                        String.valueOf(System.currentTimeMillis()) : sendMessageFO.getRequestAnswerGroupCode();
                // Default the group name to the first 35 chars of the question text.
                String requestAnswerGroupName = StringUtils.isBlank(sendMessageFO.getRequestAnswerGroupName()) ?
                        sendMessageFO.getMessage().substring(0, Math.min(sendMessageFO.getMessage().length(), 35)) :
                        sendMessageFO.getRequestAnswerGroupName();
                messageVO.setRequestAnswerGroupCode(requestAnswerGroupCode)
                        .setRequestAnswerGroupName(requestAnswerGroupName);
                if (StringUtils.isNotBlank(sendMessageFO.getWorkflowId())) {
                    // Workflow-orchestration mode
                    this.executeFlowMode(sendMessageFO, currentUser, messageVO);
                } else {
                    // Plain single-model mode
                    this.executeNormalMode(sendMessageFO, messageVO, currentUser);
                }
            } catch (Exception e) {
                log.error("LLM请求异常", e);
                // Surface the failure to the client over SSE instead of dying silently.
                sendSimpleSSE(e.getMessage(), messageVO, currentUser);
            }
        });
    }


    /**
     * Generates an embedding vector for the given text using the configured model's platform.
     *
     * @param text    text to embed
     * @param modelId id of the embedding model to use
     * @return platform-agnostic embedding result
     * @throws IllegalArgumentException if the model or platform record is missing
     * @throws RuntimeException         if the resolved platform is not supported
     */
    @Override
    public EmbeddingsVO generateQAToEmbeddings(String text, String modelId) {
        AiModel aiModel = aiModelService.getById(modelId);
        Assert.notNull(aiModel, "模型不存在");
        AiPlatform aiPlatform = aiPlatformService.getById(aiModel.getPlatformId());
        Assert.notNull(aiPlatform, "平台不存在");
        EmbeddingsVO embeddingsVO = null;
        PlatformEnums platformEnums = EnumUtil.getEnumByCode(aiPlatform.getCode(), PlatformEnums.class);
        Assert.notNull(platformEnums, "LLM提供平台不存在");
        switch (platformEnums) {
            case SILICON_FLOW:
                SiliconflowPlatformConfig siliconflowPlatformConfig = new SiliconflowPlatformConfig(aiModel.getCode(), aiPlatform.getApiKey(), aiPlatform.getBaseUrl());
                SiliconflowPlatform siliconflowPlatform = new SiliconflowPlatform(siliconflowPlatformConfig);
                // renamed from siliconflowRerankResp: this is an embedding response, not a rerank response
                SiliconflowEmbeddingResp siliconflowEmbeddingResp = siliconflowPlatform.embedding(text);
                embeddingsVO = BeanUtils.copyPropertiesByJSON(siliconflowEmbeddingResp, EmbeddingsVO.class);
                break;
            case OLLAMA:
                OllamaPlatformConfig ollamaPlatformConfig = new OllamaPlatformConfig(aiModel.getCode(), aiPlatform.getApiKey(), aiPlatform.getBaseUrl());
                OllamaPlatform ollamaPlatform = new OllamaPlatform(ollamaPlatformConfig);
                OllamaEmbeddingResp ollamaEmbeddingResp = ollamaPlatform.embedding(text);
                // Ollama returns raw vectors; wrap each one in the common EmbeddingData shape.
                List<EmbeddingsVO.EmbeddingData> embeddingDataList = ollamaEmbeddingResp.getEmbeddings().stream().map(item -> {
                    EmbeddingsVO.EmbeddingData embeddingData = new EmbeddingsVO.EmbeddingData();
                    embeddingData.setEmbedding(item);
                    return embeddingData;
                }).toList();
                embeddingsVO = new EmbeddingsVO();
                embeddingsVO.setModel(ollamaEmbeddingResp.getModel())
                        .setData(embeddingDataList);
                break;
            case DASHSCOPE:
                DashscopePlatformConfig dashscopePlatformConfig = new DashscopePlatformConfig(aiModel.getCode(), aiPlatform.getApiKey(), aiPlatform.getBaseUrl());
                DashscopePlatform dashscopePlatform = new DashscopePlatform(dashscopePlatformConfig);
                DashscopePlatformEmbeddingResp resp = dashscopePlatform.embedding(text);
                embeddingsVO = BeanUtils.copyPropertiesByJSON(resp, EmbeddingsVO.class);
                break;
            default:
                throw new RuntimeException("暂不支持的LLM平台");
        }
        return embeddingsVO;
    }

    /**
     * 文本结果重排序 — reranks candidate documents against a query text.
     *
     * @param documents     待重排文本 (candidate documents to rerank)
     * @param queryText     目标查询文本 (query text to rank against)
     * @param reRankModelId 重排模型id (id of the rerank model)
     * @return rerank result containing per-document relevance scores
     * @throws IllegalArgumentException if the model or platform record is missing
     * @throws RuntimeException         if the resolved platform is not supported
     */
    @Override
    public ReRankVO textToReRank(List<String> documents, String queryText, String reRankModelId) {
        AiModel aiModel = aiModelService.getById(reRankModelId);
        Assert.notNull(aiModel, "模型不存在");
        AiPlatform aiPlatform = aiPlatformService.getById(aiModel.getPlatformId());
        Assert.notNull(aiPlatform, "LLM提供平台不存在");
        ReRankVO reRankVO = null;
        PlatformEnums platformEnums = EnumUtil.getEnumByCode(aiPlatform.getCode(), PlatformEnums.class);
        Assert.notNull(platformEnums, "LLM提供平台不存在");
        switch (platformEnums) {
            case SILICON_FLOW:
                SiliconflowPlatformConfig siliconflowPlatformConfig = new SiliconflowPlatformConfig(aiModel.getCode(), aiPlatform.getApiKey(),
                        aiPlatform.getBaseUrl());
                SiliconflowPlatform siliconflowPlatform = new SiliconflowPlatform(siliconflowPlatformConfig);
                SiliconflowRerankReq siliconflowRerankReq = new SiliconflowRerankReq();
                // Request scores for ALL documents (top_n = size); documents themselves
                // are not echoed back since the caller already holds them.
                siliconflowRerankReq.setQuery(queryText)
                        .setDocuments(documents)
                        .setTop_n(documents.size())
                        .setReturn_documents(false)
                        .setMax_chunks_per_doc(1024)
                        .setOverlap_tokens(80);
                SiliconflowRerankResp siliconflowRerankResp = siliconflowPlatform.rerank(siliconflowRerankReq);
                reRankVO = BeanUtils.copyPropertiesByJSON(siliconflowRerankResp, ReRankVO.class);
                break;
            case OLLAMA:
                // Ollama's API does not provide a rerank endpoint yet; this delegates to
                // the platform's placeholder implementation.
                OllamaPlatformConfig ollamaPlatformConfig = new OllamaPlatformConfig(aiModel.getCode(), aiPlatform.getApiKey(), aiPlatform.getBaseUrl());
                OllamaPlatform ollamaPlatform = new OllamaPlatform(ollamaPlatformConfig);
                BaseResponse baseResponse = ollamaPlatform.rerank(ReqOptions.DEFAULT);
                reRankVO = BeanUtils.copyPropertiesByJSON(baseResponse, ReRankVO.class);
                break;
            case DASHSCOPE:
                DashscopePlatformConfig dashscopePlatformConfig = new DashscopePlatformConfig(aiModel.getCode(), aiPlatform.getApiKey(), aiPlatform.getBaseUrl());
                DashscopePlatform dashscopePlatform = new DashscopePlatform(dashscopePlatformConfig);
                DashscopePlatformRerankReq dashscopePlatformRerankReq = new DashscopePlatformRerankReq();
                dashscopePlatformRerankReq.setInput(new DashscopePlatformRerankReq.Input()
                                .setQuery(queryText)
                                .setDocuments(documents))
                        .setParameters(new DashscopePlatformRerankReq.Parameters()
                                .setReturn_documents(false)
                                .setTop_n(documents.size()));
                DashscopePlatformRerankResp dashscopePlatformRerankResp = dashscopePlatform.rerank(dashscopePlatformRerankReq);
                // Map DashScope's result shape onto the common ReRankVO structure.
                List<ReRankVO.ReRankResults> reRankResultsList = dashscopePlatformRerankResp.getOutput().getResults().stream().map(item -> {
                    ReRankVO.ReRankResults reRankResults = new ReRankVO.ReRankResults();
                    reRankResults.setIndex(item.getIndex())
                            .setRelevanceScore(item.getRelevance_score());
                    return reRankResults;
                }).toList();
                reRankVO = new ReRankVO();
                reRankVO.setId(dashscopePlatformRerankResp.getRequest_id())
                        .setResults(reRankResultsList);
                // BUGFIX: missing break caused fall-through into default, so the
                // DashScope path always threw "暂不支持的LLM平台" after doing the work.
                break;
            default:
                throw new RuntimeException("暂不支持的LLM平台");
        }
        return reRankVO;
    }
}
