package com.sz.admin.ai.factory.chatFactory.handler.impl;

import com.alibaba.cloud.ai.dashscope.api.DashScopeApi;
import com.alibaba.cloud.ai.dashscope.chat.DashScopeChatModel;
import com.alibaba.cloud.ai.dashscope.chat.DashScopeChatOptions;
import com.sz.admin.ai.factory.chatFactory.entity.ChatPlatformEnum;
import com.sz.admin.ai.factory.chatFactory.entity.MessageTypoeEnum;
import com.sz.admin.ai.factory.chatFactory.entity.dto.ChatDTO;
import com.sz.admin.ai.factory.chatFactory.entity.dto.ChatEmbeddingModelDTO;
import com.sz.admin.ai.factory.chatFactory.entity.dto.ChatMessageDTO;
import com.sz.admin.ai.factory.chatFactory.entity.dto.ChatModelDTO;
import com.sz.admin.ai.factory.chatFactory.handler.ChatHandler;
import com.sz.admin.ai.factory.embeddingFactory.EmbeddingHandlerFactory;
import com.sz.admin.ai.factory.embeddingFactory.Handler.EmbeddingHandler;
import com.sz.admin.ai.factory.embeddingFactory.entity.EmbeddingHandlerRequestDTO;
import com.sz.admin.ai.util.MessageChatMemory;
import com.sz.admin.aichatmessage.pojo.dto.AiChatMessageUpdateDTO;
import com.sz.admin.aichatmessage.pojo.po.AiChatMessage;
import com.sz.admin.aichatmessage.service.AiChatMessageService;
import com.sz.core.util.JsonUtils;
import lombok.AllArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.model.Media;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.http.codec.ServerSentEvent;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import reactor.core.publisher.Flux;

/**
 * Description: Aliyun (DashScope) chat handler implementation.
 * @Author: TangYuan
 * @Date: 2025/3/7 16:26
 */
@Component
@AllArgsConstructor
@Log4j2
public class AliyunChatHandlerImpl implements ChatHandler {

    private final EmbeddingHandlerFactory embeddingHandlerFactory;

    private final MessageChatMemory messageChatMemory;

    private final AiChatMessageService aiChatMessageService;

    /**
     * Builds a DashScope chat model configured from the request's model settings.
     *
     * @param chatModelDTO model configuration (model name, temperature as a string)
     * @return {@link DashScopeChatModel} ready for streaming chat
     */
    private static DashScopeChatModel getModel(ChatModelDTO chatModelDTO) {
        // SECURITY(review): the API key was hard-coded in source control; any key that has been
        // committed must be considered leaked and rotated. Prefer the environment variable.
        String apiKey = System.getenv("DASHSCOPE_API_KEY");
        if (apiKey == null || apiKey.isBlank()) {
            // FIXME(review): legacy fallback kept only so existing deployments keep working;
            // rotate this key and remove the fallback.
            apiKey = "sk-1ee02e3b0e564d18b289e818f485f763";
        }
        DashScopeApi dashScopeApi = new DashScopeApi(apiKey);
        DashScopeChatOptions dashScopeChatOptions = DashScopeChatOptions
                .builder()
                // Random seed for generation; a fixed value reproduces the same text for the
                // same prompt. -1 means "no fixed seed" (default).
                .withSeed(-1)
                // Top-K sampling: higher (e.g. 100) is more diverse, lower (e.g. 10) more
                // conservative. Default 40.
                .withTopK(40)
                // Top-P (nucleus) sampling, used together with top-K: higher (e.g. 0.95) is more
                // diverse, lower (e.g. 0.5) more focused. Default 0.9.
                .withTopP(0.9)
                // Temperature: higher values make the model answer more creatively.
                // NOTE(review): parsing may throw if the DTO carries a non-numeric value —
                // validation is assumed to happen upstream.
                .withTemperature(Double.valueOf(chatModelDTO.getTemperature()))
                // Model name, e.g. "qwen-plus".
                .withModel(chatModelDTO.getModelName())
                .build();

        return new DashScopeChatModel(dashScopeApi, dashScopeChatOptions);
    }

    /**
     * Applies the user's message (text and any attached media) to the prompt spec.
     *
     * @param chatMessageDTO incoming user message
     * @param promptUserSpec Spring AI prompt user spec to populate
     */
    private static void useFileChat(ChatMessageDTO chatMessageDTO, ChatClient.PromptUserSpec promptUserSpec) {
        UserMessage userMessage = new UserMessage(chatMessageDTO.getContent());
        // NOTE(review): a UserMessage built from plain text carries no media, so this branch is
        // currently dead; presumably media was meant to come from chatMessageDTO — confirm intent.
        if (!CollectionUtils.isEmpty(userMessage.getMedia())) {
            Media[] medias = new Media[userMessage.getMedia().size()];
            promptUserSpec.media(userMessage.getMedia().toArray(medias));
        }
        promptUserSpec.text(userMessage.getText());
    }

    /**
     * Wires a vector-store (RAG) advisor into the request when an embedding model and a
     * knowledge base are configured.
     *
     * @param chatMessageDTO          incoming user message (used as the retrieval query)
     * @param embeddingMode           embedding model configuration; may be null
     * @param advisorSpec             Spring AI advisor spec to extend
     * @param embeddingHandlerFactory factory resolving the platform-specific embedding handler
     */
    private static void useEmbedding(ChatMessageDTO chatMessageDTO, ChatEmbeddingModelDTO embeddingMode, ChatClient.AdvisorSpec advisorSpec, EmbeddingHandlerFactory embeddingHandlerFactory) {
        if (embeddingMode != null) {
            EmbeddingHandler ollamaEmbedding = embeddingHandlerFactory.getHandler(embeddingMode.getModelPlatformName());
            // Only attach retrieval when both a handler and a knowledge base are available.
            if (ollamaEmbedding != null && embeddingMode.getKnowledgeBaseId() != null) {
                EmbeddingHandlerRequestDTO embeddingHandlerRequestDTO = EmbeddingHandlerRequestDTO.builder()
                        // Target model platform.
                        .thirdPartyName(embeddingMode.getModelPlatformName())
                        .modelName(embeddingMode.getModelName())
                        .modelKey("")
                        .userId(embeddingMode.getUserId())
                        .knowledgeBaseId(embeddingMode.getKnowledgeBaseId())
                        .build();
                // Top-5 similar chunks are injected into the prompt context.
                ollamaEmbedding.useVectorStore(embeddingHandlerRequestDTO, 5, advisorSpec, chatMessageDTO.getContent());
            }
        }
    }

    /**
     * Persists the user's outgoing message before the model call; the token count is updated
     * later by {@code endMessage} once usage is known.
     *
     * @param chat chat request
     * @return the saved {@link AiChatMessage} row (used later to patch its token count)
     */
    private AiChatMessage startMessage(ChatDTO chat) {
        AiChatMessage aiChatMessage = new AiChatMessage();
        aiChatMessage.setThirdPartyName(chat.getUser().getThirdPartyName());
        aiChatMessage.setSessionId(chat.getMessage().getSessionId());
        aiChatMessage.setType(MessageTypoeEnum.getEnum(chat.getMessage().getRole()).getCapitalString());
        aiChatMessage.setContent(chat.getMessage().getContent());
        aiChatMessage.setPlatformName(chat.getChatModel().getModelPlatformName());
        aiChatMessage.setUseModel(chat.getChatModel().getModelName());
        // Token usage is unknown until the stream finishes; patched in endMessage.
        aiChatMessage.setTotalToken(0);
        aiChatMessageService.save(aiChatMessage);
        return aiChatMessage;
    }

    /**
     * Persists the assistant's full reply and back-fills the prompt-token count on the
     * previously-saved user message.
     *
     * @param chat             chat request
     * @param aiChatMessage    user-message row saved by {@code startMessage}
     * @param chatMessage      full assistant reply text
     * @param promptTokens     tokens consumed by the prompt
     * @param completionTokens tokens produced by the completion
     * @param capitalString    message-role string used to resolve the message type
     */
    private void endMessage(ChatDTO chat, AiChatMessage aiChatMessage, String chatMessage, Long promptTokens, Long completionTokens, String capitalString) {
        log.info(chatMessage);

        // Back-fill the token count on the user's message.
        AiChatMessageUpdateDTO aiChatMessageUpdateDTO = new AiChatMessageUpdateDTO();
        aiChatMessageUpdateDTO.setId(aiChatMessage.getId());
        aiChatMessageUpdateDTO.setTotalToken(Math.toIntExact(promptTokens));
        aiChatMessageService.update(aiChatMessageUpdateDTO);

        // Save the assistant's reply as a new message row.
        AiChatMessage assistantMessage = new AiChatMessage();
        assistantMessage.setThirdPartyName(chat.getUser().getThirdPartyName());
        assistantMessage.setSessionId(chat.getMessage().getSessionId());
        assistantMessage.setType(MessageTypoeEnum.getEnum(capitalString).getCapitalString());
        assistantMessage.setContent(chatMessage);
        assistantMessage.setPlatformName(chat.getChatModel().getModelPlatformName());
        assistantMessage.setUseModel(chat.getChatModel().getModelName());
        assistantMessage.setTotalToken(Math.toIntExact(completionTokens));
        aiChatMessageService.save(assistantMessage);
    }

    /**
     * Identifies the platform this handler serves.
     *
     * @return {@link ChatPlatformEnum#ALIYUN}
     */
    @Override
    public ChatPlatformEnum getChatPlatformEnum() {
        return ChatPlatformEnum.ALIYUN;
    }

    /**
     * Streams a chat completion from DashScope as server-sent events, persisting the user
     * message up-front and the assistant reply (with token usage) when the stream finishes.
     *
     * @param chat chat request (message, model config, optional embedding config)
     * @return flux of SSE frames, each carrying one serialized {@link ChatResponse} chunk
     */
    @Override
    public Flux<ServerSentEvent<String>> chat(ChatDTO chat) {
        ChatMessageDTO chatMessageDTO = chat.getMessage();
        ChatModelDTO chatModelDTO = chat.getChatModel();

        DashScopeChatModel dashScopeChatModel = getModel(chatModelDTO);

        AiChatMessage aiChatMessage = startMessage(chat);

        Flux<ChatResponse> chatResponseFlux = ChatClient
                .builder(dashScopeChatModel)
                .defaultUser(promptUserSpec -> useFileChat(chatMessageDTO, promptUserSpec))
                .defaultAdvisors(advisorSpec -> {
                    // Conversation memory scoped to the session, limited to the configured depth.
                    advisorSpec.advisors(new MessageChatMemoryAdvisor(messageChatMemory, chatMessageDTO.getSessionId(), Integer.parseInt(chatModelDTO.getContextNumber())));
                    useEmbedding(chatMessageDTO, chat.getEmbeddingModel(), advisorSpec, embeddingHandlerFactory);
                })
                .build()
                .prompt(chatMessageDTO.getPromptText())
                .stream()
                .chatResponse();

        StringBuilder chatMessage = new StringBuilder();
        StringBuilder thinkMessage = new StringBuilder();
        return chatResponseFlux.map(chatResponse -> {
            // Accumulate only after null-checking: intermediate chunks may lack text,
            // reasoning content, or a finishReason, and append(null) would insert "null".
            if (chatResponse.getResult() != null && chatResponse.getResult().getOutput() != null) {
                var output = chatResponse.getResult().getOutput();
                if (output.getText() != null) {
                    chatMessage.append(output.getText());
                }
                if (output.getMetadata() != null) {
                    Object reasoning = output.getMetadata().get("reasoningContent");
                    if (reasoning != null) {
                        thinkMessage.append(reasoning);
                    }
                    // finishReason is absent on intermediate chunks; null-safe, case-insensitive
                    // match covers both "STOP" and "stop" as seen from the API.
                    Object finishReason = output.getMetadata().get("finishReason");
                    if (finishReason != null && "stop".equalsIgnoreCase(finishReason.toString())) {
                        log.debug("chat answer: {}", chatMessage);
                        log.debug("reasoning: {}", thinkMessage);
                        endMessage(chat,
                                aiChatMessage,
                                chatMessage.toString(),
                                chatResponse.getMetadata().getUsage().getPromptTokens(),
                                chatResponse.getMetadata().getUsage().getGenerationTokens(),
                                output.getMessageType().getValue()
                        );
                    }
                }
            }
            return ServerSentEvent.builder(JsonUtils.toJsonString(chatResponse))
                    // Event name must match what the frontend listens for.
                    .event("message")
                    .build();
        });
    }
}