package com.chatplus.application.aiprocessor.channel.chat.chatglm;

import cn.bugstack.chatglm.model.ChatCompletionRequest;
import cn.bugstack.chatglm.model.Model;
import cn.bugstack.chatglm.model.Role;
import cn.bugstack.chatglm.session.Configuration;
import cn.bugstack.chatglm.session.OpenAiSession;
import cn.bugstack.chatglm.session.OpenAiSessionFactory;
import cn.bugstack.chatglm.session.defaults.DefaultOpenAiSessionFactory;
import com.chatplus.application.aiprocessor.provider.ChatAiProcessorServiceProvider;
import com.chatplus.application.aiprocessor.channel.chat.ChatAiProcessorService;
import com.chatplus.application.aiprocessor.handler.dto.ChatRecordMessage;
import com.chatplus.application.aiprocessor.listener.impl.ChatGLMEventSourceListener;
import com.chatplus.application.common.logging.SouthernQuietLogger;
import com.chatplus.application.common.logging.SouthernQuietLoggerFactory;
import com.chatplus.application.enumeration.AiPlatformEnum;
import okhttp3.logging.HttpLoggingInterceptor;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;


/**
 * ChatGLM 机器人处理器
 *
 * @author chj
 * @date 2023/12/27
 **/
@Service(value = ChatAiProcessorServiceProvider.SERVICE_NAME_PRE + "ChatGLM")
public class ChatGLMAiProcessor extends ChatAiProcessorService {
    private static final SouthernQuietLogger LOGGER = SouthernQuietLoggerFactory.getLogger(ChatGLMAiProcessor.class);

    /**
     * Streams a chat completion from the ChatGLM (Zhipu AI) platform.
     * <p>
     * Builds an SDK session from the configured API host/key, resolves the model from the
     * session attribute {@code "modelName"}, converts the conversation history into
     * user/assistant prompt pairs, and starts an SSE completion whose events are pushed
     * to the client via {@link ChatGLMEventSourceListener}.
     *
     * @param messageContext ordered conversation history; entries with a null reply are
     *                       treated as the pending user prompt
     * @throws Exception propagated from SDK session setup or the completion call
     */
    @Override
    public void processStream(List<ChatRecordMessage> messageContext) throws Exception {
        initConfig();
        // 1. SDK client configuration
        Configuration configuration = new Configuration();
        configuration.setApiHost(getApiUrl());
        configuration.setApiSecretKey(getApiKeyList().getFirst());
        configuration.setLevel(HttpLoggingInterceptor.Level.BODY);
        // 2. Session factory
        OpenAiSessionFactory factory = new DefaultOpenAiSessionFactory(configuration);
        // 3. Open the session
        OpenAiSession openAiSession = factory.openSession();

        // Concatenate the unanswered prompt(s) — passed to the listener for bookkeeping.
        String prompt = messageContext.stream()
                .filter(msg -> msg.getReply() == null)
                .map(ChatRecordMessage::getPrompt)
                .collect(Collectors.joining());

        // Resolve the requested model; guard against a missing session attribute (was an NPE).
        Object modelNameAttr = getSession().getAttributes().get("modelName");
        Model model = modelNameAttr == null ? null : resolveModel(modelNameAttr.toString());
        if (model == null) {
            // BUG FIX: previously execution fell through after replying and a request
            // with a null model was still sent to the platform.
            replyMessage("模型不存在，请联系管理员");
            return;
        }

        // Request parameters: model plus shared generation settings.
        ChatCompletionRequest request = new ChatCompletionRequest();
        request.setModel(model);
        request.setMaxTokens(getCommonSetting().getMaxTokens());
        request.setTemperature(getCommonSetting().getTemperature());
        // SSE responses default to incremental chunks when "incremental" is unset.
        // The GLM_3_5_TURBO / GLM_4 models (released Jan 2024) return a different response
        // shape than older models; setIsCompatible(true) would normalize it — kept false here.
        request.setIsCompatible(false);

        // Rebuild the dialogue as alternating user/assistant prompts for the SDK.
        List<ChatCompletionRequest.Prompt> messages = new ArrayList<>(messageContext.size() * 2);
        for (ChatRecordMessage msg : messageContext) {
            messages.add(new ChatCompletionRequest.Prompt(Role.user.getCode(), msg.getPrompt()));
            if (StringUtils.isNotEmpty(msg.getReply())) {
                messages.add(new ChatCompletionRequest.Prompt(Role.assistant.getCode(), msg.getReply()));
            }
        }
        request.setPrompt(messages);
        openAiSession.completions(request, new ChatGLMEventSourceListener(getSession(), getChannel(), prompt));
    }

    /**
     * Maps the session's model-name attribute to the SDK {@link Model} constant.
     * Supported names: chatGLM_6b_SSE, chatglm_lite, chatglm_lite_32k, chatglm_std,
     * chatglm_pro, glm-4, glm-4v, glm-3-turbo.
     *
     * @param modelName model name stored in the session attributes
     * @return the matching {@link Model}, or {@code null} if the name is unknown
     */
    private static Model resolveModel(String modelName) {
        return switch (modelName) {
            case "chatGLM_6b_SSE" -> Model.CHATGLM_6B_SSE;
            case "chatglm_lite" -> Model.CHATGLM_LITE;
            case "chatglm_lite_32k" -> Model.CHATGLM_LITE_32K;
            case "chatglm_std" -> Model.CHATGLM_STD;
            case "chatglm_pro" -> Model.CHATGLM_PRO;
            case "glm-4" -> Model.GLM_4;
            case "glm-4v" -> Model.GLM_4V;
            case "glm-3-turbo" -> Model.GLM_3_5_TURBO;
            default -> null;
        };
    }

    /**
     * Synchronous processing is not supported for this channel.
     *
     * @param messageContext conversation history (ignored)
     * @return always {@code null}
     */
    @Override
    public String processSync(List<ChatRecordMessage> messageContext) {
        return null;
    }

    /** @return the platform identifier for this processor (ChatGLM). */
    @Override
    public AiPlatformEnum getChannel() {
        return AiPlatformEnum.CHAT_GLM;
    }

    /**
     * No-op: the ChatGLM SSE stream is not cancellable through this processor.
     *
     * @param sessionId session whose stream would be stopped (ignored)
     */
    @Override
    public void stopChat(String sessionId) {
        // Intentionally empty — stream cancellation is not implemented for this channel.
    }
}