package com.zxy.Answer.manager;

import com.zhipu.oapi.ClientV4;
import com.zhipu.oapi.Constants;
import com.zhipu.oapi.service.v4.model.*;
import io.reactivex.Flowable;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.List;

/**
 * @author zxy
 * @since 2024/6/9
 **/
@Component
public class AiManager {

    /** Higher temperature: more diverse / creative output. */
    private static final Float GOOD_TEMPERATURE = 0.9f;

    /** Lower temperature: more deterministic / precise output. */
    private static final Float BAD_TEMPERATURE = 0.2f;

    @Resource
    private ClientV4 client;

    /**
     * Synchronous request using the high (creative) temperature preset.
     *
     * @param systemMsg system prompt message
     * @param userMsg   user message
     * @return the AI answer text
     */
    public String doSyncGoodRequest(String systemMsg, String userMsg) {
        return doRequest(systemMsg, userMsg, Boolean.FALSE, GOOD_TEMPERATURE);
    }

    /**
     * Synchronous request using the low (precise) temperature preset.
     *
     * @param systemMsg system prompt message
     * @param userMsg   user message
     * @return the AI answer text
     */
    public String doSyncBadRequest(String systemMsg, String userMsg) {
        return doRequest(systemMsg, userMsg, Boolean.FALSE, BAD_TEMPERATURE);
    }

    /**
     * Synchronous request with a caller-supplied temperature.
     *
     * @param systemMsg   system prompt message
     * @param userMsg     user message
     * @param temperature sampling temperature (lower = more deterministic)
     * @return the AI answer text
     */
    public String doSyncRequest(String systemMsg, String userMsg, Float temperature) {
        return doRequest(systemMsg, userMsg, Boolean.FALSE, temperature);
    }

    /**
     * General request (simplified message passing): wraps the system and user
     * strings into a two-message conversation and delegates to
     * {@link #doRequest(List, Boolean, Float)}.
     *
     * @param systemMsg   system prompt message
     * @param userMsg     user message
     * @param stream      whether to request streaming output
     * @param temperature sampling temperature (lower = more deterministic)
     * @return the AI answer text
     */
    public String doRequest(String systemMsg, String userMsg, Boolean stream, Float temperature) {
        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), systemMsg));
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), userMsg));
        return doRequest(messages, stream, temperature);
    }

    /**
     * General request against the GLM-4 model.
     *
     * @param messages    full conversation message list
     * @param stream      whether to request streaming output
     * @param temperature sampling temperature (lower = more deterministic)
     * @return the content of the first choice's message
     */
    public String doRequest(List<ChatMessage> messages, Boolean stream, Float temperature) {
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder()
                .model(Constants.ModelChatGLM4)
                .stream(stream)
                .temperature(temperature)
                .invokeMethod(Constants.invokeMethod)
                .messages(messages)
                .build();
        ModelApiResponse invokeModelApiResp = client.invokeModelApi(chatCompletionRequest);
        // Return the message content only. The previous Choice#toString() would
        // serialize the whole Choice object (index, role, metadata), not just
        // the answer text.
        return invokeModelApiResp.getData().getChoices().get(0).getMessage().getContent().toString();
    }

    /**
     * Streaming request (simplified): wraps the system and user strings into a
     * two-message conversation and delegates to
     * {@link #doStreamRequest(List, Float)}.
     *
     * @param systemMsg   system prompt message
     * @param userMsg     user message
     * @param temperature sampling temperature (lower = more deterministic)
     * @return a {@link Flowable} emitting incremental model data chunks
     */
    public Flowable<ModelData> doStreamRequest(String systemMsg, String userMsg, Float temperature) {
        List<ChatMessage> messages = new ArrayList<>();
        messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), systemMsg));
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), userMsg));
        return doStreamRequest(messages, temperature);
    }

    /**
     * Streaming request against the GLM-4 model.
     *
     * @param messages    full conversation message list
     * @param temperature sampling temperature (lower = more deterministic)
     * @return a {@link Flowable} emitting incremental model data chunks
     */
    public Flowable<ModelData> doStreamRequest(List<ChatMessage> messages, Float temperature) {
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder()
                .model(Constants.ModelChatGLM4)
                .stream(Boolean.TRUE)
                .temperature(temperature)
                .invokeMethod(Constants.invokeMethod)
                .messages(messages)
                .build();
        ModelApiResponse invokeModelApiResp = client.invokeModelApi(chatCompletionRequest);
        return invokeModelApiResp.getFlowable();
    }
}
