package com.zyb.beidada.manager;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.zhipu.oapi.ClientV4;
import com.zhipu.oapi.Constants;
import com.zhipu.oapi.service.v4.model.*;
import com.zyb.beidada.common.ErrorCode;
import com.zyb.beidada.exception.ThrowUtils;
import io.reactivex.Flowable;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * @author 十八岁讨厌编程
 * @date 2025/5/7 17:34
 * @PROJECT_NAME beidada-backend
 * @description AI操控类
 */

@Component
public class AIManager {

    /** Finish reason the model reports on a normal, complete answer. */
    private static final String FINISH_REASON_STOP = "stop";

    // Injected ZhipuAI API client (bean configured elsewhere in the application context)
    @Resource
    private ClientV4 client;

    /**
     * Synchronous call to the ChatGLM-4 model.
     *
     * @param systemPrompt system-role prompt framing the conversation
     * @param userPrompt   user-role prompt carrying the actual question
     * @return the text content of the first choice returned by the model
     */
    public String solveWithChatGLM4(String systemPrompt, String userPrompt) {
        // Build the request with streaming disabled (blocking invocation)
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder()
                .model(Constants.ModelChatGLM4)
                .stream(Boolean.FALSE)
                .invokeMethod(Constants.invokeMethod)
                .messages(buildMessages(systemPrompt, userPrompt))
                .build();
        ModelApiResponse invokeModelApiResp = client.invokeModelApi(chatCompletionRequest);
        // Guard against a null/empty payload before indexing into the choices,
        // so a malformed response surfaces as a business exception, not an NPE
        ThrowUtils.throwIf(invokeModelApiResp == null
                        || invokeModelApiResp.getData() == null
                        || invokeModelApiResp.getData().getChoices() == null
                        || invokeModelApiResp.getData().getChoices().isEmpty(),
                ErrorCode.SYSTEM_ERROR, "AI返回结果异常");
        Choice choice = invokeModelApiResp.getData().getChoices().get(0);
        // Constant-first equals: finishReason may be null on abnormal termination
        ThrowUtils.throwIf(!FINISH_REASON_STOP.equals(choice.getFinishReason()),
                ErrorCode.SYSTEM_ERROR, "AI返回结果异常");
        return choice.getMessage().getContent().toString();
    }

    /**
     * Streaming call to the ChatGLM-4 model.
     *
     * @param systemPrompt system-role prompt framing the conversation
     * @param userPrompt   user-role prompt carrying the actual question
     * @return the reactive stream (observable/upstream source) of model data chunks
     */
    public Flowable<ModelData> solveWithChatGLM4ByStream(String systemPrompt, String userPrompt) {
        // Build the request with streaming enabled
        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest.builder()
                .model(Constants.ModelChatGLM4)
                .stream(Boolean.TRUE)
                .messages(buildMessages(systemPrompt, userPrompt))
                .build();
        // The SDK exposes the server-sent event stream as an RxJava Flowable
        return client.invokeModelApi(chatCompletionRequest).getFlowable();
    }

    /**
     * Builds the two-message conversation (system + user) shared by both call styles.
     *
     * @param systemPrompt content of the system-role message
     * @param userPrompt   content of the user-role message
     * @return mutable list of the two request messages, in system-then-user order
     */
    private List<ChatMessage> buildMessages(String systemPrompt, String userPrompt) {
        List<ChatMessage> messages = new ArrayList<>(2);
        messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), systemPrompt));
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), userPrompt));
        return messages;
    }

}
