package org.leiyang.service.impls;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.zhipu.oapi.Constants;
import com.zhipu.oapi.service.v4.model.*;
import io.reactivex.Flowable;
import org.leiyang.common.factories.AiClientFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.List;

@Service
public class InvokeAPIServiceImpl {

    private static final Logger log = LoggerFactory.getLogger(InvokeAPIServiceImpl.class);

    /** Request-id template; the single {@code %s} is filled with the current epoch millis. */
    private static final String REQUEST_ID_TEMPLATE = "lei_bm_%s";

    /** Jackson {@link ObjectMapper} is thread-safe once configured; reuse a single instance. */
    private static final ObjectMapper MAPPER = new ObjectMapper();

    /** Fixed wait before polling the async task result, in milliseconds. */
    private static final long ASYNC_WAIT_MILLIS = 30_000L;

    /**
     * Synchronous invocation: blocks until the model answers, then logs the
     * full API response as JSON.
     *
     * @param question the user prompt sent to the model
     */
    public void LLMAPIInvoke(String question) {
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model(Constants.ModelChatGLM4)
                .stream(Boolean.FALSE)
                .invokeMethod(Constants.invokeMethod)
                .messages(singleUserMessage(question))
                .requestId(newRequestId())
                .build();
        ModelApiResponse response = AiClientFactory.create().invokeModelApi(request);
        try {
            log.info("model output: {}", MAPPER.writeValueAsString(response));
        } catch (JsonProcessingException e) {
            log.error("print LLMAPIInvoke fail: ", e);
        }
    }

    /**
     * Asynchronous invocation: submits the task, waits a fixed interval, then
     * queries the result by task id and logs it.
     *
     * @param question the user prompt sent to the model
     * @throws InterruptedException if interrupted while waiting for the result;
     *                              propagated so the caller can handle cancellation
     */
    public void LLMAPIInvokeAsync(String question) throws InterruptedException {
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model(Constants.ModelChatGLM4)
                .stream(Boolean.FALSE)
                .invokeMethod(Constants.invokeMethodAsync)
                .messages(singleUserMessage(question))
                .requestId(newRequestId())
                .build();
        ModelApiResponse response = AiClientFactory.create().invokeModelApi(request);
        try {
            log.info("Task ID info: {}", MAPPER.writeValueAsString(response.getData()));
        } catch (JsonProcessingException e) {
            log.error("print LLMAPIInvokeAsync fail: ", e);
        }
        String taskId = response.getData().getId();
        // Fixed delay before polling; a retry loop with backoff would be more
        // robust if the model can take longer than this to finish.
        Thread.sleep(ASYNC_WAIT_MILLIS);
        // Query the finished result by task id.
        QueryModelResultRequest queryRequest = QueryModelResultRequest.builder().taskId(taskId).build();
        QueryModelResultResponse queryResponse = AiClientFactory.create().queryModelResult(queryRequest);
        try {
            log.info("model output: {}", MAPPER.writeValueAsString(queryResponse.getData()));
        } catch (JsonProcessingException e) {
            log.error("print LLMAPIInvokeAsync fail: ", e);
        }
    }

    /**
     * Streaming (SSE) invocation: returns a {@link Flowable} that emits the
     * incremental content chunks as the model generates them. On failure the
     * error is logged and an empty stream is returned.
     *
     * @param question the user prompt sent to the model
     * @return a stream of content deltas, or an empty stream on failure
     * @throws JsonProcessingException if the failure response cannot be serialized for logging
     */
    public Flowable<String> LLMAPIInvokeSSE(String question) throws JsonProcessingException {
        ChatCompletionRequest request = ChatCompletionRequest.builder()
                .model(Constants.ModelChatGLM4)
                .stream(Boolean.TRUE)
                .messages(singleUserMessage(question))
                .requestId(newRequestId())
                .build();
        ModelApiResponse response = AiClientFactory.create().invokeModelApi(request);
        if (response.isSuccess()) {
            // Keep only chunks that actually carry text content.
            return mapStreamToAccumulator(response.getFlowable())
                    .filter(acc -> acc.getDelta() != null && acc.getDelta().getContent() != null)
                    .map(acc -> acc.getDelta().getContent());
        }
        // Guard against a failed response that carries no error object.
        if (response.getError() != null) {
            log.error("SSE方式执行失败，{}", response.getError().getMessage());
        }
        log.info("Model output by SSE: {}", MAPPER.writeValueAsString(response.getData()));
        return Flowable.empty();
    }

    /** Builds a single-element message list holding one user-role message. */
    private static List<ChatMessage> singleUserMessage(String question) {
        List<ChatMessage> messages = new ArrayList<>(1);
        messages.add(new ChatMessage(ChatMessageRole.USER.value(), question));
        return messages;
    }

    /** Generates a per-call request id from the current timestamp. */
    private static String newRequestId() {
        return String.format(REQUEST_ID_TEMPLATE, System.currentTimeMillis());
    }

    /**
     * Wraps each raw stream chunk into a {@link ChatMessageAccumulator}.
     * Assumes every chunk carries at least one choice — TODO confirm with the SDK.
     */
    private static Flowable<ChatMessageAccumulator> mapStreamToAccumulator(Flowable<ModelData> flowable) {
        return flowable.map(chunk -> new ChatMessageAccumulator(chunk.getChoices().get(0).getDelta(),
                null, chunk.getChoices().get(0), chunk.getUsage(), chunk.getCreated(), chunk.getId()));
    }
}