package com.example.server.manage.gptdialogue.server.impl;


import cn.hutool.core.lang.Dict;
import com.alibaba.fastjson.JSON;
import com.azure.ai.openai.OpenAIClient;
import com.azure.ai.openai.OpenAIClientBuilder;
import com.azure.core.credential.AzureKeyCredential;
import com.example.server.manage.gptdialogue.properties.GptProperties;
import com.example.server.manage.gptdialogue.server.ChatGptSseService;
import com.example.server.util.threadpool.XjThreadUtil;
import com.example.server.util.validated.ValidUtil;
import com.fasterxml.jackson.databind.node.TextNode;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.ai.azure.openai.AzureOpenAiChatClient;
import org.springframework.ai.azure.openai.AzureOpenAiChatOptions;
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Sinks;

import java.net.Proxy;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

@Service
@Slf4j
public class ChatGptSeeServiceImpl implements ChatGptSseService {

    @Autowired
    private GptProperties gptProperties;

    Proxy proxy = Proxy.NO_PROXY;


    @Override
    public Flux<Dict> chat(String content, String model, Integer maxTokens, Float temperature) {
        ValidUtil.isTrue(StringUtils.isBlank(content.trim()), "发送聊天内容不能为空");

        // 参数处理
        String url = gptProperties.getUrl();
        String key = gptProperties.getKeys().get(0);
        Integer timeout = gptProperties.getTimeout();
        model = StringUtils.isNotBlank(model) ? model : gptProperties.getDefaultModel();
        maxTokens = maxTokens != null ? maxTokens : gptProperties.getDefaultMaxTokens();
        temperature = temperature != null ? temperature : gptProperties.getDefaultTemperature();
        // chatGPT 构建
        AzureOpenAiChatClient chatClient = this.getAzureOpenAiChatClient(url, key, model, maxTokens, temperature);
        List<Message> messages = new ArrayList<>();
        String[] contents = content.split("\\|\\|");
        for (String c : contents) {
            messages.add(new UserMessage(c));
        }

        // Flux<T> t 的数据类型, 就是 sink.next 添加的数据类型, 建议使用对象, 不要使用基础数据类型
        return Flux.create(sink -> {
            XjThreadUtil.asyncExecute(() -> {
                try {
                    // 发起请求
                    Flux<ChatResponse> stream = chatClient.stream(new Prompt(messages));
                    // 想接收到的消息使用流方式返回给用户端
                    stream.subscribe(
                            res -> {
                                if (res.getResult().getOutput().getContent() != null) {
                                    sink.next(Dict.create().set("value", res.getResult().getOutput().getContent()));
                                    //  XjThreadUtil.sleep(100);
                                }
                            },
                            e -> {
                                sink.next(Dict.create().set("value", e.getMessage()));
                            },
                            sink::complete
                    );
                }catch (Exception e){
                    sink.next(Dict.create().set("value", e.getMessage()));
                    sink.complete();
                }
            });
        });
    }


    /**
     * 获得 azure ai 的请求对象
     *
     * @param model 模型
     * @param key 关键
     * @param endpoint 端点
     * @param maxTokens 最大 tokens
     * @return {@link AzureOpenAiChatClient}
     */
    private AzureOpenAiChatClient getAzureOpenAiChatClient(String endpoint, String key, String model, Integer maxTokens, float temperature) {
        List<String> models = new ArrayList<>();
        models.add("o1-preview");
        models.add("o1-mini");

        // 请求构建
        OpenAIClient openAIClient = new OpenAIClientBuilder().credential(new AzureKeyCredential(key)).endpoint(endpoint).buildClient();
        // 请求数据
        return new AzureOpenAiChatClient(openAIClient, AzureOpenAiChatOptions.builder()
                .withModel(model)
                .withMaxTokens(models.contains(model) ? null : maxTokens)
                .withTemperature(models.contains(model) ? null : temperature)
                .build());
    }
}
