package com.liuqi.openai.model;

import com.liuqi.openai.core.chat.ChatCompletionChoice;
import com.liuqi.openai.core.chat.ChatCompletionRequest;
import com.liuqi.openai.core.chat.ChatCompletionResponse;
import com.liuqi.openai.core.chat.Delta;
import com.liuqi.openai.core.chat.Tool;
import com.liuqi.openai.core.client.Executor;
import com.liuqi.openai.core.client.OpenAiRemoteException;
import com.liuqi.openai.core.client.StreamHandler;
import com.liuqi.openai.core.shared.StreamOptions;
import com.liuqi.openai.data.message.AiMessage;
import com.liuqi.openai.data.message.ChatMessage;
import com.liuqi.openai.model.chat.ChatModel;
import com.liuqi.openai.model.chat.StreamingChatModel;
import com.liuqi.openai.model.common.AbstractOpenAiChatModel;
import com.liuqi.openai.model.handler.StreamingResponseHandler;
import com.liuqi.openai.model.output.Response;
import com.liuqi.openai.util.StringUtil;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * @author liuqi
 * @date 2025/7/20
 **/
/**
 * OpenAI-compatible chat model supporting both blocking and streaming chat completions.
 *
 * <p>Instances are created via {@link #builder()}. All model parameters (topK, thinking
 * options, tool choice, search options) are fixed at construction time, except the tool
 * list, which may be replaced after construction through {@link #setTools(List)}.
 *
 * @author liuqi
 * @date 2025/7/20
 **/
public class OpenAiChatModel extends AbstractOpenAiChatModel implements ChatModel, StreamingChatModel {

    private final Integer topK;
    private final Boolean enableThinking;
    private final Integer thinkingBudget;
    private final Object toolChoice;
    private final Boolean parallelToolCalls;
    private final Boolean enableSearch;
    private final Map<String, Object> searchOptions;

    // Deliberately mutable: callers may register or replace tools after construction.
    private List<Tool> tools;

    public List<Tool> getTools() {
        return tools;
    }

    public void setTools(List<Tool> tools) {
        this.tools = tools;
    }

    /**
     * Creates a new builder for {@link OpenAiChatModel}.
     *
     * @return a fresh {@link OpenAiChatBuilder}
     */
    public static OpenAiChatBuilder builder() {
        return new OpenAiChatBuilder();
    }

    private OpenAiChatModel(OpenAiChatBuilder builder) {
        super(builder);
        this.topK = builder.topK;
        this.enableThinking = builder.enableThinking;
        this.thinkingBudget = builder.thinkingBudget;
        this.tools = builder.tools;
        this.toolChoice = builder.toolChoice;
        this.parallelToolCalls = builder.parallelToolCalls;
        this.enableSearch = builder.enableSearch;
        this.searchOptions = builder.searchOptions;
    }

    /**
     * Fluent builder for {@link OpenAiChatModel}; common parameters are inherited
     * from {@link AbstractOpenAiChatModelBuilder}.
     */
    public static class OpenAiChatBuilder
            extends AbstractOpenAiChatModelBuilder<OpenAiChatModel, OpenAiChatBuilder> {
        private Integer topK;
        private Boolean enableThinking;
        private Integer thinkingBudget;
        private List<Tool> tools;
        private Object toolChoice;
        private Boolean parallelToolCalls;
        // Made private for consistency with the sibling builder fields; the enclosing
        // class can still read them (nested-class access).
        private Boolean enableSearch;
        private Map<String, Object> searchOptions;

        public OpenAiChatBuilder topK(Integer topK) {
            this.topK = topK;
            return this;
        }

        public OpenAiChatBuilder enableThinking(Boolean enableThinking) {
            this.enableThinking = enableThinking;
            return this;
        }

        public OpenAiChatBuilder thinkingBudget(Integer thinkingBudget) {
            this.thinkingBudget = thinkingBudget;
            return this;
        }

        public OpenAiChatBuilder tools(List<Tool> tools) {
            this.tools = tools;
            return this;
        }

        public OpenAiChatBuilder toolChoice(Object toolChoice) {
            this.toolChoice = toolChoice;
            return this;
        }

        public OpenAiChatBuilder parallelToolCalls(Boolean parallelToolCalls) {
            this.parallelToolCalls = parallelToolCalls;
            return this;
        }

        public OpenAiChatBuilder enableSearch(Boolean enableSearch) {
            this.enableSearch = enableSearch;
            return this;
        }

        public OpenAiChatBuilder searchOptions(Map<String, Object> searchOptions) {
            this.searchOptions = searchOptions;
            return this;
        }

        @Override
        public OpenAiChatModel build() {
            return new OpenAiChatModel(this);
        }
    }

    /**
     * Executes a blocking (non-streaming) chat completion.
     *
     * @param messages the conversation history to send
     * @return the model's response wrapped with token usage and finish reason
     * @throws OpenAiModelException if the remote call fails or returns no choices
     */
    @Override
    public Response<AiMessage> generate(List<ChatMessage> messages) throws OpenAiModelException {
        return generate0(messages, null);
    }

    /**
     * Executes a streaming chat completion, delivering incremental content and
     * reasoning text to {@code handler} as it arrives, then returning the final
     * aggregated response.
     *
     * @param messages the conversation history to send
     * @param handler  receives streamed deltas; must not be {@code null}
     * @return the final aggregated response
     * @throws NullPointerException if {@code handler} is {@code null}
     * @throws OpenAiModelException if the remote call fails or returns no choices
     */
    @Override
    public Response<AiMessage> generate(List<ChatMessage> messages, StreamingResponseHandler handler) throws OpenAiModelException {
        // Same exception type and message as before, expressed idiomatically.
        Objects.requireNonNull(handler, "streaming response: handler cannot be null.");
        return generate0(messages, handler);
    }

    /**
     * Shared implementation for both generate overloads.
     *
     * @param messages the conversation history to send
     * @param handler  non-null to enable streaming; null for a blocking call
     */
    private Response<AiMessage> generate0(List<ChatMessage> messages, StreamingResponseHandler handler) throws OpenAiModelException {
        try {
            // A handler being present is what switches the request to streaming mode.
            boolean stream = handler != null;

            // Build the request payload.
            ChatCompletionRequest request = request(messages, stream);

            // Create the executor for this request.
            Executor<ChatCompletionResponse> executor = openAiClient.chatCompletion(request);

            // Wire up delta forwarding when streaming.
            if (stream) {
                executor.injectStreamHandler(new StreamHandler<ChatCompletionResponse>() {
                    @Override
                    public void onResponse(ChatCompletionResponse response) {
                        List<ChatCompletionChoice> choices = response.getChoices();
                        if (choices != null && !choices.isEmpty()) {
                            Delta delta = choices.get(0).getDelta();
                            if (delta != null) {
                                // Regular content tokens.
                                String text = delta.getContent();
                                if (StringUtil.isNotEmpty(text)) {
                                    handler.onNext(text);
                                }

                                // Reasoning ("thinking") tokens, when the model emits them.
                                String reasoningContent = delta.getReasoningContent();
                                if (StringUtil.isNotEmpty(reasoningContent)) {
                                    handler.onThink(reasoningContent);
                                }
                            }
                        }
                    }

                    @Override
                    public void onComplete() {
                        handler.onComplete();
                    }

                    @Override
                    public void onError(Throwable t) {
                        handler.onError(t);
                    }
                });
            }

            // Issue the request (blocks until the final/aggregated response is available).
            ChatCompletionResponse response = executor.execute();

            // Guard against an empty choices array so callers see the declared
            // OpenAiModelException instead of a raw NPE/IndexOutOfBoundsException.
            List<ChatCompletionChoice> choices = response.getChoices();
            if (choices == null || choices.isEmpty()) {
                throw new OpenAiModelException("OpenAiChatModel Error: response contains no choices.", null);
            }
            ChatCompletionChoice completionChoice = choices.get(0);

            // Assemble the public response wrapper.
            return Response.from(
                    aiMessageFrom(response),
                    tokenUsageFrom(response.getUsage()),
                    finishReasonFrom(completionChoice.getFinishReason())
            );
        } catch (OpenAiRemoteException e) {
            // Preserve the remote failure as the cause.
            throw new OpenAiModelException("OpenAiChatModel Error.", e);
        }
    }

    /**
     * Builds the wire-level completion request from the model's configured
     * parameters plus the given messages.
     *
     * @param messages the conversation history to send
     * @param stream   whether to request a streamed response (usage stats are
     *                 requested in the stream when enabled)
     */
    private ChatCompletionRequest request(List<ChatMessage> messages, boolean stream) {
        return ChatCompletionRequest.builder()
                .model(modelName)
                .messages(toOpenAiMessages(messages))
                .stream(stream)
                .streamOptions(stream ? StreamOptions.builder().includeUsage(true).build() : null)
                .temperature(temperature)
                .topP(topP)
                .topK(topK)
                .presencePenalty(presencePenalty)
                .frequencyPenalty(frequencyPenalty)
                .maxTokens(maxTokens)
                .n(n)
                .enableThinking(enableThinking)
                .thinkingBudget(thinkingBudget)
                .seed(seed)
                .stop(stop)
                .tools(tools)
                // Default to "auto" tool choice when tools are configured but no
                // explicit choice was given (matches the established behavior).
                .toolChoice(toolChoice == null && tools != null ? "auto" : toolChoice)
                .parallelToolCalls(parallelToolCalls)
                .enableSearch(enableSearch)
                .searchOptions(searchOptions)
                .build();
    }

}
