package com.xkcyy.one.ai.llm;

import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.StrUtil;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategies;
import com.xkcyy.one.ai.llm.model.*;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * OpenAI API client.
 *
 * <p>Wraps an OpenAI-compatible chat-completions endpoint and exposes
 * synchronous ({@code chat}), streaming ({@code chatStream}) and
 * tool-calling ({@code chatWithTools} / {@code chatStreamWithTools})
 * variants. Streaming responses are consumed as server-sent events (SSE)
 * and re-emitted as {@link ChatResult} elements.
 *
 * @author yuand
 */
@Slf4j
@Service
public class OpenAiClient {

    /** Path of the chat-completions endpoint, appended to a base URL. */
    private static final String CHAT_COMPLETIONS_PATH = "/chat/completions";
    /** Prefix of SSE data lines. */
    private static final String SSE_DATA_PREFIX = "data: ";
    /** Sentinel payload that terminates an SSE stream. */
    private static final String SSE_DONE = "[DONE]";

    private final WebClient webClient;
    private final ObjectMapper objectMapper;
    private final OpenAiModelOptions defaultOptions;

    /**
     * Creates a client from {@link OpenAiModelOptions#defaultOptions()}.
     */
    public OpenAiClient() {
        this(OpenAiModelOptions.defaultOptions());
    }

    /**
     * Creates a client from the given model options.
     *
     * @param options model options; must carry the API key and base URL
     */
    public OpenAiClient(OpenAiModelOptions options) {
        this.webClient = buildWebClient(options.getBaseUrl(), options.getApiKey());
        this.objectMapper = buildObjectMapper();
        this.defaultOptions = options;
    }

    /**
     * Creates a client from Spring configuration properties.
     *
     * @param apiKey       API key
     * @param baseUrl      base URL of the OpenAI-compatible service
     * @param defaultModel default model name
     * @param temperature  sampling temperature
     * @param maxTokens    maximum number of tokens to generate
     */
    public OpenAiClient(
            @Value("${spring.ai.openai.api-key}") String apiKey,
            @Value("${spring.ai.openai.base-url:https://api.openai.com}") String baseUrl,
            @Value("${spring.ai.openai.model:gpt-3.5-turbo}") String defaultModel,
            @Value("${spring.ai.openai.temperature:0.7}") Double temperature,
            @Value("${spring.ai.openai.max-tokens:2000}") Integer maxTokens) {

        this.webClient = buildWebClient(baseUrl, apiKey);

        // BUGFIX: this constructor previously left the ObjectMapper unconfigured
        // (no SNAKE_CASE strategy, unknown properties fatal), so snake_case response
        // fields such as reasoning_content / finish_reason did not bind. Configure it
        // exactly like the options-based constructor.
        this.objectMapper = buildObjectMapper();

        // BUGFIX: record apiKey/baseUrl on the default options too. Previously
        // defaultOptions.getBaseUrl() was null here, which made the streaming
        // endpoints resolve to the literal URL "null/chat/completions".
        // NOTE(review): assumes OpenAiModelOptions' builder exposes apiKey/baseUrl
        // setters (its getters are used by the options constructor) — confirm.
        this.defaultOptions = OpenAiModelOptions.builder()
                .apiKey(apiKey)
                .baseUrl(baseUrl)
                .model(defaultModel)
                .temperature(temperature)
                .maxTokens(maxTokens)
                .isReasoning(false)
                .reasoningStartTag("<thinking>")
                .reasoningEndTag("</thinking>")
                .answerStartTag("<answer>")
                .answerEndTag("</answer>")
                .build();
    }

    /**
     * Creates an {@link OpenAiClient} builder.
     *
     * @return a new builder instance
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * Sends a chat request with the default options.
     *
     * @param prompt user prompt
     * @return chat result
     */
    public ChatResult chat(String prompt) {
        return chat(prompt, defaultOptions);
    }

    /**
     * Sends a chat request with custom options.
     *
     * @param prompt  user prompt
     * @param options model options
     * @return chat result
     */
    public ChatResult chat(String prompt, OpenAiModelOptions options) {
        return chat(Collections.singletonList(userMessage(prompt)), options);
    }

    /**
     * Sends a chat request carrying a conversation history.
     *
     * @param messages message history
     * @return chat result
     */
    public ChatResult chat(List<ChatMessage> messages) {
        return chat(messages, defaultOptions);
    }

    /**
     * Sends a chat request carrying a conversation history and custom options.
     *
     * <p>If {@code options.isReasoning} is set, a system prompt is prepended that
     * instructs the model to wrap its thinking and answer in the configured tags,
     * and the response is split accordingly.
     *
     * @param messages message history
     * @param options  model options
     * @return chat result
     * @throws RuntimeException if serialization or the HTTP call fails
     */
    public ChatResult chat(List<ChatMessage> messages, OpenAiModelOptions options) {
        boolean isReasoning = Boolean.TRUE.equals(options.getIsReasoning());
        List<ChatMessage> promptMessages = isReasoning
                ? withReasoningSystemPrompt(messages, options)
                : messages;

        ChatRequest request = buildChatRequest(promptMessages, options, false);

        try {
            String requestBody = objectMapper.writeValueAsString(request);

            // NOTE(review): this method posts to "/v1/chat/completions" relative to the
            // client's base URL, while the streaming/tool variants use
            // "<baseUrl>/chat/completions" — one of the two conventions is likely wrong
            // for any given deployment; confirm which base URLs include "/v1".
            ChatResponse response = webClient.post()
                    .uri("/v1" + CHAT_COMPLETIONS_PATH)
                    .contentType(MediaType.APPLICATION_JSON)
                    .bodyValue(requestBody)
                    .retrieve()
                    .bodyToMono(ChatResponse.class)
                    .block();

            if (response != null && response.getChoices() != null && !response.getChoices().isEmpty()) {
                String content = response.getChoices().get(0).getMessage().getContent();

                if (isReasoning) {
                    return parseReasoningResponse(content, options, response.getUsage());
                }
                return ChatResult.builder()
                        .content(content)
                        .reasoningContent(null)
                        .toolCalls(null)
                        .usage(response.getUsage())
                        .isComplete(true)
                        .build();
            }

            return buildEmptyResult();
        } catch (JsonProcessingException e) {
            log.error("Error serializing request", e);
            throw new RuntimeException("Failed to serialize request", e);
        } catch (Exception e) {
            log.error("Error in chat request", e);
            throw new RuntimeException("Failed to complete chat request", e);
        }
    }

    /**
     * Sends a streaming chat request with the default options.
     *
     * @param prompt user prompt
     * @return streaming chat results
     */
    public Flux<ChatResult> chatStream(String prompt) {
        return chatStream(prompt, defaultOptions);
    }

    /**
     * Sends a streaming chat request with custom options.
     *
     * @param prompt  user prompt
     * @param options model options
     * @return streaming chat results
     */
    public Flux<ChatResult> chatStream(String prompt, OpenAiModelOptions options) {
        return chatStream(Collections.singletonList(userMessage(prompt)), options);
    }

    /**
     * Sends a streaming chat request carrying a conversation history.
     *
     * @param messages message history
     * @return streaming chat results
     */
    public Flux<ChatResult> chatStream(List<ChatMessage> messages) {
        return chatStream(messages, defaultOptions);
    }

    /**
     * Sends a streaming chat request carrying a conversation history and custom
     * options. Each SSE delta is emitted as a partial {@link ChatResult}; the
     * terminal {@code [DONE]} event is emitted as a result with
     * {@code isComplete=true} carrying the full accumulated content.
     *
     * @param messages message history
     * @param options  model options
     * @return streaming chat results
     */
    public Flux<ChatResult> chatStream(List<ChatMessage> messages, OpenAiModelOptions options) {
        ChatRequest request = buildChatRequest(messages, options, true);

        try {
            String requestBody = objectMapper.writeValueAsString(request);

            // Accumulates delta contents so the terminal [DONE] result can carry the
            // whole assembled message. (BUGFIX: previously the accumulator was never
            // appended to, so the final result always had empty content.)
            StringBuilder contentBuilder = new StringBuilder();

            return webClient.post()
                    .uri(chatCompletionsUri(options))
                    .contentType(MediaType.APPLICATION_JSON)
                    .bodyValue(requestBody)
                    .accept(MediaType.TEXT_EVENT_STREAM)
                    .retrieve()
                    .onStatus(status -> status.is4xxClientError() || status.is5xxServerError(),
                            clientResponse -> clientResponse.bodyToMono(String.class)
                                    .flatMap(errorBody -> {
                                        log.error("流式请求失败 - 状态码: {}, 响应头: {}, 响应体: {}",
                                                clientResponse.statusCode(),
                                                clientResponse.headers().asHttpHeaders(),
                                                errorBody);
                                        return Mono.error(new RuntimeException("流式请求失败: " + errorBody));
                                    }))
                    .bodyToFlux(String.class)
                    .doOnSubscribe(sub -> log.info("开始请求: {}", requestBody))
                    .doOnNext(chunk -> log.debug("收到响应块: {}", chunk))
                    .doOnError(error -> log.error("请求失败: {}", error.getMessage(), error))
                    .doOnComplete(() -> log.info("请求完成"))
                    .filter(chunk -> chunk != null && !chunk.isEmpty())
                    // mapNotNull (Reactor 3.4+): a null return simply drops the element,
                    // whereas map() would signal a NullPointerException.
                    .mapNotNull(chunk -> parseStreamChunk(chunk, contentBuilder));
        } catch (JsonProcessingException e) {
            log.error("Error serializing request", e);
            return Flux.error(new RuntimeException("Failed to serialize request", e));
        } catch (Exception e) {
            log.error("Error in chat stream request", e);
            return Flux.error(new RuntimeException("Failed to complete chat stream request", e));
        }
    }

    /**
     * Sends a chat request with tool definitions and the default options.
     *
     * @param prompt user prompt
     * @param tools  tool definitions
     * @return chat result
     */
    public ChatResult chatWithTools(String prompt, List<ToolDefinition> tools) {
        return chatWithTools(prompt, tools, defaultOptions);
    }

    /**
     * Sends a chat request with tool definitions and custom options.
     *
     * @param prompt  user prompt
     * @param tools   tool definitions
     * @param options model options
     * @return chat result
     */
    public ChatResult chatWithTools(String prompt, List<ToolDefinition> tools, OpenAiModelOptions options) {
        return chatWithTools(Collections.singletonList(userMessage(prompt)), tools, options);
    }

    /**
     * Sends a chat request with a conversation history and tool definitions.
     *
     * @param messages message history
     * @param tools    tool definitions
     * @return chat result
     */
    public ChatResult chatWithTools(List<ChatMessage> messages, List<ToolDefinition> tools) {
        return chatWithTools(messages, tools, defaultOptions);
    }

    /**
     * Sends a chat request with a conversation history, tool definitions and
     * custom options.
     *
     * @param messages message history
     * @param tools    tool definitions
     * @param options  model options
     * @return chat result, including any tool calls requested by the model
     * @throws RuntimeException if serialization or the HTTP call fails
     */
    public ChatResult chatWithTools(List<ChatMessage> messages, List<ToolDefinition> tools,
            OpenAiModelOptions options) {

        ChatRequest request = buildChatRequestWithTools(messages, tools, options, false);

        try {
            String requestBody = objectMapper.writeValueAsString(request);
            // BUGFIX: this log line previously printed the mojibake placeholder "<UNK>".
            log.info("chatWithTools request body: {}", requestBody);
            ChatResponse response = webClient.post()
                    .uri(chatCompletionsUri(options))
                    .contentType(MediaType.APPLICATION_JSON)
                    .bodyValue(requestBody)
                    .retrieve()
                    .onStatus(status -> status.is4xxClientError() || status.is5xxServerError(),
                            clientResponse -> clientResponse.bodyToMono(String.class)
                                    .flatMap(errorBody -> {
                                        log.error("请求失败 - 状态码: {}, 响应头: {}, 响应体: {}",
                                                clientResponse.statusCode(),
                                                clientResponse.headers().asHttpHeaders(),
                                                errorBody);
                                        return Mono.error(new RuntimeException("请求失败: " + errorBody));
                                    }))
                    .bodyToMono(ChatResponse.class)
                    .block();

            return processToolCallResult(response);
        } catch (JsonProcessingException e) {
            log.error("Error serializing request", e);
            throw new RuntimeException("Failed to serialize request", e);
        } catch (Exception e) {
            log.error("Error in chat with tools request", e);
            throw new RuntimeException("Failed to complete chat with tools request", e);
        }
    }

    /**
     * Sends a streaming chat request with tool definitions and the default options.
     *
     * @param prompt user prompt
     * @param tools  tool definitions
     * @return streaming chat results
     */
    public Flux<ChatResult> chatStreamWithTools(String prompt, List<ToolDefinition> tools) {
        return chatStreamWithTools(prompt, tools, defaultOptions);
    }

    /**
     * Sends a streaming chat request with tool definitions and custom options.
     *
     * @param prompt  user prompt
     * @param tools   tool definitions
     * @param options model options
     * @return streaming chat results
     */
    public Flux<ChatResult> chatStreamWithTools(String prompt, List<ToolDefinition> tools, OpenAiModelOptions options) {
        return chatStreamWithTools(Collections.singletonList(userMessage(prompt)), tools, options);
    }

    /**
     * Sends a streaming chat request with a conversation history and tool
     * definitions.
     *
     * @param messages message history
     * @param tools    tool definitions
     * @return streaming chat results
     */
    public Flux<ChatResult> chatStreamWithTools(List<ChatMessage> messages, List<ToolDefinition> tools) {
        return chatStreamWithTools(messages, tools, defaultOptions);
    }

    /**
     * Sends a streaming chat request with a conversation history, tool
     * definitions and custom options. Tool-call fragments are accumulated in
     * arrival order and emitted as a whole on the terminal {@code [DONE]} event.
     *
     * @param messages message history
     * @param tools    tool definitions
     * @param options  model options
     * @return streaming chat results
     */
    public Flux<ChatResult> chatStreamWithTools(List<ChatMessage> messages, List<ToolDefinition> tools,
            OpenAiModelOptions options) {

        ChatRequest request = buildChatRequestWithTools(messages, tools, options, true);

        try {
            String requestBody = objectMapper.writeValueAsString(request);

            // A List preserves tool-call order; the AtomicReference makes the holder
            // effectively final for the lambda below.
            AtomicReference<List<ToolCallResult>> toolCallsList = new AtomicReference<>(new ArrayList<>());

            return webClient.post()
                    .uri(chatCompletionsUri(options))
                    .contentType(MediaType.APPLICATION_JSON)
                    .bodyValue(requestBody)
                    .accept(MediaType.TEXT_EVENT_STREAM)
                    .retrieve()
                    .onStatus(status -> status.is4xxClientError() || status.is5xxServerError(),
                            clientResponse -> clientResponse.bodyToMono(String.class)
                                    .flatMap(errorBody -> {
                                        log.error("流式请求失败 - 状态码: {}, 响应头: {}, 响应体: {}",
                                                clientResponse.statusCode(),
                                                clientResponse.headers().asHttpHeaders(),
                                                errorBody);
                                        return Mono.error(new RuntimeException("流式请求失败: " + errorBody));
                                    }))
                    .bodyToFlux(String.class)
                    .doOnSubscribe(sub -> log.info("开始请求: {}", requestBody))
                    .doOnNext(chunk -> log.debug("收到响应块: {}", chunk))
                    .doOnError(error -> log.error("请求失败: {}", error.getMessage(), error))
                    .doOnComplete(() -> log.info("请求完成"))
                    // BUGFIX: the original used map(...) with a nullable lambda plus a
                    // trailing filter — Flux.map signals NullPointerException on a null
                    // mapper result, so parse errors crashed the stream. mapNotNull
                    // drops null elements instead.
                    .mapNotNull(chunk -> parseToolStreamChunk(chunk, toolCallsList));
        } catch (JsonProcessingException e) {
            log.error("Error serializing request", e);
            return Flux.error(new RuntimeException("Failed to serialize request", e));
        } catch (Exception e) {
            log.error("Error in chat with tools stream request", e);
            return Flux.error(new RuntimeException("Failed to complete chat with tools stream request", e));
        }
    }

    /**
     * Builds a WebClient with the Authorization and Content-Type defaults shared
     * by every constructor.
     */
    private static WebClient buildWebClient(String baseUrl, String apiKey) {
        return WebClient.builder()
                .baseUrl(baseUrl)
                .defaultHeader("Authorization", "Bearer " + apiKey)
                .defaultHeader("Content-Type", "application/json")
                .build();
    }

    /**
     * Builds the lenient snake_case ObjectMapper used for all request/response
     * (de)serialization.
     */
    private static ObjectMapper buildObjectMapper() {
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        mapper.setPropertyNamingStrategy(PropertyNamingStrategies.SNAKE_CASE);
        return mapper;
    }

    /**
     * Builds a single user-role message from a raw prompt.
     */
    private static ChatMessage userMessage(String prompt) {
        return ChatMessage.builder()
                .role("user")
                .content(prompt)
                .build();
    }

    /**
     * Resolves the absolute chat-completions URI: the per-request base URL when
     * set, otherwise the client's default base URL.
     */
    private String chatCompletionsUri(OpenAiModelOptions options) {
        return Optional.ofNullable(options.getBaseUrl()).orElse(this.defaultOptions.getBaseUrl())
                + CHAT_COMPLETIONS_PATH;
    }

    /**
     * Returns a copy of {@code messages} with the reasoning system prompt
     * prepended; the input list is not modified.
     */
    private List<ChatMessage> withReasoningSystemPrompt(List<ChatMessage> messages, OpenAiModelOptions options) {
        ChatMessage systemMessage = ChatMessage.builder()
                .role("system")
                .content(buildReasoningSystemPrompt(options))
                .build();

        List<ChatMessage> promptMessages = new ArrayList<>(messages.size() + 1);
        promptMessages.add(systemMessage);
        promptMessages.addAll(messages);
        return promptMessages;
    }

    /**
     * Parses one SSE chunk of a plain chat stream.
     *
     * @return a partial or terminal {@link ChatResult}, or {@code null} when the
     *         chunk carries nothing usable (keep-alive, no delta, parse error)
     *         and should be dropped
     */
    private ChatResult parseStreamChunk(String chunk, StringBuilder contentBuilder) {
        try {
            // SSE frames are prefixed with "data: "; strip it before parsing.
            if (chunk.startsWith(SSE_DATA_PREFIX)) {
                chunk = chunk.substring(SSE_DATA_PREFIX.length());
            }

            // Terminal sentinel: emit the fully accumulated content.
            if (SSE_DONE.equals(chunk)) {
                return ChatResult.builder()
                        .content(contentBuilder.toString())
                        .reasoningContent(null)
                        .toolCalls(null)
                        .isComplete(true)
                        .build();
            }

            ChatResponse response = objectMapper.readValue(chunk, ChatResponse.class);
            if (response.getChoices() != null && !response.getChoices().isEmpty()) {
                ChatChoice choice = response.getChoices().get(0);
                if (choice.getDelta() != null) {
                    String deltaContent = choice.getDelta().getContent();
                    if (deltaContent != null) {
                        contentBuilder.append(deltaContent);
                    }
                    return ChatResult.builder()
                            .content(deltaContent)
                            .reasoningContent(choice.getDelta().getReasoningContent())
                            .toolCalls(null)
                            .isComplete("stop".equalsIgnoreCase(choice.getFinishReason()))
                            .build();
                }
            }

            // BUGFIX: unusable chunks previously produced a result with
            // isComplete=true, falsely signaling completion mid-stream; drop them.
            return null;
        } catch (Exception e) {
            log.error("Error parsing stream chunk", e);
            return null;
        }
    }

    /**
     * Parses one SSE chunk of a tool-calling stream, accumulating tool-call
     * fragments into {@code toolCallsList}.
     *
     * @return a partial or terminal {@link ChatResult}, or {@code null} when the
     *         chunk should be dropped
     */
    private ChatResult parseToolStreamChunk(String chunk, AtomicReference<List<ToolCallResult>> toolCallsList) {
        try {
            if (chunk.startsWith(SSE_DATA_PREFIX)) {
                chunk = chunk.substring(SSE_DATA_PREFIX.length());
            }

            // Terminal sentinel: emit everything accumulated so far.
            if (SSE_DONE.equals(chunk)) {
                return ChatResult.builder().isComplete(true).toolCalls(toolCallsList.get()).build();
            }

            ChatResponse response = objectMapper.readValue(chunk, ChatResponse.class);
            return processStreamingToolCallResult(response, toolCallsList);
        } catch (Exception e) {
            log.error("Error parsing stream chunk", e);
            return null;
        }
    }

    /**
     * Builds the system prompt that asks a reasoning model to wrap its thinking
     * and its answer in the configured tags.
     */
    private String buildReasoningSystemPrompt(OpenAiModelOptions options) {
        return String.format(
                "请首先用%s和%s标签包围你的思考过程，然后用%s和%s标签包围你的最终回答。例如：\n\n%s这里是我的思考过程...%s\n\n%s这里是我的回答...%s",
                options.getReasoningStartTag(), options.getReasoningEndTag(),
                options.getAnswerStartTag(), options.getAnswerEndTag(),
                options.getReasoningStartTag(), options.getReasoningEndTag(),
                options.getAnswerStartTag(), options.getAnswerEndTag());
    }

    /**
     * Builds a chat request from messages and options.
     *
     * @param stream whether the request asks for a streamed response
     */
    private ChatRequest buildChatRequest(List<ChatMessage> messages, OpenAiModelOptions options, boolean stream) {
        return ChatRequest.builder()
                .model(options.getModel())
                .messages(messages)
                .temperature(options.getTemperature())
                .maxTokens(options.getMaxTokens())
                .topP(options.getTopP())
                .presencePenalty(options.getPresencePenalty())
                .frequencyPenalty(options.getFrequencyPenalty())
                .stream(stream)
                .build();
    }

    /**
     * Builds a chat request that additionally declares the available tools.
     *
     * @param stream whether the request asks for a streamed response
     */
    private ChatRequest buildChatRequestWithTools(
            List<ChatMessage> messages, List<ToolDefinition> tools, OpenAiModelOptions options, boolean stream) {
        return ChatRequest.builder()
                .model(options.getModel())
                .messages(messages)
                .tools(tools)
                .temperature(options.getTemperature())
                .maxTokens(options.getMaxTokens())
                .topP(options.getTopP())
                .presencePenalty(options.getPresencePenalty())
                .frequencyPenalty(options.getFrequencyPenalty())
                .stream(stream)
                .build();
    }

    /**
     * Splits a reasoning-model response into its thinking part and its answer
     * part using the tags configured in {@code options}. If neither tag pair is
     * found, the whole content is treated as the answer.
     */
    private ChatResult parseReasoningResponse(String content, OpenAiModelOptions options, Usage usage) {
        if (content == null || content.isEmpty()) {
            return buildEmptyResult();
        }

        String reasoning = extractContent(
                content,
                options.getReasoningStartTag(),
                options.getReasoningEndTag());

        String answer = extractContent(
                content,
                options.getAnswerStartTag(),
                options.getAnswerEndTag());

        // No tags at all: assume the entire content is the answer.
        if (answer == null && reasoning == null) {
            answer = content;
        }

        return ChatResult.builder()
                .content(answer)
                .reasoningContent(reasoning)
                .toolCalls(null)
                .usage(usage)
                .isComplete(true)
                .build();
    }

    /**
     * Extracts the trimmed text between the first occurrence of
     * {@code startTag}...{@code endTag}, or {@code null} when absent.
     */
    private String extractContent(String content, String startTag, String endTag) {
        if (content == null || startTag == null || endTag == null) {
            return null;
        }

        // Tags are caller-configurable, so the pattern cannot be a static constant.
        Pattern pattern = Pattern.compile(
                Pattern.quote(startTag) + "(.*?)" + Pattern.quote(endTag),
                Pattern.DOTALL);

        Matcher matcher = pattern.matcher(content);
        return matcher.find() ? matcher.group(1).trim() : null;
    }

    /**
     * Converts a non-streaming tool-call response into a {@link ChatResult}.
     */
    private ChatResult processToolCallResult(ChatResponse response) {
        if (response == null || response.getChoices() == null || response.getChoices().isEmpty()) {
            return buildEmptyResult();
        }

        ChatChoice choice = response.getChoices().get(0);
        ChatMessage message = choice.getMessage();
        if (message == null) {
            return buildEmptyResult();
        }

        String content = message.getContent() != null ? message.getContent() : "";
        List<ToolCallResult> toolCalls = new ArrayList<>();
        if (message.getToolCalls() != null) {
            toolCalls.addAll(message.getToolCalls());
        }

        return ChatResult.builder()
                .content(content)
                .reasoningContent(null)
                .toolCalls(toolCalls)
                .usage(response.getUsage())
                .isComplete(true)
                .build();
    }

    /**
     * Processes one streamed tool-call delta: fragments carrying an id start a
     * new tool call; fragments without an id are merged into the most recent one
     * (OpenAI streams tool-call arguments incrementally in this shape).
     *
     * @return a partial result for the delta's text content, or {@code null}
     *         when the chunk carries nothing usable
     */
    private ChatResult processStreamingToolCallResult(ChatResponse response,
            AtomicReference<List<ToolCallResult>> toolCallsList) {
        if (response == null || response.getChoices() == null || response.getChoices().isEmpty()) {
            return null;
        }

        ChatChoice choice = response.getChoices().get(0);
        ChatMessage delta = choice.getDelta();
        if (delta == null) {
            // Guard: the original dereferenced delta unconditionally and could NPE.
            return null;
        }

        if (delta.getToolCalls() != null && !delta.getToolCalls().isEmpty()) {
            List<ToolCallResult> currentList = toolCallsList.get();

            for (ToolCallResult deltaToolCall : delta.getToolCalls()) {
                if (StrUtil.isNotEmpty(deltaToolCall.getId())) {
                    // A fragment with an id opens a new tool call.
                    currentList.add(deltaToolCall);
                } else if (!currentList.isEmpty()) {
                    // An id-less fragment extends the arguments of the latest call.
                    ToolCallResult existingToolCall = currentList.get(currentList.size() - 1);
                    currentList.set(currentList.size() - 1,
                            mergeToolCallResults(existingToolCall, deltaToolCall));
                }
            }
        }

        // Text content (if any) is emitted immediately; accumulated tool calls are
        // only emitted on the terminal [DONE] event.
        return ChatResult.builder()
                .content(delta.getContent())
                .reasoningContent(delta.getReasoningContent())
                .toolCalls(null)
                .usage(response.getUsage())
                .isComplete(false)
                .build();
    }

    /**
     * Merges a streamed tool-call fragment into an existing tool call: the id
     * and type of the existing call are kept, a new function name replaces the
     * old one, and argument fragments are concatenated.
     */
    private ToolCallResult mergeToolCallResults(ToolCallResult existing, ToolCallResult delta) {
        ToolCallFunction function = existing.getFunction();
        ToolCallFunction deltaFunction = delta.getFunction();

        if (deltaFunction != null) {
            if (function == null) {
                function = deltaFunction;
            } else {
                String name = deltaFunction.getName() != null
                        ? deltaFunction.getName()
                        : function.getName();

                String arguments = function.getArguments();
                if (deltaFunction.getArguments() != null) {
                    // Argument JSON arrives in pieces; append rather than replace.
                    arguments = (arguments == null)
                            ? deltaFunction.getArguments()
                            : arguments + deltaFunction.getArguments();
                }

                function = ToolCallFunction.builder()
                        .name(name)
                        .arguments(arguments)
                        .build();
            }
        }

        return ToolCallResult.builder()
                .id(existing.getId())
                .type(existing.getType())
                .function(function)
                .build();
    }

    /**
     * Builds an empty, complete result used when a response carries no choices.
     */
    private ChatResult buildEmptyResult() {
        return ChatResult.builder()
                .content("")
                .reasoningContent(null)
                .toolCalls(new ArrayList<>())
                .isComplete(true)
                .build();
    }

    /**
     * Fluent builder for {@link OpenAiClient}.
     */
    public static class Builder {
        private String apiKey;
        private String baseUrl = "https://api.openai.com";
        private String model = "gpt-3.5-turbo";
        private Double temperature = 0.7;
        private Integer maxTokens = 2000;
        private Double topP;
        private Double presencePenalty;
        private Double frequencyPenalty;
        private Boolean isReasoning = false;
        private String reasoningStartTag = "<thinking>";
        private String reasoningEndTag = "</thinking>";
        private String answerStartTag = "<answer>";
        private String answerEndTag = "</answer>";

        public Builder apiKey(String apiKey) {
            this.apiKey = apiKey;
            return this;
        }

        public Builder baseUrl(String baseUrl) {
            this.baseUrl = baseUrl;
            return this;
        }

        public Builder model(String model) {
            this.model = model;
            return this;
        }

        public Builder temperature(Double temperature) {
            this.temperature = temperature;
            return this;
        }

        public Builder maxTokens(Integer maxTokens) {
            this.maxTokens = maxTokens;
            return this;
        }

        public Builder topP(Double topP) {
            this.topP = topP;
            return this;
        }

        public Builder presencePenalty(Double presencePenalty) {
            this.presencePenalty = presencePenalty;
            return this;
        }

        public Builder frequencyPenalty(Double frequencyPenalty) {
            this.frequencyPenalty = frequencyPenalty;
            return this;
        }

        public Builder isReasoning(Boolean isReasoning) {
            this.isReasoning = isReasoning;
            return this;
        }

        public Builder reasoningTags(String startTag, String endTag) {
            this.reasoningStartTag = startTag;
            this.reasoningEndTag = endTag;
            return this;
        }

        public Builder answerTags(String startTag, String endTag) {
            this.answerStartTag = startTag;
            this.answerEndTag = endTag;
            return this;
        }

        public OpenAiClient build() {
            if (apiKey == null || apiKey.isEmpty()) {
                throw new IllegalArgumentException("API Key is required");
            }

            OpenAiModelOptions options = OpenAiModelOptions.builder()
                    .apiKey(apiKey)
                    .baseUrl(baseUrl)
                    .model(model)
                    .temperature(temperature)
                    .maxTokens(maxTokens)
                    .topP(topP)
                    .presencePenalty(presencePenalty)
                    .frequencyPenalty(frequencyPenalty)
                    .isReasoning(isReasoning)
                    .reasoningStartTag(reasoningStartTag)
                    .reasoningEndTag(reasoningEndTag)
                    .answerStartTag(answerStartTag)
                    .answerEndTag(answerEndTag)
                    .build();

            // BUGFIX: the original built these options (and a WebClient) and then
            // discarded them, calling the 5-arg constructor — which silently dropped
            // topP, the penalties and every reasoning setting. Route through the
            // options constructor so the configured values take effect.
            return new OpenAiClient(options);
        }
    }
}
