package org.fujay.commons.langchain4j.core.model.impl;

import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import org.fujay.commons.langchain4j.core.enumd.ModelSupplierEnum;
import org.fujay.commons.langchain4j.core.enumd.ResponseFormatEnum;
import org.fujay.commons.langchain4j.core.model.ModelProvider;
import org.fujay.commons.langchain4j.core.options.LlmOptions;

import java.time.Duration;

/**
 * @author slm
 * @description open-ai
 */
public class OpenAiModelProvider implements ModelProvider {

    /** Shared request timeout applied to every OpenAI client built by this provider. */
    private static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(60);

    /**
     * Builds a blocking (non-streaming) OpenAI chat model from the given options.
     *
     * <p>NOTE(review): the method name breaks lowerCamelCase convention, but it is
     * declared by the {@code ModelProvider} interface and cannot be renamed here
     * without changing the interface and all other implementations.
     *
     * @param llmOptions connection and generation settings (base URL, API key,
     *                   model name, max tokens, temperature, logging flags)
     * @return a configured {@link ChatModel}
     */
    @Override
    public ChatModel ChatModel(LlmOptions llmOptions) {
        return new OpenAiChatModel.OpenAiChatModelBuilder()
                .baseUrl(llmOptions.getBaseUrl())
                .apiKey(llmOptions.getApiKey())
                .modelName(llmOptions.getModelName())
                .maxTokens(llmOptions.getMaxTokens())
                .temperature(llmOptions.getTemperature())
                .timeout(DEFAULT_TIMEOUT)
                .logRequests(llmOptions.getLogRequests())
                .logResponses(llmOptions.getLogResponses())
                .build();
    }

    /**
     * Builds a streaming OpenAI chat model from the given options.
     *
     * @param llmOptions connection and generation settings; if
     *                   {@code getResponseFormat()} is {@code JSON_OBJECT}, the
     *                   corresponding response format is requested from the API
     * @return a configured {@link StreamingChatModel}
     */
    @Override
    public StreamingChatModel streamingChatModel(LlmOptions llmOptions) {
        OpenAiStreamingChatModel.OpenAiStreamingChatModelBuilder builder =
                new OpenAiStreamingChatModel.OpenAiStreamingChatModelBuilder()
                        .baseUrl(llmOptions.getBaseUrl())
                        .apiKey(llmOptions.getApiKey())
                        .modelName(llmOptions.getModelName())
                        .maxTokens(llmOptions.getMaxTokens())
                        .temperature(llmOptions.getTemperature())
                        .timeout(DEFAULT_TIMEOUT)
                        .logRequests(llmOptions.getLogRequests())
                        .logResponses(llmOptions.getLogResponses());

        // Only set a response format when one was explicitly requested. The
        // previous code always called responseFormat(""), and an empty string is
        // not a valid OpenAI response_format value; leaving the builder untouched
        // keeps the provider's default (no response_format sent).
        if (llmOptions.getResponseFormat() == ResponseFormatEnum.JSON_OBJECT) {
            builder.responseFormat(ResponseFormatEnum.JSON_OBJECT.getMessage());
        }

        return builder.build();
    }

    /**
     * Builds an OpenAI embedding model from the given options.
     *
     * @param llmOptions connection settings (base URL, API key, model name,
     *                   logging flags)
     * @return a configured {@link EmbeddingModel}
     */
    @Override
    public EmbeddingModel embeddingModel(LlmOptions llmOptions) {
        return new OpenAiEmbeddingModel.OpenAiEmbeddingModelBuilder()
                .baseUrl(llmOptions.getBaseUrl())
                .apiKey(llmOptions.getApiKey())
                .timeout(DEFAULT_TIMEOUT)
                .modelName(llmOptions.getModelName())
                .logRequests(llmOptions.getLogRequests())
                .logResponses(llmOptions.getLogResponses())
                .build();
    }

    /**
     * Identifies which model supplier this provider serves, used by the
     * surrounding framework to select the correct {@code ModelProvider}.
     *
     * @return {@link ModelSupplierEnum#OPEN_AI}
     */
    @Override
    public ModelSupplierEnum modelSupplierEnum() {
        return ModelSupplierEnum.OPEN_AI;
    }

}
