package org.fujay.commons.langchain4j.core.model.impl;

import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.localai.LocalAiChatModel;
import dev.langchain4j.model.localai.LocalAiEmbeddingModel;
import dev.langchain4j.model.localai.LocalAiStreamingChatModel;
import org.fujay.commons.langchain4j.core.enumd.ModelSupplierEnum;
import org.fujay.commons.langchain4j.core.model.ModelProvider;
import org.fujay.commons.langchain4j.core.options.LlmOptions;

import java.time.Duration;

/**
 * @author slm
 * @description Local model provider, compatible with the OpenAI API specification
 */
public class LocalAiModelProvider implements ModelProvider {

    /**
     * Shared request timeout applied to every LocalAI call.
     * Extracted so the chat, streaming and embedding builders stay consistent.
     */
    private static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(60);

    /**
     * Builds a blocking chat model backed by the configured LocalAI endpoint.
     *
     * <p>NOTE(review): the method name {@code ChatModel} violates Java naming
     * conventions (lowerCamelCase), but it overrides {@link ModelProvider}, so
     * it cannot be renamed here without changing the interface.
     *
     * @param llmOptions connection and sampling options (base URL, model name,
     *                   temperature, topP, maxTokens, request/response logging)
     * @return a configured {@link ChatModel}
     * @author sunliming
     */
    @Override
    public ChatModel ChatModel(LlmOptions llmOptions) {
        return new LocalAiChatModel.LocalAiChatModelBuilder()
                .baseUrl(llmOptions.getBaseUrl())
                .modelName(llmOptions.getModelName())
                .temperature(llmOptions.getTemperature())
                .timeout(DEFAULT_TIMEOUT)
                .topP(llmOptions.getTopP())
                .maxTokens(llmOptions.getMaxTokens())
                .logRequests(llmOptions.getLogRequests())
                .logResponses(llmOptions.getLogResponses())
                .build();
    }

    /**
     * Builds a streaming chat model backed by the configured LocalAI endpoint.
     *
     * @param llmOptions connection and sampling options (base URL, model name,
     *                   temperature, topP, maxTokens, request/response logging)
     * @return a configured {@link StreamingChatModel}
     * @author sunliming
     */
    @Override
    public StreamingChatModel streamingChatModel(LlmOptions llmOptions) {
        return new LocalAiStreamingChatModel.LocalAiStreamingChatModelBuilder()
                .baseUrl(llmOptions.getBaseUrl())
                .modelName(llmOptions.getModelName())
                .temperature(llmOptions.getTemperature())
                .timeout(DEFAULT_TIMEOUT)
                .topP(llmOptions.getTopP())
                .maxTokens(llmOptions.getMaxTokens())
                .logRequests(llmOptions.getLogRequests())
                .logResponses(llmOptions.getLogResponses())
                .build();
    }

    /**
     * Builds an embedding model backed by the configured LocalAI endpoint.
     *
     * <p>Sampling options (temperature, topP, maxTokens) do not apply to
     * embeddings and are intentionally not forwarded.
     *
     * @param llmOptions connection options (base URL, model name,
     *                   request/response logging)
     * @return a configured {@link EmbeddingModel}
     */
    @Override
    public EmbeddingModel embeddingModel(LlmOptions llmOptions) {
        return new LocalAiEmbeddingModel.LocalAiEmbeddingModelBuilder()
                .baseUrl(llmOptions.getBaseUrl())
                .modelName(llmOptions.getModelName())
                .timeout(DEFAULT_TIMEOUT)
                .logRequests(llmOptions.getLogRequests())
                .logResponses(llmOptions.getLogResponses())
                .build();
    }

    /**
     * Identifies which model supplier this provider serves.
     *
     * @return {@link ModelSupplierEnum#LOCAL_AI}
     * @author sunliming
     */
    @Override
    public ModelSupplierEnum modelSupplierEnum() {
        return ModelSupplierEnum.LOCAL_AI;
    }
}
