package cn.six1943.deepchat.module.ai.agent;

import cn.hutool.core.util.StrUtil;
import cn.iocoder.yudao.framework.common.exception.ServiceException;
import dev.langchain4j.community.model.dashscope.QwenChatModel;
import dev.langchain4j.community.model.dashscope.QwenEmbeddingModel;
import dev.langchain4j.community.model.dashscope.QwenStreamingChatModel;
import dev.langchain4j.community.model.dashscope.QwenTokenCountEstimator;
import dev.langchain4j.community.model.qianfan.QianfanChatModel;
import dev.langchain4j.community.model.qianfan.QianfanEmbeddingModel;
import dev.langchain4j.community.model.qianfan.QianfanStreamingChatModel;
import dev.langchain4j.community.model.zhipu.ZhipuAiChatModel;
import dev.langchain4j.community.model.zhipu.ZhipuAiEmbeddingModel;
import dev.langchain4j.community.model.zhipu.ZhipuAiStreamingChatModel;
import dev.langchain4j.model.TokenCountEstimator;
import dev.langchain4j.model.bedrock.BedrockChatModel;
import dev.langchain4j.model.bedrock.BedrockCohereEmbeddingModel;
import dev.langchain4j.model.bedrock.BedrockStreamingChatModel;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.cohere.CohereScoringModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openaiofficial.OpenAiOfficialChatModel;
import dev.langchain4j.model.openaiofficial.OpenAiOfficialEmbeddingModel;
import dev.langchain4j.model.openaiofficial.OpenAiOfficialStreamingChatModel;
import dev.langchain4j.model.scoring.ScoringModel;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeAsyncClient;
import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient;

import java.util.List;

/**
 * Factory for building LangChain4j model instances — streaming chat, blocking
 * chat, embedding, scoring (rerank) and token-count models — from a
 * {@link ModelWithKey} configuration.
 *
 * <p>The provider is selected via {@code ModelWithKey#getProviderType()};
 * any unrecognized provider falls through to the OpenAI-official client
 * using the configured base URL.
 */
public class ModelFactory {

    /**
     * Shared request/response logging listener.
     * NOTE(review): currently only attached to the Qwen streaming model —
     * confirm whether other providers should receive it as well.
     */
    private static final LogChatModelListener LOG_CHAT_MODEL_LISTENER = new LogChatModelListener();

    /** Utility class — not instantiable. */
    private ModelFactory() {
    }

    /**
     * Whether verbose LLM request/response logging is enabled; controlled by
     * the {@code th_llm_log} environment variable (any non-blank value).
     */
    private static boolean isLogEnabled() {
        return StrUtil.isNotBlank(System.getenv("th_llm_log"));
    }

    /**
     * Creates a streaming chat model for the given provider/model configuration.
     *
     * @param mk model code plus provider credentials (AK/SK, region, base URL)
     * @return provider-specific {@link StreamingChatModel}; unknown providers
     *         use the OpenAI-official client against {@code mk.getBaseUrl()}
     */
    public static StreamingChatModel createChatModel(ModelWithKey mk) {
        boolean enableLog = isLogEnabled();
        switch (mk.getProviderType()) {
            case "dashscope":
                return QwenStreamingChatModel.builder()
                        .listeners(List.of(LOG_CHAT_MODEL_LISTENER))
                        .modelName(mk.getCode())
                        .apiKey(mk.getApiAk())
                        .build();
            case "Bedrock":
                // Static AK/SK credentials; region comes from the model config.
                BedrockRuntimeAsyncClient client = BedrockRuntimeAsyncClient.builder()
                        .region(Region.of(mk.getApiRegion()))
                        .credentialsProvider(() -> AwsBasicCredentials.create(mk.getApiAk(), mk.getApiSk()))
                        .build();
                return new BedrockStreamingChatModel.Builder().client(client).modelId(mk.getCode()).build();
            case "zhipuai":
                return ZhipuAiStreamingChatModel.builder()
                        .model(mk.getCode())
                        .apiKey(mk.getApiAk())
                        .logRequests(enableLog)
                        .logResponses(enableLog)
                        .build();
            case "qianfan":
                return QianfanStreamingChatModel.builder()
                        .modelName(mk.getCode())
                        .apiKey(mk.getApiAk())
                        .logRequests(enableLog)
                        .logResponses(enableLog)
                        .build();
            default:
                // OpenAI-compatible providers (default fallback).
                // NOTE(review): this builder does not expose logRequests/logResponses
                // in the version used here — confirm before re-enabling logging.
                return OpenAiOfficialStreamingChatModel.builder()
                        .baseUrl(mk.getBaseUrl())
                        .apiKey(mk.getApiAk())
                        .modelName(mk.getCode())
                        .isAzure(false)
                        .build();
        }
    }

    /**
     * Creates a blocking (non-streaming) chat model for text generation.
     *
     * @param mk model code plus provider credentials
     * @return provider-specific {@link ChatModel}; unknown providers use the
     *         OpenAI-official client against {@code mk.getBaseUrl()}
     */
    public static ChatModel createGenModel(ModelWithKey mk) {
        boolean enableLog = isLogEnabled();
        switch (mk.getProviderType()) {
            case "dashscope":
                return QwenChatModel.builder()
                        .modelName(mk.getCode())
                        .apiKey(mk.getApiAk())
                        .build();
            case "Bedrock":
                // Static AK/SK credentials; region comes from the model config.
                BedrockRuntimeClient client = BedrockRuntimeClient.builder()
                        .region(Region.of(mk.getApiRegion()))
                        .credentialsProvider(() -> AwsBasicCredentials.create(mk.getApiAk(), mk.getApiSk()))
                        .build();
                return new BedrockChatModel.Builder().client(client).modelId(mk.getCode()).build();
            case "zhipuai":
                return ZhipuAiChatModel.builder()
                        .model(mk.getCode())
                        .apiKey(mk.getApiAk())
                        .logRequests(enableLog)
                        .logResponses(enableLog)
                        .build();
            case "qianfan":
                return QianfanChatModel.builder()
                        .modelName(mk.getCode())
                        .apiKey(mk.getApiAk())
                        .logRequests(enableLog)
                        .logResponses(enableLog)
                        .build();
            default:
                // OpenAI-compatible providers (default fallback).
                return OpenAiOfficialChatModel.builder()
                        .baseUrl(mk.getBaseUrl())
                        .apiKey(mk.getApiAk())
                        .modelName(mk.getCode())
                        .build();
        }
    }

    /**
     * Creates an embedding model for the given provider/model configuration.
     *
     * @param mk model code plus provider credentials
     * @return provider-specific {@link EmbeddingModel}; unknown providers use
     *         the OpenAI-official client against {@code mk.getBaseUrl()}
     */
    public static EmbeddingModel createEmbeddingModel(ModelWithKey mk) {
        boolean enableLog = isLogEnabled();
        switch (mk.getProviderType()) {
            case "dashscope":
                return QwenEmbeddingModel.builder()
                        .modelName(mk.getCode())
                        .apiKey(mk.getApiAk())
                        .build();
            case "Bedrock":
                // Bedrock embeddings go through the Cohere embedding model;
                // single retry to fail fast on credential/region errors.
                return BedrockCohereEmbeddingModel
                        .builder()
                        .region(Region.of(mk.getApiRegion()))
                        .maxRetries(1)
                        .credentialsProvider(() -> AwsBasicCredentials.create(mk.getApiAk(), mk.getApiSk()))
                        .model(mk.getCode())
                        .build();
            case "zhipuai":
                return ZhipuAiEmbeddingModel.builder()
                        .model(mk.getCode())
                        .apiKey(mk.getApiAk())
                        .logRequests(enableLog)
                        .logResponses(enableLog)
                        .build();
            case "qianfan":
                return QianfanEmbeddingModel.builder()
                        .modelName(mk.getCode())
                        .apiKey(mk.getApiAk())
                        .logRequests(enableLog)
                        .logResponses(enableLog)
                        .build();
            default:
                // OpenAI-compatible providers (default fallback).
                return OpenAiOfficialEmbeddingModel.builder()
                        .baseUrl(mk.getBaseUrl())
                        .apiKey(mk.getApiAk())
                        .modelName(mk.getCode())
                        .build();
        }
    }

    /**
     * Creates a scoring (rerank) model. Dispatches on the model CODE, not the
     * provider type — only Cohere's multilingual reranker is supported.
     *
     * @param mk model configuration; only {@code mk.getCode()} is consulted
     * @return a {@link ScoringModel} for the requested rerank model
     * @throws ServiceException (400) if the model code is not supported
     */
    public static ScoringModel createScoringModel(ModelWithKey mk) {
        switch (mk.getCode()) {
            case "rerank-multilingual-v3.0":
                // Cohere key comes from the environment, not from ModelWithKey.
                return CohereScoringModel.builder()
                    .apiKey(System.getenv("COHERE_API_KEY"))
                    .modelName("rerank-multilingual-v3.0")
                    .build();
            // TODO: support "gte-rerank-v2" via BailianScoringModel once available.
            default:
                throw new ServiceException(400, "排序模型不支持");
        }
    }

    /**
     * Creates a token-count estimator for the given model. Only DashScope
     * (Qwen) models are currently supported.
     *
     * @param mk model code plus provider credentials
     * @return a {@link TokenCountEstimator} for the model
     * @throws ServiceException (400) if the provider is not supported
     */
    public static TokenCountEstimator createTokenizer(ModelWithKey mk) {
        switch (mk.getProviderType()) {
            case "dashscope":
                return QwenTokenCountEstimator.builder()
                        .modelName(mk.getCode())
                        .apiKey(mk.getApiAk())
                        .build();
            default:
                // Fixed copy-paste error: previous message said "排序模型不支持"
                // (scoring model), copied from createScoringModel.
                throw new ServiceException(400, "分词模型不支持");
        }
    }
}
