package com.starhub.application.model.biz;

import com.starhub.application.interfaces.model.ModelConfigService;
import com.starhub.application.model.constants.ModelConstants;
import com.starhub.application.model.dto.ModelConfigDto;
import com.starhub.application.model.enums.ChatTypeEnums;
import com.starhub.common.bean.model.ModelConfig;
import com.starhub.common.security.util.SecurityUtils;
import com.starhub.common.sys.user.LoginUser;
import com.starhub.integration.sdk.shangtang.model.ShangtangChatModel;
import com.starhub.integration.sdk.shangtang.model.ShangtangStreamingChatModel;

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.time.Duration;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Model connection factory.
 *
 * <p>Creates LangChain4j chat-model connections — both streaming and blocking —
 * for the model configuration registered under a given {@code mark} for the
 * current logged-in user.
 *
 * <p>TODO: add concurrency control; once the MQ integration is ready, wrap this
 * in an MQ-backed AI connection pool.
 */
@Component
public class ModelConnectionFactory {

    private static final Logger log = LoggerFactory.getLogger(ModelConnectionFactory.class);

    @Autowired
    private ModelConfigService modelConfigService;

    /**
     * Returns a streaming model connection for the given mark.
     *
     * @param mark           identifier of the model configuration to resolve
     * @param modelConfigDto caller-supplied overrides (temperature, max tokens, ...)
     * @return a streaming chat model bound to the resolved configuration
     */
    public StreamingChatLanguageModel getStreamingConnection(String mark, ModelConfigDto modelConfigDto) {
        return createStreamingModelConnection(mark, modelConfigDto);
    }

    /**
     * Returns a streaming model connection for the given mark using default parameters.
     *
     * @param mark identifier of the model configuration to resolve
     * @return a streaming chat model bound to the resolved configuration
     */
    public StreamingChatLanguageModel getStreamingConnection(String mark) {
        return getStreamingConnection(mark, ModelConfigDto.builder().build());
    }

    /**
     * Returns a blocking (non-streaming) model connection for the given mark.
     *
     * @param mark           identifier of the model configuration to resolve
     * @param modelConfigDto caller-supplied overrides (temperature, max tokens, ...)
     * @return a blocking chat model bound to the resolved configuration
     */
    public ChatLanguageModel getChatConnection(String mark, ModelConfigDto modelConfigDto) {
        return createChatModelConnection(mark, modelConfigDto);
    }

    /**
     * Returns a blocking model connection for the given mark using default parameters.
     *
     * @param mark identifier of the model configuration to resolve
     * @return a blocking chat model bound to the resolved configuration
     */
    public ChatLanguageModel getChatConnection(String mark) {
        return getChatConnection(mark, ModelConfigDto.builder().build());
    }

    /**
     * Resolves the stored model configuration for the current user and mark,
     * merges it into the supplied DTO, and validates the combined parameters.
     *
     * @param mark           identifier of the model configuration to resolve
     * @param modelConfigDto caller-supplied overrides to merge the config into
     * @return the merged and validated configuration DTO
     * @throws IllegalArgumentException if no configuration exists for the mark or
     *                                  its chat type is not a known {@link ChatTypeEnums}
     */
    private ModelConfigDto getModel(String mark, ModelConfigDto modelConfigDto) {
        // NOTE(review): assumes a user is always present in the security context —
        // confirm SecurityUtils.getCurrentUser() cannot return null here.
        LoginUser user = SecurityUtils.getCurrentUser();

        // Look up the persisted model configuration for this user/mark pair.
        ModelConfig config = modelConfigService.getModel(user, mark);
        if (config == null) {
            throw new IllegalArgumentException("No configuration found for mark: " + mark);
        }

        // Map the stored chat-type string onto the supported enum.
        String chatType = config.getChatType();
        ChatTypeEnums chatTypeEnum = ChatTypeEnums.getModelType(chatType);
        if (chatTypeEnum == null) {
            throw new IllegalArgumentException("Unsupported model type: " + chatType);
        }

        // Fix: this method serves both streaming and blocking connections, so the
        // log line no longer claims "streaming" unconditionally.
        log.info("Creating {} model connection with config: mark={}, model_type={}, chat_type={}, api_url={}",
            chatTypeEnum.getDesc(), config.getMark(), config.getModelType(), config.getChatType(), config.getApiUrl());

        // Merge the persisted configuration into the caller-supplied DTO and let
        // checkParams() validate/default the combined parameters.
        // (Removed the pointless `modelConfigDto = ...` assignment in the return.)
        return modelConfigDto.toBuilder()
            .modelConfig(config)
            .ChatTypeEnum(chatTypeEnum)
            .build()
            .checkParams();
    }

    /**
     * Dispatches to the streaming factory method matching the configured model type.
     *
     * @param mark           identifier of the model configuration to resolve
     * @param modelConfigDto caller-supplied overrides
     * @return a streaming chat model for the resolved provider
     * @throws IllegalArgumentException if the resolved chat type has no streaming factory
     */
    private StreamingChatLanguageModel createStreamingModelConnection(String mark, ModelConfigDto modelConfigDto) {
        modelConfigDto = getModel(mark, modelConfigDto);
        switch (modelConfigDto.getChatTypeEnum()) {
            case OpenaiGpt:
                return createOpenAIStreamingModel(modelConfigDto);
            case Claude:
                return createAnthropicStreamingModel(modelConfigDto);
            case Qwen:
                return createAlibabaStreamingModel(modelConfigDto);
            case Wenxinyiyan:
                return createBaiduStreamingModel(modelConfigDto);
            case Xinghuo:
                return createXunfeiStreamingModel(modelConfigDto);
            case Deepseek:
            case DeepseekChat: // both DeepSeek variants share one OpenAI-compatible endpoint
                return createDeepseekStreamingModel(modelConfigDto);
            case Doubao:
                return createBytedanceStreamingModel(modelConfigDto);
            case open_ollama:
                return createOllamaStreamingModel(modelConfigDto);
            case open_llm:
                return createLLMStreamingModel(modelConfigDto);
            case custom:
                return createShangTangStreamingModel(modelConfigDto);
            default:
                throw new IllegalArgumentException("Unsupported model type: " + modelConfigDto.getChatTypeEnum().getModelType());
        }
    }

    /**
     * Dispatches to the blocking factory method matching the configured model type.
     *
     * @param mark           identifier of the model configuration to resolve
     * @param modelConfigDto caller-supplied overrides
     * @return a blocking chat model for the resolved provider
     * @throws IllegalArgumentException if the resolved chat type has no blocking factory
     */
    private ChatLanguageModel createChatModelConnection(String mark, ModelConfigDto modelConfigDto) {
        modelConfigDto = getModel(mark, modelConfigDto);
        switch (modelConfigDto.getChatTypeEnum()) {
            case OpenaiGpt:
                return createOpenAIChatModel(modelConfigDto);
            case Claude:
                return createAnthropicChatModel(modelConfigDto);
            case Qwen:
                return createAlibabaChatModel(modelConfigDto);
            case Wenxinyiyan:
                return createBaiduChatModel(modelConfigDto);
            case Xinghuo:
                return createXunfeiChatModel(modelConfigDto);
            case Deepseek:
            case DeepseekChat: // both DeepSeek variants share one OpenAI-compatible endpoint
                return createDeepseekChatModel(modelConfigDto);
            case Doubao:
                return createBytedanceChatModel(modelConfigDto);
            case open_ollama:
                return createOllamaChatModel(modelConfigDto);
            case open_llm:
                return createLLMChatModel(modelConfigDto);
            case custom:
                return createShangTangModel(modelConfigDto);
            default:
                throw new IllegalArgumentException("Unsupported model type: " + modelConfigDto.getChatTypeEnum().getModelType());
        }
    }

    // ---------------------------------------------------------------------
    // OpenAI-compatible endpoints (OpenAI, DeepSeek, Ollama, vLLM)
    // ---------------------------------------------------------------------

    /**
     * Builds a streaming connection against any OpenAI-compatible endpoint.
     * Consolidates the previously duplicated builder code for OpenAI, DeepSeek,
     * Ollama and vLLM.
     *
     * @param dto merged configuration DTO
     * @return a streaming chat model over the OpenAI-compatible API
     */
    private StreamingChatLanguageModel buildOpenAiCompatibleStreamingModel(ModelConfigDto dto) {
        return OpenAiStreamingChatModel.builder()
                .apiKey(dto.getModelConfig().getApiKey())
                .baseUrl(dto.getModelConfig().getApiUrl())
                .modelName(dto.getModelConfig().getChatType())
                .temperature(dto.getTemperature())
                .maxTokens(dto.getMaxTokens())
                // Consistency fix: the blocking builders already set this explicit
                // timeout; apply the same bound to streaming connections instead of
                // relying on the library default.
                .timeout(Duration.ofSeconds(ModelConstants.DEFAULT_TIMEOUT))
                .build();
    }

    /**
     * Builds a blocking connection against any OpenAI-compatible endpoint.
     *
     * @param dto merged configuration DTO
     * @return a blocking chat model over the OpenAI-compatible API
     */
    private ChatLanguageModel buildOpenAiCompatibleChatModel(ModelConfigDto dto) {
        return OpenAiChatModel.builder()
                .apiKey(dto.getModelConfig().getApiKey())
                .baseUrl(dto.getModelConfig().getApiUrl())
                .modelName(dto.getModelConfig().getChatType())
                .temperature(dto.getTemperature())
                .maxTokens(dto.getMaxTokens())
                .timeout(Duration.ofSeconds(ModelConstants.DEFAULT_TIMEOUT))
                .build();
    }

    /** OpenAI streaming model connection. */
    private StreamingChatLanguageModel createOpenAIStreamingModel(ModelConfigDto modelConfigDto) {
        return buildOpenAiCompatibleStreamingModel(modelConfigDto);
    }

    /** OpenAI blocking model connection. */
    private ChatLanguageModel createOpenAIChatModel(ModelConfigDto modelConfigDto) {
        return buildOpenAiCompatibleChatModel(modelConfigDto);
    }

    /** Anthropic streaming model connection (not yet implemented). */
    private StreamingChatLanguageModel createAnthropicStreamingModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("anthropic streaming model not implemented yet");
    }

    /** Anthropic blocking model connection (not yet implemented). */
    private ChatLanguageModel createAnthropicChatModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("anthropic chat model not implemented yet");
    }

    /** Alibaba (Qwen) streaming model connection (not yet implemented). */
    private StreamingChatLanguageModel createAlibabaStreamingModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("Alibaba streaming model not implemented yet");
    }

    /** Alibaba (Qwen) blocking model connection (not yet implemented). */
    private ChatLanguageModel createAlibabaChatModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("Ali chat model not implemented yet");
    }

    /** Baidu (Wenxinyiyan) streaming model connection (not yet implemented). */
    private StreamingChatLanguageModel createBaiduStreamingModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("Baidu streaming model not implemented yet");
    }

    /** Baidu (Wenxinyiyan) blocking model connection (not yet implemented). */
    private ChatLanguageModel createBaiduChatModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("Baidu chat model not implemented yet");
    }

    /** Xunfei (Xinghuo) streaming model connection (not yet implemented). */
    private StreamingChatLanguageModel createXunfeiStreamingModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("Xunfei streaming model not implemented yet");
    }

    /** Xunfei (Xinghuo) blocking model connection (not yet implemented). */
    private ChatLanguageModel createXunfeiChatModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("Xunfei chat model not implemented yet");
    }

    /** DeepSeek streaming model connection (OpenAI-compatible API). */
    private StreamingChatLanguageModel createDeepseekStreamingModel(ModelConfigDto modelConfigDto) {
        return buildOpenAiCompatibleStreamingModel(modelConfigDto);
    }

    /** DeepSeek blocking model connection (OpenAI-compatible API). */
    private ChatLanguageModel createDeepseekChatModel(ModelConfigDto modelConfigDto) {
        return buildOpenAiCompatibleChatModel(modelConfigDto);
    }

    /** ByteDance (Doubao) streaming model connection (not yet implemented). */
    private StreamingChatLanguageModel createBytedanceStreamingModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("Bytedance streaming model not implemented yet");
    }

    /** ByteDance (Doubao) blocking model connection (not yet implemented). */
    private ChatLanguageModel createBytedanceChatModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("Bytedance chat model not implemented yet");
    }

    /** Ollama streaming model connection (OpenAI-compatible API). */
    private StreamingChatLanguageModel createOllamaStreamingModel(ModelConfigDto modelConfigDto) {
        return buildOpenAiCompatibleStreamingModel(modelConfigDto);
    }

    /** Ollama blocking model connection (OpenAI-compatible API). */
    private ChatLanguageModel createOllamaChatModel(ModelConfigDto modelConfigDto) {
        return buildOpenAiCompatibleChatModel(modelConfigDto);
    }

    /** vLLM streaming model connection (OpenAI-compatible API). */
    private StreamingChatLanguageModel createLLMStreamingModel(ModelConfigDto modelConfigDto) {
        return buildOpenAiCompatibleStreamingModel(modelConfigDto);
    }

    /** vLLM blocking model connection (OpenAI-compatible API). */
    private ChatLanguageModel createLLMChatModel(ModelConfigDto modelConfigDto) {
        return buildOpenAiCompatibleChatModel(modelConfigDto);
    }

    /**
     * Custom streaming model connection.
     * NOTE(review): currently unreachable — the {@code custom} switch case routes
     * to {@link #createShangTangStreamingModel(ModelConfigDto)} instead; confirm
     * whether this placeholder is still needed.
     */
    private StreamingChatLanguageModel createCustomStreamingModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("Custom streaming model not implemented yet");
    }

    /**
     * Custom blocking model connection.
     * NOTE(review): currently unreachable — the {@code custom} switch case routes
     * to {@link #createShangTangModel(ModelConfigDto)} instead.
     */
    private ChatLanguageModel createCustomChatModel(ModelConfigDto modelConfigDto) {
        throw new UnsupportedOperationException("Custom chat model not implemented yet");
    }

    /**
     * SenseTime (ShangTang) blocking model connection.
     *
     * @param config merged configuration DTO carrying the stored {@link ModelConfig}
     * @return a blocking SenseTime chat model
     */
    public ChatLanguageModel createShangTangModel(ModelConfigDto config) {
        ModelConfig stored = config.getModelConfig();
        String modelName = stored.getModelId();
        // Bug fix: the original null-checked the stored config's temperature/maxTokens
        // but then unconditionally unboxed the DTO's values, which could NPE when the
        // DTO value was null. Guard the value that is actually used as well, keeping
        // the stored-config check so behavior is otherwise unchanged.
        // TODO(review): confirm whether the stored config's own values should be used
        // here instead of the DTO's.
        double temperature = (stored.getTemperature() != null && config.getTemperature() != null)
                ? config.getTemperature() : 0.7;
        int maxTokens = (stored.getMaxTokens() != null && config.getMaxTokens() != null)
                ? config.getMaxTokens() : 1024;
        // SenseTime API address comes straight from the stored configuration.
        String apiUrl = stored.getApiUrl();
        return new ShangtangChatModel(apiUrl, modelName, temperature, maxTokens);
    }

    /**
     * SenseTime (ShangTang) streaming model connection.
     *
     * @param config merged configuration DTO carrying the stored {@link ModelConfig}
     * @return a streaming SenseTime chat model
     */
    public StreamingChatLanguageModel createShangTangStreamingModel(ModelConfigDto config) {
        ModelConfig stored = config.getModelConfig();
        String modelName = stored.getModelId();
        // Same check/use alignment as in createShangTangModel: guard the DTO value
        // that is actually unboxed, not just the stored config's value.
        double temperature = (stored.getTemperature() != null && config.getTemperature() != null)
                ? config.getTemperature() : 0.7;
        int maxTokens = (stored.getMaxTokens() != null && config.getMaxTokens() != null)
                ? config.getMaxTokens() : 1024;
        // SenseTime API address comes straight from the stored configuration.
        String apiUrl = stored.getApiUrl();
        return new ShangtangStreamingChatModel(apiUrl, modelName, temperature, maxTokens);
    }

}