package com.pai4j.aigc.llm;

import com.pai4j.common.enums.LlmModelEnum;
import com.pai4j.common.exception.BusinessException;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 *  抽象大模型对话工厂服务
 *
 * @author: CYM-pai
 * @date: 2025/05/11 09:39
 **/
@Component
public class LLMServiceFactory implements InitializingBean, ApplicationContextAware {

    private ApplicationContext applicationContext;

    /**
     * Maps a model identifier ({@link LlmModelEnum#getModel()}) to its chat service.
     * Entries are written once during startup ({@link #afterPropertiesSet()}) and read
     * afterwards via {@link #getLLMService(String)}; {@code ConcurrentHashMap} guarantees
     * safe publication of those entries across threads, which a plain {@code HashMap}
     * written from an instance callback and read from a static method does not.
     */
    private static final Map<String, AbstractLLMChatService> serviceMap = new ConcurrentHashMap<>();

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }

    /**
     * Populates the model-to-service registry once the bean's properties are set
     * (i.e. after {@link #setApplicationContext} has run).
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        this.initServiceMap();
    }

    /**
     * Registers one chat-service bean per supported model. Each lookup uses
     * {@code getBean(Class)}, so a missing service bean fails fast at startup
     * rather than at first use.
     */
    private void initServiceMap() {
        // Kimi
        serviceMap.put(LlmModelEnum.KIMI.getModel(), applicationContext.getBean(KimiAIService.class));
        // DeepSeek
        serviceMap.put(LlmModelEnum.DEEPSEEK.getModel(), applicationContext.getBean(DeepSeekAIService.class));
        // Ollama
        serviceMap.put(LlmModelEnum.OLLAMA.getModel(), applicationContext.getBean(OllamaService.class));
    }

    /**
     * Returns the chat service registered for the given model identifier.
     *
     * @param model model identifier as produced by {@link LlmModelEnum#getModel()}
     * @return the matching chat service, never {@code null}
     * @throws BusinessException if no service is registered for {@code model};
     *                           the message includes the offending value to aid diagnosis
     */
    public static AbstractLLMChatService getLLMService(String model) {
        AbstractLLMChatService llmChatService = serviceMap.get(model);
        if (llmChatService == null) {
            throw new BusinessException("不支持的模型: " + model);
        }
        return llmChatService;
    }
}
