import { DeepSeek } from "./deepSeek";
import { GLM } from "./glm";
import { Qwen } from "./qwen";
import { BaseLLM } from "./baseLLM";
import fs from "fs";
import path from "path";

// Shape of one entry in llm-config.json; each entry describes one model endpoint.
type LLMConfig = {
    name: string;   // registry key (e.g. 'deepseek-chat', 'qwen-max') — selects which client class to instantiate
    model: string;  // model identifier forwarded to the provider client
    apiKey: string; // provider credential; NOTE(review): read from a plain JSON file — confirm this is intended
    url: string;    // provider API endpoint URL
};

/**
 * Factory that loads llm-config.json once and hands out initialized LLM
 * clients by model name. Prefer `LLMFactory.getInstance()` over `new` so the
 * config file is only read and parsed a single time per process.
 */
export class LLMFactory {
    // Lazily-created shared instance, exposed through getInstance().
    private static instance: LLMFactory;
    // Model name -> initialized client. Populated once in the constructor.
    private readonly llmMap: Map<string, BaseLLM> = new Map();

    constructor() {
        this.initializeLLMMap();
    }

    /**
     * Returns the shared factory instance, creating it on first use.
     * The constructor remains public for backward compatibility, but new
     * callers should use this accessor to avoid repeated config loads.
     */
    public static getInstance(): LLMFactory {
        if (!LLMFactory.instance) {
            LLMFactory.instance = new LLMFactory();
        }
        return LLMFactory.instance;
    }

    /**
     * Locates llm-config.json, parses it, and instantiates one client per
     * recognized model name. Throws if the file is missing or malformed so
     * callers know initialization failed instead of receiving an empty map.
     */
    private initializeLLMMap(): void {
        try {
            // Candidate locations, covering the dev layout and bundled output.
            const possiblePaths = [
                path.join(__dirname, 'llm-config.json'),                    // dev mode: core/llm/llm-config.json
                path.join(__dirname, '..', 'llm-config.json'),              // bundled: dist/../llm-config.json
                path.join(__dirname, '..', 'core', 'llm-config.json'),      // bundled: dist/../core/llm-config.json
                path.join(__dirname, '..', '..', 'core', 'llm-config.json') // other possible layouts
            ];

            let configPath = '';
            for (const tryPath of possiblePaths) {
                if (fs.existsSync(tryPath)) {
                    configPath = tryPath;
                    break;
                }
            }

            // None of the candidates exist — log them all to ease debugging.
            if (!configPath) {
                console.error(`LLM config file not found. Tried paths:`);
                possiblePaths.forEach(p => console.error(`  - ${p}`));
                console.error(`__dirname: ${__dirname}`);
                throw new Error(`LLM配置文件未找到，请确保 core/llm-config.json 文件存在`);
            }

            console.log(`Loading LLM config from: ${configPath}`);
            const configData = fs.readFileSync(configPath, 'utf-8');
            const parsed: unknown = JSON.parse(configData);
            // Guard against a malformed file (e.g. an object instead of an
            // array) before iterating, so the error points at the real cause.
            if (!Array.isArray(parsed)) {
                throw new Error(`Invalid LLM config: expected a JSON array in ${configPath}`);
            }
            const configs = parsed as LLMConfig[];

            console.log(`Loaded ${configs.length} LLM configurations`);

            for (const config of configs) {
                console.log(`Initializing LLM: ${config.name}`);
                // Exact-name dispatch: unknown names are warned about, not fatal.
                switch (config.name) {
                    case 'deepseek-chat':
                        this.llmMap.set(config.name, new DeepSeek(config.apiKey, config.model, config.url));
                        break;
                    case 'glm-4-flash':
                    case 'glm-4':
                    case 'glm-4-plus':
                        this.llmMap.set(config.name, new GLM(config.apiKey, config.model, config.url));
                        break;
                    case 'qwen-turbo':
                    case 'qwen-plus':
                    case 'qwen-max':
                    case 'qwen-long':
                        this.llmMap.set(config.name, new Qwen(config.apiKey, config.model, config.url));
                        break;
                    // Add other LLM types here as needed
                    default:
                        console.warn(`Unknown LLM type: ${config.name}`);
                }
            }

            console.log(`LLM Factory initialized with ${this.llmMap.size} models`);
        } catch (error) {
            console.error('Error initializing LLM map:', error);
            throw error; // Rethrow so callers know initialization failed.
        }
    }

    /**
     * Looks up an initialized client by model name.
     * Returns undefined (and logs the available names) when no client was
     * registered for that name during initialization.
     */
    public getLLM(modelName: string): BaseLLM | undefined {
        console.log(`Getting LLM for model: ${modelName}`);
        console.log(`Available models: ${Array.from(this.llmMap.keys()).join(', ')}`);
        const llm = this.llmMap.get(modelName);
        if (!llm) {
            console.error(`LLM not found for model: ${modelName}`);
        }
        return llm;
    }
}
