import fs from 'fs';
import path from 'path';
import { AppConfig, AIModelConfig, LogConfig } from '../types';

class ConfigManager {
  // Parsed configuration; null until loadConfig() has completed once.
  private config: AppConfig | null = null;
  // Absolute path to the JSON configuration file.
  private configPath: string;

  constructor() {
    this.configPath = path.join(process.cwd(), 'settings', 'app.json');
  }

  /**
   * Loads and validates the configuration file.
   * Never rejects: on any failure (missing file, parse error, validation
   * error) it logs the problem and falls back to the built-in defaults.
   * The result — including the fallback — is cached on `this.config` so
   * getCurrentConfig()/getCurrentModel() stay consistent with the return value.
   */
  async loadConfig(): Promise<AppConfig> {
    try {
      if (!fs.existsSync(this.configPath)) {
        throw new Error(`配置文件不存在: ${this.configPath}`);
      }

      // Async read: avoid blocking the event loop on disk I/O.
      const configData = await fs.promises.readFile(this.configPath, 'utf-8');
      this.config = JSON.parse(configData);

      if (!this.config) {
        throw new Error('配置文件为空');
      }

      this.validateConfig(this.config);
      return this.config;
    } catch (error) {
      console.error('加载配置失败:', error);
      // BUGFIX: cache the fallback config. Previously the default was
      // returned without assigning this.config, leaving the manager in a
      // null state after a failed load (and breaking testLocalModelConnection).
      this.config = this.getDefaultConfig();
      return this.config;
    }
  }

  /**
   * Validates the top-level config shape: a main model name must be declared,
   * a models map must exist, and the main model's entry must be valid.
   * @throws Error describing the first problem found.
   */
  private validateConfig(config: AppConfig): void {
    if (!config.main_ai_model) {
      throw new Error('缺少 main_ai_model 配置');
    }

    if (!config.models || typeof config.models !== 'object') {
      throw new Error('缺少 models 配置');
    }

    const mainModel = config.models[config.main_ai_model];
    if (!mainModel) {
      throw new Error(`找不到主模型配置: ${config.main_ai_model}`);
    }

    this.validateModelConfig(mainModel, config.main_ai_model);
  }

  /**
   * Validates a single model entry: required fields, plus local-model
   * specific parameters for locally hosted backends.
   * @throws Error describing the first missing or invalid field.
   */
  private validateModelConfig(model: AIModelConfig, name: string): void {
    if (!model.model_name) {
      throw new Error(`模型 ${name} 缺少 model_name`);
    }
    if (!model.model_type) {
      throw new Error(`模型 ${name} 缺少 model_type`);
    }
    if (!model.model_url) {
      throw new Error(`模型 ${name} 缺少 model_url`);
    }

    // Validate local-model specific configuration.
    const localModelTypes = ['ollama', 'localai', 'custom_local'];
    if (localModelTypes.includes(model.model_type.toLowerCase())) {
      this.validateLocalModelConfig(model, name);
    }
  }

  /**
   * Validates the optional `local_config` section of a local model.
   * All fields are optional; only fields that are present are checked.
   * @throws Error when a present field has the wrong type or range.
   */
  private validateLocalModelConfig(model: AIModelConfig, name: string): void {
    if (model.local_config) {
      const config = model.local_config;

      // Numeric parameters. The error messages promise "positive integer",
      // so enforce integrality too (BUGFIX: previously any positive number passed).
      if (config.context_length !== undefined && (!Number.isInteger(config.context_length) || config.context_length <= 0)) {
        throw new Error(`模型 ${name} 的 context_length 必须是正整数`);
      }
      if (config.temperature !== undefined && (typeof config.temperature !== 'number' || config.temperature < 0 || config.temperature > 2)) {
        throw new Error(`模型 ${name} 的 temperature 必须在 0-2 之间`);
      }
      if (config.max_tokens !== undefined && (!Number.isInteger(config.max_tokens) || config.max_tokens <= 0)) {
        throw new Error(`模型 ${name} 的 max_tokens 必须是正整数`);
      }
      if (config.timeout !== undefined && (!Number.isInteger(config.timeout) || config.timeout <= 0)) {
        throw new Error(`模型 ${name} 的 timeout 必须是正整数`);
      }

      // Ollama-specific parameters.
      // BUGFIX: compare against undefined rather than truthiness, so falsy
      // invalid values (0, false, null) no longer slip through unvalidated.
      if (model.model_type.toLowerCase() === 'ollama') {
        if (config.keep_alive !== undefined && typeof config.keep_alive !== 'string') {
          throw new Error(`模型 ${name} 的 keep_alive 必须是字符串`);
        }
      }

      // Custom request headers must be a plain object.
      // BUGFIX: typeof null === 'object' and arrays are objects too; reject both.
      if (config.custom_headers !== undefined &&
          (typeof config.custom_headers !== 'object' ||
           config.custom_headers === null ||
           Array.isArray(config.custom_headers))) {
        throw new Error(`模型 ${name} 的 custom_headers 必须是对象`);
      }
    }
  }

  /** Built-in fallback configuration used when the config file is unusable. */
  private getDefaultConfig(): AppConfig {
    return {
      main_ai_model: 'default',
      models: {
        default: {
          model_name: 'gpt-3.5-turbo',
          model_type: 'openai',
          model_url: 'https://api.openai.com/v1/chat/completions',
          model_token: ''
        },
        ollama_default: {
          model_name: 'llama2',
          model_type: 'ollama',
          model_url: 'http://localhost:11434/api/chat',
          local_config: {
            temperature: 0.7,
            max_tokens: 2000,
            stream: true,
            keep_alive: '5m'
          }
        }
      }
    };
  }

  /** Returns the cached config, or null if loadConfig() has not run yet. */
  getCurrentConfig(): AppConfig | null {
    return this.config;
  }

  /** Returns the main model's config, or null when unloaded/missing. */
  getCurrentModel(): AIModelConfig | null {
    if (!this.config) return null;
    return this.config.models[this.config.main_ai_model] || null;
  }

  /** Discards the cached config and loads it again from disk. */
  async reloadConfig(): Promise<AppConfig> {
    this.config = null;
    return await this.loadConfig();
  }

  /**
   * Returns the logging configuration merged over built-in defaults.
   * NOTE(review): `logging` is not part of the AppConfig type yet, hence the
   * defensive `as any` read — consider adding it to the type declaration.
   */
  getLogConfig(): LogConfig {
    const defaultLogConfig: LogConfig = {
      enabled: true,
      directory: './logs',
      rotation: 'hourly',
      level: 'INFO',
      maxFiles: 168,
      bufferSize: 1024,
      enableDebugLog: false,
      maxFileSize: 100,
      compressionEnabled: false
    };

    const logConfig = (this.config as any)?.logging;
    if (!logConfig) {
      return defaultLogConfig;
    }

    return {
      // `??` keeps explicit false/0 values while filling in absent ones.
      enabled: logConfig.enabled ?? defaultLogConfig.enabled,
      directory: logConfig.directory || defaultLogConfig.directory,
      rotation: logConfig.rotation || defaultLogConfig.rotation,
      level: logConfig.level || defaultLogConfig.level,
      maxFiles: logConfig.maxFiles ?? defaultLogConfig.maxFiles,
      bufferSize: logConfig.bufferSize ?? defaultLogConfig.bufferSize,
      enableDebugLog: logConfig.enableDebugLog ?? defaultLogConfig.enableDebugLog,
      maxFileSize: logConfig.maxFileSize ?? defaultLogConfig.maxFileSize,
      compressionEnabled: logConfig.compressionEnabled ?? defaultLogConfig.compressionEnabled
    };
  }

  /**
   * Probes a locally hosted model endpoint to check it is reachable.
   * Never rejects; failures are reported via the `error` field.
   */
  async testLocalModelConnection(modelName: string): Promise<{ success: boolean; error?: string }> {
    try {
      // loadConfig() never rejects and (after the bugfix above) always
      // leaves a non-null cached config, so no non-null assertion is needed.
      const config = this.config ?? (await this.loadConfig());

      const model = config.models[modelName];
      if (!model) {
        return { success: false, error: `找不到模型配置: ${modelName}` };
      }

      const localModelTypes = ['ollama', 'localai', 'custom_local'];
      if (!localModelTypes.includes(model.model_type.toLowerCase())) {
        return { success: false, error: `${modelName} 不是本地模型类型` };
      }

      const timeout = model.local_config?.timeout ?? 5000;

      // Ollama exposes a cheap listing endpoint (/api/tags) as a health check;
      // for other local APIs probe the server root (scheme + host only).
      const probeUrl =
        model.model_type.toLowerCase() === 'ollama'
          ? model.model_url.replace('/api/chat', '/api/tags')
          : new URL(model.model_url).origin;

      // Use the built-in fetch API (Node 18+) instead of require('axios'):
      // this file is an ES module and a reachability probe needs no
      // third-party HTTP client.
      const response = await fetch(probeUrl, { signal: AbortSignal.timeout(timeout) });
      // fetch() resolves on HTTP error statuses (axios rejected), so check explicitly.
      if (!response.ok) {
        throw new Error(`HTTP ${response.status}`);
      }

      return { success: true };
    } catch (error) {
      const message = error instanceof Error ? error.message : '未知错误';
      return {
        success: false,
        error: `连接失败: ${message || '未知错误'}`
      };
    }
  }
}

// Shared singleton used across the app for configuration access.
const sharedConfigManager = new ConfigManager();
export { sharedConfigManager as configManager };