// AI model configuration manager.
// Persists provider settings (OpenAI / Ollama) in localStorage under a single
// key and exposes static accessors for the pieces callers need.
export class ModelConfig {
  static CONFIG_KEY = 'aiConfig'

  // Default configuration; used as the base layer for every read/write so
  // newly added fields always have a value even for configs saved by older
  // versions of the app.
  static DEFAULT_CONFIG = {
    provider: 'openai',
    openai: {
      apiKey: '',
      model: 'gpt-3.5-turbo',
      temperature: 0.7
    },
    ollama: {
      serverUrl: 'http://localhost:11434',
      model: ''
    }
  }

  /**
   * Merge a (possibly partial or nullish-valued) config over the defaults.
   * One level of nested merge for the `openai` and `ollama` sections —
   * spreading `undefined` is a harmless no-op, so missing sections are safe.
   * Always builds a fresh object so DEFAULT_CONFIG is never exposed by
   * reference.
   * @param {object} config - partial config to overlay
   * @returns {object} new fully-populated config object
   */
  static #mergeWithDefaults(config) {
    return {
      ...this.DEFAULT_CONFIG,
      ...config,
      openai: { ...this.DEFAULT_CONFIG.openai, ...config.openai },
      ollama: { ...this.DEFAULT_CONFIG.ollama, ...config.ollama }
    }
  }

  /**
   * Read the stored configuration, merged over the defaults.
   * Falls back to (a copy of) the defaults when storage is unavailable,
   * empty, or contains invalid JSON.
   * @returns {object} fully-populated config (never a shared reference)
   */
  static getConfig() {
    try {
      const raw = localStorage.getItem(this.CONFIG_KEY)
      // Merging even the stored value guards against legacy/partial configs
      // that lack newer sections (previously crashed the nested accessors).
      return this.#mergeWithDefaults(raw ? JSON.parse(raw) : {})
    } catch (error) {
      console.error('读取模型配置失败:', error)
      return this.#mergeWithDefaults({})
    }
  }

  /**
   * Persist a (partial) configuration, merged over the defaults.
   * @param {object} [config] - partial config; nullish saves pure defaults
   * @returns {boolean} true on success, false if storage failed
   */
  static saveConfig(config) {
    try {
      const mergedConfig = this.#mergeWithDefaults(config ?? {})
      localStorage.setItem(this.CONFIG_KEY, JSON.stringify(mergedConfig))
      return true
    } catch (error) {
      console.error('保存模型配置失败:', error)
      return false
    }
  }

  /** @returns {string} active provider id ('openai' or 'ollama') */
  static getModelType() {
    return this.getConfig().provider
  }

  /** @returns {string} OpenAI API key ('' when unset) */
  static getApiKey() {
    return this.getConfig().openai.apiKey
  }

  /** @returns {string} Ollama server base URL */
  static getApiEndpoint() {
    return this.getConfig().ollama.serverUrl
  }

  /** @returns {string} model name for the currently selected provider */
  static getModelName() {
    const config = this.getConfig()
    return config.provider === 'openai'
      ? config.openai.model
      : config.ollama.model
  }

  // NOTE(review): only the OpenAI section carries a temperature; this is
  // returned regardless of the active provider, matching existing behavior.
  /** @returns {number} sampling temperature from the OpenAI section */
  static getTemperature() {
    return this.getConfig().openai.temperature
  }

  /** @returns {object} copy of the OpenAI section of the config */
  static getOpenAIConfig() {
    return this.getConfig().openai
  }

  /** @returns {object} copy of the Ollama section of the config */
  static getOllamaConfig() {
    return this.getConfig().ollama
  }
}