/**
 * AI 模型配置管理
 * 支持阿里云百炼、DeepSeek、Ollama 等平台
 */

/** A chat-completion platform (Aliyun Bailian, DeepSeek, local Ollama, ...). */
export interface AIProvider {
  /** Stable internal id, e.g. 'aliyun' | 'deepseek' | 'ollama'. */
  id: string;
  /** Human-readable display name. */
  name: string;
  /** API root URL; endpoints like `/chat/completions` are appended to it. */
  baseUrl: string;
  /** User-supplied API key; empty string means "not configured". */
  apiKey: string;
  /** Models this provider offers. */
  models: AIModel[];
  /** Whether the user has switched this provider on. */
  enabled: boolean;
}

/** A single model exposed by a provider. */
export interface AIModel {
  /** Model id as sent in API requests (e.g. 'qwen-max'). */
  id: string;
  /** Display name shown in the UI. */
  name: string;
  /** Optional short description shown in the UI. */
  description?: string;
  /** Advertised token limit for the model (metadata only). */
  maxTokens?: number;
  /** True when the model accepts image input. */
  supportsVision?: boolean;
}

/** One message of a conversation, as stored locally. */
export interface ChatMessage {
  /** Unique message id. */
  id: string;
  /** OpenAI-style role. */
  role: 'user' | 'assistant' | 'system';
  /** Message text. */
  content: string;
  /** Creation time, ms since epoch. */
  timestamp: number;
  /** Model that produced this message, if any. */
  model?: string;
  /** Provider that produced this message, if any. */
  provider?: string;
}

/** Sampling parameters forwarded to the chat-completions API. */
export interface ChatConfig {
  /** Sampling temperature (sent as `temperature`). */
  temperature: number;
  /** Completion length cap (sent as `max_tokens`). */
  maxTokens: number;
  /** Nucleus sampling threshold (sent as `top_p`). */
  topP: number;
  // NOTE(review): callAI/callAIStream hardcode their own `stream` value;
  // this flag appears to select which of the two the UI calls.
  stream: boolean;
}

/** A persisted conversation: its messages plus display metadata. */
export interface ChatSession {
  /** Unique session id (see createNewSession). */
  id: string;
  /** Display title, derived from the first message. */
  title: string;
  /** Conversation content in chronological order. */
  messages: ChatMessage[];
  /** Creation time, ms since epoch. */
  createdAt: number;
  /** Last-modified time, ms since epoch (bumped by updateSession). */
  updatedAt: number;
  /** Provider used for this session, if any. */
  provider?: string;
  /** Model used for this session, if any. */
  model?: string;
}

// localStorage keys for this module's persisted state.
const STORAGE_KEY = 'feng-admin-ai-config'; // AIProvider[] incl. user API keys
const CHAT_HISTORY_KEY = 'feng-admin-chat-history'; // flat ChatMessage[] history
const SESSIONS_KEY = 'feng-admin-chat-sessions'; // ChatSession[]
const CURRENT_SESSION_KEY = 'feng-admin-current-session-id'; // active session id

/**
 * 默认 AI 提供商配置模板（不包含真实 API Key）
 * 用户需要在设置页面配置自己的 API Key
 */
export function getDefaultProviders(): AIProvider[] {
  return [
    {
      id: 'aliyun',
      name: '阿里云百炼',
      baseUrl: 'https://dashscope.aliyuncs.com/compatible-mode/v1',
      apiKey: '', // 请在设置页面配置
      models: [
        {
          id: 'qwen-max',
          name: 'Qwen Max',
          description: '通义千问最强模型',
          maxTokens: 8192,
          supportsVision: false,
        },
        {
          id: 'qwen-plus',
          name: 'Qwen Plus',
          description: '通义千问增强版',
          maxTokens: 8192,
          supportsVision: false,
        },
        {
          id: 'qwen-turbo',
          name: 'Qwen Turbo',
          description: '通义千问快速版',
          maxTokens: 8192,
          supportsVision: false,
        },
        {
          id: 'qwen-vl-plus',
          name: 'Qwen VL Plus',
          description: '通义千问视觉模型',
          maxTokens: 8192,
          supportsVision: true,
        },
      ],
      enabled: false,
    },
    {
      id: 'deepseek',
      name: 'DeepSeek',
      baseUrl: 'https://api.deepseek.com',
      apiKey: '', // 请在设置页面配置
      models: [
        {
          id: 'deepseek-chat',
          name: 'DeepSeek Chat',
          description: 'DeepSeek 对话模型',
          maxTokens: 4096,
          supportsVision: false,
        },
        {
          id: 'deepseek-coder',
          name: 'DeepSeek Coder',
          description: 'DeepSeek 代码模型',
          maxTokens: 4096,
          supportsVision: false,
        },
      ],
      enabled: false,
    },
    {
      id: 'ollama',
      name: 'Ollama (本地)',
      baseUrl: 'http://localhost:11434/v1',
      apiKey: 'ollama', // Ollama 不需要真实 API Key
      models: [
        {
          id: 'llama3.1',
          name: 'Llama 3.1',
          description: 'Meta Llama 3.1',
          maxTokens: 4096,
          supportsVision: false,
        },
        {
          id: 'qwen2.5',
          name: 'Qwen 2.5',
          description: '通义千问 2.5',
          maxTokens: 4096,
          supportsVision: false,
        },
        {
          id: 'mistral',
          name: 'Mistral',
          description: 'Mistral AI',
          maxTokens: 4096,
          supportsVision: false,
        },
      ],
      enabled: false,
    },
  ];
}

/**
 * Load the provider list, preferring the user's saved configuration
 * (API keys, enabled flags) from localStorage.
 *
 * Saved entries are matched to the current defaults by id, so providers
 * added in newer app versions still appear. Corrupted or non-array
 * storage payloads are now detected explicitly and fall back to the
 * defaults instead of relying on a swallowed TypeError.
 */
export function getProviders(): AIProvider[] {
  try {
    const data = localStorage.getItem(STORAGE_KEY);
    if (data) {
      const parsed: unknown = JSON.parse(data);
      // Guard the untyped JSON payload: anything but an array is treated
      // as corrupt and ignored.
      if (Array.isArray(parsed)) {
        const savedProviders = parsed as AIProvider[];
        return getDefaultProviders().map(defaultProvider =>
          savedProviders.find(p => p && p.id === defaultProvider.id) ?? defaultProvider
        );
      }
      console.error('Failed to load AI config:', new Error('saved config is not an array'));
    }
  } catch (error) {
    console.error('Failed to load AI config:', error);
  }
  return getDefaultProviders();
}

/**
 * Persist the full provider list (including user-entered API keys) to
 * localStorage.
 *
 * @throws Error('保存失败') when the write fails (e.g. quota exceeded);
 *         the underlying error is logged first.
 */
export function saveProviders(providers: AIProvider[]): void {
  try {
    localStorage.setItem(STORAGE_KEY, JSON.stringify(providers));
  } catch (cause) {
    console.error('Failed to save AI config:', cause);
    throw new Error('保存失败');
  }
}

/**
 * Pick the active provider: the first one that is both enabled and has a
 * non-empty API key. Returns null when no provider qualifies.
 */
export function getCurrentProvider(providers: AIProvider[]): AIProvider | null {
  for (const candidate of providers) {
    if (candidate.enabled && candidate.apiKey) {
      return candidate;
    }
  }
  return null;
}

/**
 * First model of the currently active provider, or null when no provider
 * is enabled with an API key (or the active provider lists no models).
 */
export function getCurrentModel(providers: AIProvider[]): AIModel | null {
  // Same selection rule as getCurrentProvider, inlined here.
  const active = providers.find(p => p.enabled && p.apiKey);
  if (!active) return null;
  return (active.models[0] ?? null);
}

/**
 * Merge a partial update into the provider with the given id and persist
 * the result. Unknown ids are silently ignored.
 */
export function updateProvider(id: string, updates: Partial<AIProvider>): void {
  const providers = getProviders();
  const target = providers.findIndex(p => p.id === id);
  if (target === -1) return;
  providers[target] = { ...providers[target], ...updates };
  saveProviders(providers);
}

/**
 * Load the flat chat history from localStorage. Returns an empty array
 * when nothing is stored or the payload cannot be parsed.
 */
export function getChatHistory(): ChatMessage[] {
  try {
    const raw = localStorage.getItem(CHAT_HISTORY_KEY);
    return raw ? JSON.parse(raw) : [];
  } catch (cause) {
    console.error('Failed to load chat history:', cause);
    return [];
  }
}

/**
 * Persist the flat chat history. Failures are logged, not thrown — this
 * is best-effort persistence.
 */
export function saveChatHistory(messages: ChatMessage[]): void {
  try {
    const serialized = JSON.stringify(messages);
    localStorage.setItem(CHAT_HISTORY_KEY, serialized);
  } catch (cause) {
    console.error('Failed to save chat history:', cause);
  }
}

/**
 * Delete the flat chat history from localStorage (sessions are untouched).
 */
export function clearChatHistory(): void {
  localStorage.removeItem(CHAT_HISTORY_KEY);
}

/**
 * Call the provider's OpenAI-compatible chat-completions endpoint
 * (non-streaming) and return the assistant's reply text.
 *
 * @param provider Supplies baseUrl and apiKey.
 * @param model    Model whose id is sent as the `model` field.
 * @param messages Conversation so far; reduced to {role, content} pairs.
 * @param config   Sampling parameters (temperature, max_tokens, top_p).
 * @throws Error when the HTTP response is not ok, or when the response
 *         body lacks `choices[0].message.content` (previously this
 *         crashed with an opaque TypeError).
 */
export async function callAI(
  provider: AIProvider,
  model: AIModel,
  messages: ChatMessage[],
  config: ChatConfig
): Promise<string> {
  // Strip local-only fields (id/timestamp/...) down to the wire format.
  const apiMessages = messages.map(msg => ({
    role: msg.role,
    content: msg.content,
  }));

  const response = await fetch(`${provider.baseUrl}/chat/completions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${provider.apiKey}`,
    },
    body: JSON.stringify({
      model: model.id,
      messages: apiMessages,
      temperature: config.temperature,
      max_tokens: config.maxTokens,
      top_p: config.topP,
      stream: false, // non-streaming variant; see callAIStream for SSE
    }),
  });

  if (!response.ok) {
    const error = await response.text();
    throw new Error(`API 调用失败: ${response.statusText}\n${error}`);
  }

  const data = await response.json();
  // Guard against malformed payloads instead of crashing on
  // `data.choices[0].message.content` when `choices` is missing/empty.
  const content = data?.choices?.[0]?.message?.content;
  if (typeof content !== 'string') {
    throw new Error('API 返回格式异常: 缺少 choices[0].message.content');
  }
  return content;
}

/**
 * Streaming variant of callAI: POSTs to the OpenAI-compatible
 * chat-completions endpoint with `stream: true` and yields each content
 * delta as it arrives over the server-sent-events (SSE) response.
 *
 * @param provider Supplies baseUrl and apiKey.
 * @param model    Model whose id is sent as the `model` field.
 * @param messages Conversation so far; reduced to {role, content} pairs.
 * @param config   Sampling parameters (temperature, max_tokens, top_p).
 * @throws Error when the HTTP response is not ok or has no readable body.
 */
export async function* callAIStream(
  provider: AIProvider,
  model: AIModel,
  messages: ChatMessage[],
  config: ChatConfig
): AsyncGenerator<string> {
  // Strip local-only fields (id/timestamp/...) down to the wire format.
  const apiMessages = messages.map(msg => ({
    role: msg.role,
    content: msg.content,
  }));

  const response = await fetch(`${provider.baseUrl}/chat/completions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${provider.apiKey}`,
    },
    body: JSON.stringify({
      model: model.id,
      messages: apiMessages,
      temperature: config.temperature,
      max_tokens: config.maxTokens,
      top_p: config.topP,
      stream: true,
    }),
  });

  if (!response.ok) {
    const error = await response.text();
    throw new Error(`API 调用失败: ${response.statusText}\n${error}`);
  }

  const reader = response.body?.getReader();
  if (!reader) throw new Error('无法读取响应流');

  const decoder = new TextDecoder();
  // Accumulates a partial SSE line when it straddles a chunk boundary.
  let buffer = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;

    // { stream: true } keeps multi-byte UTF-8 sequences split across
    // chunks from being decoded as replacement characters.
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split('\n');
    // The final element may be an incomplete line; carry it to the next read.
    buffer = lines.pop() || '';

    for (const line of lines) {
      if (line.startsWith('data: ')) {
        const data = line.slice(6);
        if (data === '[DONE]') continue;

        try {
          const json = JSON.parse(data);
          const content = json.choices[0]?.delta?.content;
          if (content) {
            yield content;
          }
        } catch (error) {
          // Skip malformed SSE payloads rather than aborting the stream.
          console.error('Failed to parse SSE data:', error);
        }
      }
    }
  }
  // NOTE(review): any bytes left in `buffer` when the stream closes are
  // dropped. OpenAI-style streams end with a newline-terminated
  // `data: [DONE]`, so this looks safe — confirm for other providers.
}

/**
 * Load all persisted chat sessions from localStorage. Returns an empty
 * array when nothing is stored or the payload cannot be parsed.
 */
export function getChatSessions(): ChatSession[] {
  try {
    const raw = localStorage.getItem(SESSIONS_KEY);
    return raw ? JSON.parse(raw) : [];
  } catch (cause) {
    console.error('Failed to load chat sessions:', cause);
    return [];
  }
}

/**
 * Persist the full session list. Failures are logged, not thrown — this
 * is best-effort persistence.
 */
export function saveChatSessions(sessions: ChatSession[]): void {
  try {
    const serialized = JSON.stringify(sessions);
    localStorage.setItem(SESSIONS_KEY, serialized);
  } catch (cause) {
    console.error('Failed to save chat sessions:', cause);
  }
}

/**
 * Create a new chat session, persist it at the top of the session list,
 * and make it the current session.
 *
 * @returns the freshly created (empty) session.
 */
export function createNewSession(): ChatSession {
  const now = Date.now();
  const session: ChatSession = {
    // Random suffix prevents id collisions when two sessions are created
    // within the same millisecond (the previous `session-${Date.now()}`
    // could collide). Ids remain opaque strings, so this is compatible.
    id: `session-${now}-${Math.random().toString(36).slice(2, 8)}`,
    title: '新对话',
    messages: [],
    createdAt: now,
    updatedAt: now,
  };

  const sessions = getChatSessions();
  sessions.unshift(session); // newest first
  saveChatSessions(sessions);
  setCurrentSessionId(session.id);

  return session;
}

/**
 * Merge a partial update into the session with the given id, bump its
 * updatedAt timestamp, and persist. Unknown ids are silently ignored.
 */
export function updateSession(sessionId: string, updates: Partial<ChatSession>): void {
  const sessions = getChatSessions();
  const target = sessions.findIndex(s => s.id === sessionId);
  if (target === -1) return;
  sessions[target] = {
    ...sessions[target],
    ...updates,
    updatedAt: Date.now(), // always wins over a stale updatedAt in `updates`
  };
  saveChatSessions(sessions);
}

/**
 * Remove a session by id. If it was the current session, switch to the
 * first remaining one (or clear the pointer with '').
 */
export function deleteSession(sessionId: string): void {
  const remaining = getChatSessions().filter(s => s.id !== sessionId);
  saveChatSessions(remaining);

  if (getCurrentSessionId() === sessionId) {
    setCurrentSessionId(remaining[0]?.id ?? '');
  }
}

/**
 * Id of the session the chat UI is currently showing, or null when none
 * has ever been set.
 */
export function getCurrentSessionId(): string | null {
  return localStorage.getItem(CURRENT_SESSION_KEY);
}

/**
 * Persist the id of the session the chat UI should display next.
 */
export function setCurrentSessionId(sessionId: string): void {
  localStorage.setItem(CURRENT_SESSION_KEY, sessionId);
}

/**
 * Derive a session title from the first message: the trimmed text,
 * truncated to 20 characters with an ellipsis when it was longer.
 *
 * Fix: the ellipsis decision now compares against the TRIMMED length.
 * The original compared `firstMessage.length` (untrimmed), so a short
 * message padded with whitespace wrongly gained a trailing '...'.
 */
export function generateSessionTitle(firstMessage: string): string {
  const maxLength = 20;
  const trimmed = firstMessage.trim();
  const title = trimmed.slice(0, maxLength);
  return trimmed.length > maxLength ? `${title}...` : title;
}

/**
 * Query a provider for its live model list.
 *
 * Ollama is special-cased: its native API lives at the server root
 * (`/api/tags`) rather than under `/v1`. Every other provider is assumed
 * to speak the OpenAI-compatible `GET /models` protocol.
 * Errors are logged with the provider name and re-thrown for the caller.
 */
export async function fetchProviderModels(provider: AIProvider): Promise<AIModel[]> {
  // Minimal shapes of the two response formats we consume.
  interface OllamaModel {
    name: string;
    size: number;
  }
  interface OpenAIModel {
    id: string;
    created: number;
    description?: string;
  }

  try {
    if (provider.id === 'ollama') {
      // Strip the OpenAI-compat prefix to reach Ollama's native endpoint.
      const tagsUrl = `${provider.baseUrl.replace('/v1', '')}/api/tags`;
      const res = await fetch(tagsUrl);
      if (!res.ok) throw new Error('无法连接到 Ollama');

      const payload = await res.json() as { models?: OllamaModel[] };
      const tags = payload.models ?? [];
      return tags.map((entry) => ({
        id: entry.name,
        name: entry.name,
        description: `大小: ${(entry.size / 1024 / 1024 / 1024).toFixed(2)} GB`,
        maxTokens: 4096,
        supportsVision: false,
      }));
    }

    // OpenAI-compatible listing (Aliyun Bailian, DeepSeek).
    const res = await fetch(`${provider.baseUrl}/models`, {
      headers: {
        'Authorization': `Bearer ${provider.apiKey}`,
      },
    });
    if (!res.ok) {
      throw new Error(`获取模型列表失败: ${res.statusText}`);
    }

    const payload = await res.json() as { data?: OpenAIModel[] };
    const entries = payload.data ?? [];
    return entries.map((entry) => ({
      id: entry.id,
      name: entry.id,
      description: entry.description || `创建于: ${new Date(entry.created * 1000).toLocaleDateString('zh-CN')}`,
      maxTokens: 8192, // default guess; real limits vary per model
      supportsVision: entry.id.includes('vl') || entry.id.includes('vision'),
    }));
  } catch (error) {
    console.error(`获取 ${provider.name} 模型列表失败:`, error);
    throw error;
  }
}

/**
 * Re-fetch the model list for one provider and persist the updated
 * configuration.
 *
 * @throws when the provider id is unknown, when a non-Ollama provider has
 *         no API key configured, or when the remote fetch fails.
 */
export async function refreshProviderModels(providerId: string): Promise<void> {
  const providers = getProviders();
  const target = providers.find(p => p.id === providerId);

  if (!target) {
    throw new Error('提供商不存在');
  }
  // Ollama runs locally and needs no key; every other provider does.
  if (target.id !== 'ollama' && !target.apiKey) {
    throw new Error('请先配置 API Key');
  }

  target.models = await fetchProviderModels(target);
  saveProviders(providers);
}

