import { request } from './http';

// LLM provider configuration (read from Vite build-time env vars)
const OPENAI_API_KEY = import.meta.env.VITE_OPENAI_API_KEY;
const OPENAI_API_BASE_URL = import.meta.env.VITE_OPENAI_API_BASE_URL || 'https://api.openai.com/v1';
const ANTHROPIC_API_KEY = import.meta.env.VITE_ANTHROPIC_API_KEY;
const ANTHROPIC_API_BASE_URL = import.meta.env.VITE_ANTHROPIC_API_BASE_URL || 'https://api.anthropic.com';

/** A single chat message exchanged with an LLM provider. */
export interface Message {
  role: 'system' | 'user' | 'assistant';
  content: string;
}

/** Request body for the OpenAI Chat Completions endpoint. */
export interface OpenAIRequest {
  model: string;
  messages: Message[];
  /** Sampling temperature; 0 is a valid (deterministic-leaning) value. */
  temperature?: number;
  max_tokens?: number;
  /** When true the API responds with a server-sent-event stream. */
  stream?: boolean;
}

/** Request body for the Anthropic Messages endpoint. */
export interface AnthropicRequest {
  model: string;
  messages: Message[];
  /** Required by the Anthropic API (no server-side default). */
  max_tokens: number;
  temperature?: number;
  /** System prompt — Anthropic takes it as a top-level field, not a message role. */
  system?: string;
}

/** Provider-agnostic response shape returned by both service classes. */
export interface LLMResponse {
  /** Text of the first completion/content block (empty string when absent). */
  content: string;
  model: string;
  /** Token accounting as reported by the provider, when available. */
  usage?: {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
  };
}

// OpenAI service — thin client for the Chat Completions endpoint.
export class OpenAIService {
  private readonly apiKey: string;
  private readonly baseURL: string;

  constructor(apiKey?: string, baseURL?: string) {
    this.apiKey = apiKey || OPENAI_API_KEY || '';
    this.baseURL = baseURL || OPENAI_API_BASE_URL;
  }

  /**
   * Sends `messages` to /chat/completions and resolves with the first choice.
   * @throws Error when no API key is configured or the request fails.
   */
  async chat(messages: Message[], options: Partial<OpenAIRequest> = {}): Promise<LLMResponse> {
    const response = await this.post({
      model: options.model || 'gpt-3.5-turbo',
      messages,
      // `??` (not `||`) so an explicit temperature of 0 is respected.
      temperature: options.temperature ?? 0.7,
      max_tokens: options.max_tokens ?? 1000,
      stream: options.stream ?? false,
    });

    const data = await response.json();
    return {
      content: data.choices[0]?.message?.content || '',
      model: data.model,
      usage: data.usage,
    };
  }

  /**
   * Same as chat() but forces streaming and returns the raw SSE body stream.
   * @throws Error when the response carries no body stream.
   */
  async streamChat(messages: Message[], options: Partial<OpenAIRequest> = {}): Promise<ReadableStream> {
    const response = await this.post({
      model: options.model || 'gpt-3.5-turbo',
      messages,
      temperature: options.temperature ?? 0.7,
      max_tokens: options.max_tokens ?? 1000,
      stream: true,
    });

    // Explicit guard instead of `response.body!` — body can legitimately be null.
    if (!response.body) {
      throw new Error('OpenAI API request failed');
    }
    return response.body;
  }

  // Shared POST helper: key check, auth header, error unwrapping.
  private async post(body: OpenAIRequest): Promise<Response> {
    if (!this.apiKey) {
      throw new Error('OpenAI API Key is required');
    }

    const response = await fetch(`${this.baseURL}/chat/completions`, {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${this.apiKey}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(body),
    });

    if (!response.ok) {
      // Error bodies are usually JSON, but proxies/gateways may return
      // plain text — don't let the parse failure mask the real error.
      const error = await response.json().catch(() => null);
      throw new Error(error?.error?.message || 'OpenAI API request failed');
    }
    return response;
  }
}

// Anthropic service — thin client for the Messages endpoint.
export class AnthropicService {
  private readonly apiKey: string;
  private readonly baseURL: string;

  constructor(apiKey?: string, baseURL?: string) {
    this.apiKey = apiKey || ANTHROPIC_API_KEY || '';
    this.baseURL = baseURL || ANTHROPIC_API_BASE_URL;
  }

  /**
   * Sends `messages` to /v1/messages and resolves with the first content block.
   * @throws Error when no API key is configured or the request fails.
   */
  async chat(messages: Message[], options: Partial<AnthropicRequest> = {}): Promise<LLMResponse> {
    const response = await this.post({
      model: options.model || 'claude-3-sonnet-20240229',
      messages,
      max_tokens: options.max_tokens ?? 1000,
      // `??` (not `||`) so an explicit temperature of 0 is respected.
      temperature: options.temperature ?? 0.7,
      system: options.system,
    });

    const data = await response.json();
    return {
      content: data.content[0]?.text || '',
      model: data.model,
      usage: data.usage,
    };
  }

  /**
   * Same as chat() but forces streaming and returns the raw SSE body stream.
   * @throws Error when the response carries no body stream.
   */
  async streamChat(messages: Message[], options: Partial<AnthropicRequest> = {}): Promise<ReadableStream> {
    const response = await this.post({
      model: options.model || 'claude-3-sonnet-20240229',
      messages,
      max_tokens: options.max_tokens ?? 1000,
      temperature: options.temperature ?? 0.7,
      system: options.system,
      stream: true,
    });

    // Explicit guard instead of `response.body!` — body can legitimately be null.
    if (!response.body) {
      throw new Error('Anthropic API request failed');
    }
    return response.body;
  }

  // Shared POST helper: key check, auth/version headers, error unwrapping.
  private async post(body: AnthropicRequest & { stream?: boolean }): Promise<Response> {
    if (!this.apiKey) {
      throw new Error('Anthropic API Key is required');
    }

    const response = await fetch(`${this.baseURL}/v1/messages`, {
      method: 'POST',
      headers: {
        'x-api-key': this.apiKey,
        'Content-Type': 'application/json',
        'anthropic-version': '2023-06-01',
      },
      body: JSON.stringify(body),
    });

    if (!response.ok) {
      // Error bodies are usually JSON, but proxies/gateways may return
      // plain text — don't let the parse failure mask the real error.
      const error = await response.json().catch(() => null);
      throw new Error(error?.error?.message || 'Anthropic API request failed');
    }
    return response;
  }
}

// LLM服务管理器
export class LLMServiceManager {
  private openai: OpenAIService;
  private anthropic: AnthropicService;

  constructor() {
    this.openai = new OpenAIService();
    this.anthropic = new AnthropicService();
  }

  getOpenAI(): OpenAIService {
    return this.openai;
  }

  getAnthropic(): AnthropicService {
    return this.anthropic;
  }

  // 通用聊天方法，自动选择可用的服务
  async chat(messages: Message[], provider: 'openai' | 'anthropic' = 'openai', options: any = {}): Promise<LLMResponse> {
    try {
      if (provider === 'openai') {
        return await this.openai.chat(messages, options);
      } else if (provider === 'anthropic') {
        return await this.anthropic.chat(messages, options);
      } else {
        throw new Error(`Unsupported provider: ${provider}`);
      }
    } catch (error) {
      console.error(`LLM chat error (${provider}):`, error);
      throw error;
    }
  }

  // 流式聊天
  async streamChat(messages: Message[], provider: 'openai' | 'anthropic' = 'openai', options: any = {}): Promise<ReadableStream> {
    try {
      if (provider === 'openai') {
        return await this.openai.streamChat(messages, options);
      } else if (provider === 'anthropic') {
        return await this.anthropic.streamChat(messages, options);
      } else {
        throw new Error(`Unsupported provider: ${provider}`);
      }
    } catch (error) {
      console.error(`LLM stream chat error (${provider}):`, error);
      throw error;
    }
  }
}

// Shared default instance used across the app.
export const llmService = new LLMServiceManager(); 