// src/utils/apiClients.ts
import OpenAI from "openai";

/** A single message in a chat-completion conversation (OpenAI-style role/content pair). */
interface ChatMessage {
  role: 'system' | 'user' | 'assistant';
  content: string;
}

/** Configuration shared by all chat clients in this module. */
interface ApiClientConfig {
  /** API key; sent as a Bearer token by FetchChatClient and passed to the SDK by OpenAIClient. */
  apiKey: string;
  /** API base URL without the request path. NOTE(review): FetchChatClient builds `${baseURL}/chat/completions` from this, so it should be set for that client — confirm callers always provide it. */
  baseURL?: string;
  /** Model identifier forwarded verbatim in the request body. */
  model: string;
  /** Sampling temperature; FetchChatClient defaults it to 0.7 when omitted. */
  temperature?: number;
  /** Upper bound on generated tokens (sent as `max_tokens`); omitted when undefined. */
  maxTokens?: number;
}

/**
 * Common contract for chat-completion clients.
 *
 * Concrete subclasses implement `sendRequest`, which sends the conversation
 * and resolves with the assistant's complete reply text (accumulated from
 * incremental chunks when `stream` is true).
 */
abstract class BaseChatClient {
  /** @param config Client configuration, captured once at construction. */
  constructor(protected config: ApiClientConfig) {}

  /**
   * Send a chat-completion request.
   *
   * @param messages Conversation history to send.
   * @param stream   When true, deliver incremental text via `onStream`.
   * @param url      Optional endpoint override.
   * @param onStream Callback invoked with each streamed text chunk.
   * @returns The full assistant reply.
   */
  abstract sendRequest(
    messages: ChatMessage[],
    stream?: boolean,
    url?: string,
    onStream?: (chunk: string) => void
  ): Promise<string>;
}

/**
 * Chat client that talks to an OpenAI-compatible HTTP endpoint via `fetch`,
 * including server-sent-events (SSE) streaming support.
 */
export class FetchChatClient extends BaseChatClient {
  /**
   * Send a chat-completion request over HTTP.
   *
   * @param messages Conversation history.
   * @param stream   When true, parse the SSE response incrementally.
   * @param url      Endpoint override; defaults to `${config.baseURL}/chat/completions`.
   * @param onStream Callback receiving each streamed text chunk.
   * @returns The full assistant reply text.
   * @throws Error when the HTTP response status is not OK.
   */
  async sendRequest(
    messages: ChatMessage[],
    stream = false,
    // BUG FIX: the original wrote `url: 'https://api.deepseek.com/...'` — a
    // literal *type annotation*, not a default value — which made `url`
    // mandatory (violating the base class's `url?: string`) while the
    // computed `endpoint` from config.baseURL was built and then ignored.
    url?: string,
    onStream?: (chunk: string) => void
  ): Promise<string> {
    const endpoint = url ?? `${this.config.baseURL}/chat/completions`;

    const response = await fetch(endpoint, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${this.config.apiKey}`,
      },
      body: JSON.stringify({
        messages,
        model: this.config.model,
        temperature: this.config.temperature ?? 0.7,
        max_tokens: this.config.maxTokens,
        stream,
      }),
    });

    if (!response.ok) {
      throw new Error(`API Error: ${response.status} ${response.statusText}`);
    }

    if (stream) {
      return this.handleStreamResponse(response, onStream);
    }

    const data = await response.json();
    return data.choices[0].message.content;
  }

  /**
   * Consume an SSE stream, forwarding each content delta to `onStream` and
   * accumulating the complete reply text.
   */
  private async handleStreamResponse(
    response: Response,
    onStream?: (chunk: string) => void
  ): Promise<string> {
    const reader = response.body?.getReader();
    if (!reader) return '';

    const decoder = new TextDecoder();
    let buffer = '';
    let fullContent = '';

    // Parse one complete SSE line; silently skips non-data and invalid lines.
    const processLine = (line: string): void => {
      if (!line.startsWith('data: ')) return;
      const data = line.slice('data: '.length);
      if (data.trim() === '[DONE]') return; // end-of-stream sentinel, not JSON
      try {
        const parsed = JSON.parse(data);
        const chunk = parsed.choices[0]?.delta?.content || '';
        fullContent += chunk;
        onStream?.(chunk);
      } catch {
        // Skip malformed JSON lines.
      }
    };

    while (true) {
      const { done, value } = await reader.read();
      if (done) break;

      buffer += decoder.decode(value, { stream: true });
      // BUG FIX: pop the (possibly incomplete) trailing line *before*
      // processing. The original processed every line, including the partial
      // tail, and then re-queued that tail — so a tail that happened to be
      // valid JSON would be parsed and emitted twice.
      const lines = buffer.split('\n');
      buffer = lines.pop() ?? '';
      lines.forEach(processLine);
    }

    // BUG FIX: flush bytes retained by the decoder and process a trailing
    // line that was not newline-terminated (the original dropped both).
    buffer += decoder.decode();
    if (buffer) processLine(buffer);

    return fullContent;
  }
}

export class OpenAIClient extends BaseChatClient {
  private openai: any;

  constructor(config: ApiClientConfig) {
    super(config);
    this.openai = new OpenAI({
      apiKey: config.apiKey,
      baseURL: config.baseURL,
      dangerouslyAllowBrowser: true
    });
  }

  async sendRequest(
    messages: ChatMessage[],
    stream = false,
    url: 'https://api.deepseek.com/chat/completions',
    onStream?: (chunk: string) => void
  ): Promise<string> {
    const response = await this.openai.chat.completions.create({
      messages,
      model: this.config.model,
      temperature: this.config.temperature,
      max_tokens: this.config.maxTokens,
      stream,
    });

    if (stream) {
      return this.handleStream(response, onStream);
    }

    return response.choices[0].message.content;
  }

  private async handleStream(
    response: AsyncIterable<any>,
    onStream?: (chunk: string) => void
  ): Promise<string> {
    let fullContent = '';
    
    for await (const chunk of response) {
      const content = chunk.choices[0]?.delta?.content || '';
      fullContent += content;
      onStream?.(content);
    }

    return fullContent;
  }
}
