// AI message type definition
/**
 * A single chat message exchanged with the AI model.
 */
export interface AIMessage {
  /** Author of the message: the end user or the AI assistant. */
  role: 'user' | 'assistant';
  /** Plain-text body of the message. */
  content: string;
}

// Streaming-response callback types
/** Invoked on every stream update with the FULL accumulated text so far (not just the delta). */
type OnDataCallback = (text: string) => void;
/** Invoked once when the stream finishes normally. */
type OnCompleteCallback = () => void;
/** Invoked when streaming fails; receives the causing Error. */
type OnErrorCallback = (error: Error) => void;

// Ollama API configuration
// Default local Ollama HTTP endpoint.
const OLLAMA_BASE_URL = 'http://localhost:11434';
const MODEL_NAME = 'deepseek-r1:7b';  // updated to the locally installed model name

// Check whether the Ollama service is reachable
/**
 * Pings the Ollama HTTP API to verify the service is running.
 *
 * @returns true when `GET /api/tags` responds with a 2xx status;
 *          false on network failure, timeout, or a non-OK response.
 */
const checkOllamaService = async (): Promise<boolean> => {
  try {
    // Abort after 3s so an unresponsive server cannot stall the caller forever.
    const response = await fetch(`${OLLAMA_BASE_URL}/api/tags`, {
      signal: AbortSignal.timeout(3000),
    });
    return response.ok;
  } catch {
    // Any failure (connection refused, timeout, DNS) means "not available".
    return false;
  }
};

// Check whether the configured model has been pulled into Ollama
/**
 * Queries Ollama's installed-model list and looks for MODEL_NAME.
 *
 * @returns true when MODEL_NAME appears in the `GET /api/tags` payload;
 *          false on any network error, timeout, non-OK response, or a
 *          missing/malformed model list.
 */
const checkModelAvailable = async (): Promise<boolean> => {
  try {
    // Same 3s timeout as checkOllamaService to avoid indefinite hangs.
    const response = await fetch(`${OLLAMA_BASE_URL}/api/tags`, {
      signal: AbortSignal.timeout(3000),
    });
    if (!response.ok) return false;
    // Type the payload shape instead of using `any` in the predicate.
    const data = (await response.json()) as { models?: Array<{ name?: string }> };
    return data.models?.some((model) => model.name === MODEL_NAME) ?? false;
  } catch {
    return false;
  }
};

// Fetch a streaming AI response
/**
 * Streams a chat completion from the local Ollama server.
 *
 * Verifies that the service is up and the model is installed, then POSTs
 * the conversation to `/api/chat` with `stream: true` and incrementally
 * parses the NDJSON response body.
 *
 * @param messages   Full conversation history to send to the model.
 * @param onData     Called on every content chunk with the accumulated text so far.
 * @param onComplete Called once when the stream ends normally.
 * @param onError    Called with an Error on any failure; this function never throws.
 */
export const getAIResponseStream = async (
  messages: AIMessage[],
  onData: OnDataCallback,
  onComplete: OnCompleteCallback,
  onError: OnErrorCallback
): Promise<void> => {
  try {
    // First make sure the Ollama service is reachable at all.
    const isServiceAvailable = await checkOllamaService();
    if (!isServiceAvailable) {
      throw new Error(
        'Ollama服务未运行。请确保：\n' +
        '1. 已安装Ollama\n' +
        '2. 运行了 ollama serve 命令\n' +
        '3. 确保端口11434未被占用'
      );
    }

    // Then make sure the configured model has been pulled.
    const isModelAvailable = await checkModelAvailable();
    if (!isModelAvailable) {
      throw new Error(
        '模型未安装。请在终端中运行以下命令：\n' +
        'ollama pull deepseek-r1:7b\n' +
        '等待下载完成后再试。'
      );
    }

    const response = await fetch(`${OLLAMA_BASE_URL}/api/chat`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        model: MODEL_NAME,
        messages: messages.map(msg => ({
          role: msg.role,
          content: msg.content
        })),
        stream: true,
      }),
    });

    if (!response.ok) {
      // Prefer the server-provided error message when the body is JSON.
      const errorText = await response.text();
      let errorMessage = `HTTP error! status: ${response.status}`;
      try {
        const errorJson = JSON.parse(errorText);
        if (errorJson.error) {
          errorMessage = errorJson.error;
        }
      } catch {
        errorMessage += `, message: ${errorText}`;
      }
      throw new Error(errorMessage);
    }

    if (!response.body) {
      throw new Error('Response body is null');
    }

    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let accumulatedText = '';
    // Buffers a trailing partial NDJSON line until the next chunk arrives.
    let pending = '';

    while (true) {
      const { done, value } = await reader.read();

      if (done) {
        onComplete();
        break;
      }

      // `stream: true` keeps multi-byte UTF-8 sequences that are split
      // across chunks from being decoded as replacement characters.
      pending += decoder.decode(value, { stream: true });

      // Only complete lines are parseable JSON. Keep the final (possibly
      // partial) line buffered for the next chunk — previously a chunk
      // boundary mid-line caused spurious JSON parse failures.
      const lines = pending.split('\n');
      pending = lines.pop() ?? '';

      for (const line of lines) {
        if (!line.trim()) continue;

        let parsed: { message?: { content?: string }; done?: boolean; error?: string };
        try {
          parsed = JSON.parse(line);
        } catch (e) {
          // A malformed line is logged and skipped; parse failures must not
          // be conflated with application-level errors (handled below).
          console.error('Error parsing JSON:', e);
          console.error('Raw line:', line);
          continue;
        }

        // Surface server-reported errors with their original message —
        // outside the parse try/catch so they are not swallowed/re-wrapped.
        if (parsed.error) {
          throw new Error(parsed.error);
        }

        if (parsed.message?.content) {
          accumulatedText += parsed.message.content;
          onData(accumulatedText);
        }

        if (parsed.done) {
          onComplete();
          return;
        }
      }
    }
  } catch (error) {
    console.error('Error in getAIResponseStream:', error);
    onError(error instanceof Error ? error : new Error('未知错误，请检查Ollama服务是否正常运行'));
  }
};