import OpenAI, { AzureOpenAI } from "openai";
import {
  ChatCompletionMessageParam,
  ChatCompletionTool,
} from "openai/resources";

import { kEnvs } from "../utils/env";
import { withDefault } from "../utils/base";
import { ChatCompletionCreateParamsBase } from "openai/resources/chat/completions";
import { Logger } from "../utils/log";
import { kProxyAgent } from "./proxy";
import { isNotEmpty } from "../utils/is";

/** Options for a single chat request (both one-shot and streaming). */
export interface ChatOptions {
  /** The user message (may carry a timestamp/speaker prefix that the Dify path strips). */
  user: string;
  /** Optional system prompt; omitted from the message list when empty. */
  system?: string;
  /** Model name; defaults to the Azure deployment, then OPENAI_MODEL env, then "gpt-4o". */
  model?: ChatCompletionCreateParamsBase["model"];
  /** Tool/function definitions forwarded to the completions API. */
  tools?: Array<ChatCompletionTool>;
  /** When true, request a JSON-object response_format. */
  jsonMode?: boolean;
  /** Identifier used to register an abort callback so cancel(requestId) works. */
  requestId?: string;
  /** When true, log prompt/answer according to traceInput/traceOutput flags. */
  trace?: boolean;
  /** Qwen-specific web-search switch (defaults to QWEN_ENABLE_SEARCH env in chatStream). */
  enableSearch?: boolean;
}

/**
 * Thin client over the OpenAI SDK supporting three backends, selected by env:
 * Azure OpenAI -> Dify (via the SDK's raw `post`) -> plain OpenAI.
 * Requests can be cancelled by `requestId` via {@link OpenAIClient.cancel}.
 */
class OpenAIClient {
  /** Log the prompt before each request when a call passes `trace: true`. */
  traceInput = false;
  /** Log the answer after each request when a call passes `trace: true`. */
  traceOutput = true;
  private _logger = Logger.create({ tag: "Open AI" });

  /** Azure deployment name; refreshed from env on every `_init()`. */
  deployment?: string;

  private _client?: OpenAI;

  // requestId -> callback that aborts the matching in-flight request.
  private _abortCallbacks: Record<string, VoidFunction> = {};

  /**
   * Lazily build the SDK client. Idempotent: a client created earlier is kept.
   * Backend priority: Azure OpenAI, then Dify (OpenAI-compatible endpoint),
   * then plain OpenAI with env-provided credentials.
   */
  private _init() {
    this.deployment = kEnvs.AZURE_OPENAI_DEPLOYMENT;
    if (this._client) {
      return;
    }
    if (kEnvs.AZURE_OPENAI_API_KEY) {
      this._client = new AzureOpenAI({
        httpAgent: kProxyAgent,
        deployment: this.deployment,
      });
    } else if (kEnvs.DIFY_API_KEY) {
      this._client = new OpenAI({
        apiKey: kEnvs.DIFY_API_KEY,
        baseURL: kEnvs.DIFY_BASE_URL,
        defaultHeaders: {
          Authorization: `Bearer ${kEnvs.DIFY_API_KEY}`,
          "Content-Type": "application/json",
        },
        httpAgent: kProxyAgent,
      });
    } else {
      this._client = new OpenAI({ httpAgent: kProxyAgent });
    }
  }

  /**
   * Normalize a Dify response (`{ answer: string }`) into the OpenAI
   * chat-completion shape so downstream code can uniformly read
   * `choices[0].message` (blocking) and `choices[0].delta` (streaming).
   * Anything without an `answer` field passes through unchanged.
   */
  private _formatDifyResponse(response: any) {
    if (!response) return null;
    if (kEnvs.DIFY_API_KEY && response.answer) {
      return {
        choices: [
          {
            message: {
              content: response.answer,
              role: "assistant",
            },
            delta: {
              content: response.answer,
            },
          },
        ],
      };
    }
    return response;
  }

  /**
   * Build a Dify `/chat-messages` request body, or `null` when Dify is not
   * configured (callers use the null to fall back to the OpenAI API).
   * Strips the "<date> <weekday> <time> <speaker>: " prefix that upstream
   * prepends to the user message.
   */
  private _formatDifyRequest(options: ChatOptions & {
    stream?: boolean;
  }) {
    if (!kEnvs.DIFY_API_KEY) return null;
    const { user, system } = options;
    // Drop timestamp/speaker prefixes, e.g. "2024年01月01日星期一 12:00 Alice: "
    const cleanedQuery = user.replace(
      /\d{4}年\d{2}月\d{2}日星期[一二三四五六日]\s+\d{2}:\d{2}\s+[^:]+:\s*/g,
      ""
    );
    return {
      query: cleanedQuery,
      inputs: system ? { system } : {},
      response_mode: options.stream ? "streaming" : "blocking",
      user: "default",
    };
  }

  /**
   * One-shot (non-streaming) chat completion.
   *
   * @returns the assistant message (`{ role, content, ... }`), or
   *   `undefined`/`null` when the request failed or produced no choices.
   */
  async chat(options: ChatOptions) {
    this._init();
    const {
      user,
      system,
      tools,
      jsonMode,
      requestId,
      trace = false,
      model = this.deployment ?? kEnvs.OPENAI_MODEL ?? "gpt-4o",
    } = options;
    if (trace && this.traceInput) {
      this._logger.log(
        `🔥 onAskAI\n🤖️ System: ${system ?? "None"}\n😊 User: ${user}`.trim()
      );
    }

    // Register an abort hook so cancel(requestId) can kill this request.
    let signal: AbortSignal | undefined;
    if (requestId) {
      const controller = new AbortController();
      this._abortCallbacks[requestId] = () => controller.abort();
      signal = controller.signal;
    }
    const systemMsg: ChatCompletionMessageParam[] = isNotEmpty(system)
      ? [{ role: "system", content: system! }]
      : [];
    const difyRequest = this._formatDifyRequest(options);
    // NOTE: the .catch() is attached to the whole ternary so the Dify branch
    // is covered too (previously a failed /chat-messages call rejected
    // unhandled instead of returning null).
    const chatCompletion = await (difyRequest
      ? this._client!.post("/chat-messages", {
          body: difyRequest,
          stream: false,
          signal,
        })
      : this._client!.chat.completions.create(
          {
            model,
            tools,
            messages: [...systemMsg, { role: "user", content: user }],
            response_format: jsonMode ? { type: "json_object" } : undefined,
          },
          { signal }
        )
    ).catch((e: unknown) => {
      this._logger.error("LLM 响应异常", e);
      return null;
    });
    if (requestId) {
      delete this._abortCallbacks[requestId];
    }
    const formattedResponse = this._formatDifyResponse(chatCompletion);
    const message = formattedResponse?.choices?.[0]?.message;
    if (trace && this.traceOutput) {
      this._logger.log(`✅ Answer1: ${message?.content ?? "None"}`.trim());
    }
    return message;
  }

  /**
   * Streaming chat completion. Calls `onStream` with each text fragment as
   * it arrives and returns the full concatenated answer.
   *
   * @returns the complete answer text, or `undefined` when the stream
   *   failed, was cancelled, or produced no text.
   */
  async chatStream(
    options: ChatOptions & {
      onStream?: (text: string) => void;
    }
  ) {
    this._init();
    const {
      user,
      system,
      tools,
      jsonMode,
      requestId,
      onStream,
      trace = false,
      model = this.deployment ?? kEnvs.OPENAI_MODEL ?? "gpt-4o",
      enableSearch = kEnvs.QWEN_ENABLE_SEARCH,
    } = options;
    if (trace && this.traceInput) {
      this._logger.log(
        `🔥 onAskAI\n🤖️ System: ${system ?? "None"}\n😊 User: ${user}`.trim()
      );
    }
    const systemMsg: ChatCompletionMessageParam[] = isNotEmpty(system)
      ? [{ role: "system", content: system! }]
      : [];
    const difyRequest = this._formatDifyRequest({ ...options, stream: true });
    // NOTE: the .catch() is attached to the whole ternary so a failed Dify
    // request also resolves to null instead of rejecting unhandled.
    const stream = await (difyRequest
      ? (this._client!.post("/chat-messages", {
          body: difyRequest,
          stream: true,
        }) as any)
      : this._client!.chat.completions.create({
          model,
          tools,
          stream: true,
          messages: [...systemMsg, { role: "user", content: user }],
          response_format: jsonMode ? { type: "json_object" } : undefined,
          // Qwen-specific extension; only included when enabled.
          ...(enableSearch && { enable_search: true }),
        })
    ).catch((e: unknown) => {
      this._logger.error("LLM 响应异常", e);
      return null;
    });

    if (!stream) return;
    if (requestId) {
      // The SDK's Stream exposes its AbortController; use it to cancel mid-stream.
      this._abortCallbacks[requestId] = () => {
        const streamWithController = stream as { controller?: AbortController };
        if (streamWithController.controller instanceof AbortController) {
          streamWithController.controller.abort();
        }
      };
    }

    let content = "";
    for await (const chunk of stream) {
      // Dify keep-alive (ping) events carry no payload.
      if (typeof chunk === "string" && chunk.includes("event: ping")) {
        continue;
      }

      try {
        // Dify SSE chunks arrive as "data: {...}" strings; OpenAI chunks are
        // already-parsed objects.
        const chunkData =
          typeof chunk === "string"
            ? JSON.parse(chunk.replace(/^data: /, "").trim())
            : chunk;
        const formattedChunk = this._formatDifyResponse(chunkData);
        const text = formattedChunk?.choices?.[0]?.delta?.content || "";
        // cancel() removes our callback; a missing entry means "aborted",
        // so discard everything collected so far.
        const aborted = requestId && !(requestId in this._abortCallbacks);
        if (aborted) {
          content = "";
          break;
        }
        if (text) {
          onStream?.(text);
          content += text;
        }
      } catch (error) {
        // Skip malformed chunks instead of killing the whole stream.
        this._logger.error("流式响应解析异常", error);
      }
    }

    if (requestId) {
      delete this._abortCallbacks[requestId];
    }
    if (trace && this.traceOutput) {
      this._logger.log(`✅ Answer2: ${content ?? "None"}`.trim());
    }
    // Map "" (empty/aborted) to undefined for callers.
    return withDefault(content, undefined);
  }

  /** Abort the in-flight request registered under `requestId`, if any. */
  cancel(requestId: string) {
    const abort = this._abortCallbacks[requestId];
    if (abort) {
      abort();
      delete this._abortCallbacks[requestId];
    }
  }
}

// Shared singleton; the underlying SDK client is created lazily on first use.
export const openai = new OpenAIClient();
