/**
 * DeepSeek Provider
 * Although the `openai` SDK is fairly generic and works against DeepSeek's
 * OpenAI-compatible endpoint, it is still recommended to consult DeepSeek's
 * own documentation for provider-specific behavior.
 *
 * Docs: https://api-docs.deepseek.com/zh-cn/
 */
import type { LLMCompletionsData, LLMCompletionsCb, LLMAttachCheckRes } from '@/provider/llm/type'
import { llmBaseProvider } from '@/provider/llm/baseProvider'
import OpenAI from 'openai'
import { geneTitlePrompt, clearContent, isBase64Image } from '@/provider/llm/hooks/utils'

class DeepSeekProvider extends llmBaseProvider {
  // Shared OpenAI-compatible client, pointed at DeepSeek's endpoint.
  private static client: OpenAI
  // Abort controller for the in-flight streaming request.
  private static signalController: AbortController
  // Cooperative cancellation flag checked inside the stream loop.
  private static isCanceled = false

  /**
   * Create the OpenAI-compatible client against DeepSeek's base URL.
   * DeepSeek implements the OpenAI chat-completions wire format, so the
   * official `openai` SDK is reused as-is.
   */
  protected init() {
    DeepSeekProvider.client = new OpenAI({
      baseURL: 'https://api.deepseek.com',
      // Placeholder fallback so client construction never throws; real
      // requests will simply fail auth until a key is configured.
      apiKey: this.getApiKey() || 'DEFAULT_API',
      // This code runs in a browser/renderer context, not Node.
      dangerouslyAllowBrowser: true,
    })
  }

  /**
   * Probe the API with a cheap call to verify key/endpoint availability.
   * @returns true when the models listing succeeds, false otherwise.
   */
  public async accessCheck() {
    try {
      await DeepSeekProvider.client.models.list()
      return true
    } catch {
      return false
    }
  }

  /**
   * Report attachment capabilities for the given model.
   * @param model - model descriptor; `vision` gates image attachments.
   *   NOTE(review): `LLMModel` is not imported in this file — presumably an
   *   ambient global type; verify against the project's type declarations.
   */
  public attachCheck(model: LLMModel): LLMAttachCheckRes {
    return {
      allowVision: !!model.vision,
      // 20 MiB per-attachment limit for vision inputs.
      visionLimit: 20 * 1024 * 1024,
    }
  }

  /**
   * Return the statically configured model list.
   * The OpenAI-style `/models` endpoint usually returns only bare model
   * names, so the curated default list is preferred here.
   */
  public async getModels() {
    return this.getDefaultModels()
  }

  /**
   * Generate a short conversation title from the message history.
   * @returns a single-line plain-text title, or '' on any API failure.
   *
   * NOTE(review): this uses the OpenAI *Responses* API
   * (`client.responses.create`); DeepSeek's documentation only describes the
   * chat-completions endpoint, so this call may 404 against
   * `api.deepseek.com`. Confirm, and port to `chat.completions.create`
   * (adapting `geneTitlePrompt`'s output into a messages array) if so.
   */
  public async geneTitle(model: string, messages: LLMMessage[]) {
    try {
      const result = await DeepSeekProvider.client.responses.create({
        model,
        input: geneTitlePrompt(messages),
        // Low temperature / top_p for a stable, focused title.
        temperature: 0.3,
        top_p: 0.5,
        // max_output_tokens: 100,
      })

      // Strip any markup and newlines so the title is one plain line.
      return result.output_text
        .replace(/<[^>]*>/g, '')
        .replace(/\n/g, '')
        .trim()
    } catch (error) {
      window.console.error(error)
      return ''
    }
  }

  /**
   * Stream a chat completion, reporting progress through `callback`.
   * Local 'error' messages are filtered out; user attachments are inlined
   * as base64 (demo only — use a storage service in production).
   * @param data - model, sampling params, and the message history to send.
   * @param callback - onUpdate (accumulated text per chunk), onSuccess
   *   (final or partial-on-abort text), onError (unexpected failures).
   */
  public async completions(data: LLMCompletionsData, callback: LLMCompletionsCb) {
    let fullText = ''
    const messages: OpenAI.ChatCompletionMessageParam[] = []
    for (const item of data.messages) {
      if (item.role === 'user') {
        const content: OpenAI.ChatCompletionContentPart[] = []
        // Attachments are submitted inline as base64 for demo purposes only;
        // in production, upload to a storage service and pass URLs instead.
        // See: https://platform.openai.com/docs/guides/pdf-files?api-mode=responses
        if (item.attachs?.length) {
          for (const attach of item.attachs) {
            if (isBase64Image(attach.url)) {
              // Image attachment
              content.push({
                type: 'image_url',
                image_url: {
                  url: attach.url,
                },
              })
            } else {
              // Non-image attachment (e.g. a document)
              content.push({
                type: 'file',
                file: {
                  filename: attach.name,
                  file_data: attach.url,
                },
              })
            }
          }
        }
        // Append the user's text after any attachments.
        content.push({
          type: 'text',
          text: clearContent(item.content),
        })
        messages.push({
          role: item.role,
          content,
        })
      } else if (item.role !== 'error') {
        // Non-user, non-error messages carry plain text; local 'error'
        // entries are UI-only and must not be sent to the API.
        messages.push({
          role: item.role,
          content: clearContent(item.content),
        })
      }
    }
    // Fresh cancellation state for this request.
    DeepSeekProvider.signalController = new AbortController()
    DeepSeekProvider.isCanceled = false
    try {
      const prediction = await DeepSeekProvider.client.chat.completions.create(
        {
          model: data.model,
          messages,
          temperature: data.temperature,
          // `|| undefined` deliberately treats 0 as "unset".
          max_tokens: data.max_tokens || undefined,
          stream: true,
        },
        {
          // openai-node accepts an AbortSignal per request; aborting it
          // rejects the pending request/stream with APIUserAbortError.
          signal: DeepSeekProvider.signalController.signal,
        },
      )
      for await (const chunk of prediction) {
        // Cooperative cancellation: stop consuming once abort() was called.
        if (DeepSeekProvider.isCanceled) {
          window.console.warn('Abort stream!')
          break
        }
        fullText += chunk.choices[0]?.delta?.content ?? ''
        callback.onUpdate(fullText)
      }
      callback.onSuccess(fullText)
    } catch (error) {
      window.console.error(error)
      // The catch binding is `unknown` under strict TS — narrow before
      // touching `.name`. openai-node wraps signal aborts in
      // APIUserAbortError, while a raw fetch abort surfaces as a
      // DOMException named 'AbortError'; treat both as user cancellation.
      if (
        error instanceof OpenAI.APIUserAbortError ||
        (error instanceof Error && error.name === 'AbortError')
      ) {
        // User-initiated abort: surface the partial text as a success.
        callback.onSuccess(fullText)
      } else {
        // Normalize non-Error throws so the callback always gets an Error.
        callback.onError(error instanceof Error ? error : new Error(String(error)))
      }
    } finally {
      DeepSeekProvider.isCanceled = false
    }
  }

  /**
   * Abort the current streaming request, if any.
   * The cooperative flag is set before aborting the controller so the
   * stream-consuming loop exits even if the HTTP-level abort is a no-op.
   */
  public async abort() {
    DeepSeekProvider.isCanceled = true
    DeepSeekProvider.signalController?.abort()
  }
}

export default DeepSeekProvider
