import { onMessage } from 'webext-bridge/background'

// Model-request proxy: background-script endpoint that performs the fetch
// (browser CORS rules require it to run here) and returns the final SSE payload.
onMessage('get-request-glm', async (res: any) => {
  // Shape assumed from usage: { url, method?, headers?, data? } — TODO confirm against sender.
  const data: any = res.data
  const method: string = data.method || 'GET'

  // NOTE: any network failure rejects the handler's promise, which webext-bridge
  // propagates to the caller — previously the promise could hang forever.
  const response = await fetch(data.url, {
    headers: {
      'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
      'Cache-Control': 'no-cache',
      'Connection': 'keep-alive',
      'Content-Type': 'application/json',
      'Origin': 'https://chatglm.cn',
      'Pragma': 'no-cache',
      'Referer': 'https://chatglm.cn/main/alltoolsdetail',
      'Sec-Fetch-Dest': 'empty',
      'Sec-Fetch-Mode': 'cors',
      'Sec-Fetch-Site': 'same-origin',
      'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36',
      'accept': 'text/event-stream',
      'sec-ch-ua': '"Google Chrome";v="123", "Not:A-Brand";v="8", "Chromium";v="123"',
      'sec-ch-ua-mobile': '?0',
      'sec-ch-ua-platform': '"Windows"',
      ...data.headers,
    },
    method,
    // fetch() throws a TypeError when a GET/HEAD request carries a body,
    // and 'GET' is the default method — only attach a body otherwise.
    ...(method === 'GET' || method === 'HEAD'
      ? {}
      : { body: JSON.stringify(data.data) }),
  })

  const reader = response.body?.getReader()
  if (!reader)
    return ''

  const decoder = new TextDecoder('utf-8')
  // The response is a server-sent-event stream; we keep only the payload of
  // the LAST `data:` line seen, which is the fully accumulated model answer.
  let lastPayload = ''
  // Carry-over for a line split across two network chunks.
  let buffered = ''

  while (true) {
    const { done, value } = await reader.read()
    if (done)
      break
    // { stream: true } holds incomplete multi-byte UTF-8 sequences until the
    // next chunk instead of emitting replacement characters.
    buffered += decoder.decode(value, { stream: true })
    const lines = buffered.split('\n')
    // The trailing element may be an incomplete line — keep it for next round.
    buffered = lines.pop() ?? ''
    for (const line of lines) {
      // Accept both `data: {...}` and `data:{...}` (SSE allows either).
      if (line.startsWith('data:'))
        lastPayload = line.slice(5).trimStart()
    }
  }

  // Flush any final complete line left in the carry-over buffer.
  if (buffered.startsWith('data:'))
    lastPayload = buffered.slice(5).trimStart()

  return lastPayload
})
