/**
 * Call an OpenAI-compatible chat-completions endpoint and return the reply text.
 *
 * @param {string} apiKey - Bearer token placed in the Authorization header.
 * @param {string} endpoint - Full chat-completions URL.
 * @param {string} model - Model identifier to request.
 * @param {string} [prompt] - User message; defaults to a sample question.
 * @param {string} [sys_prompt] - System message; defaults to a generic assistant prompt.
 * @returns {Promise<string|undefined>} Assistant reply content, or undefined on
 *   failure (errors are logged, not rethrown — the provider wrappers below do
 *   not handle rejections).
 */
export async function callOpenAICompatibleAPI(
  apiKey,
  endpoint,
  model,
  prompt = '你是谁？',
  sys_prompt = 'You are a helpful assistant.'
) {
  const payload = {
    model,
    messages: [
      { role: 'system', content: sys_prompt },
      { role: 'user', content: prompt }
    ]
  };

  try {
    const response = await fetch(endpoint, {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${apiKey}`,
        'Content-Type': 'application/json'
      },
      body: JSON.stringify(payload)
    });

    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }

    const data = await response.json();

    // choices[0] is the only completion since `n` is not set in the payload.
    return data.choices[0].message.content;
  } catch (error) {
    // Best-effort contract: log and resolve to undefined rather than reject.
    console.error('Error calling API:', error);
  }
}



/**
 * Stream a chat completion from an OpenAI-compatible endpoint, invoking
 * `callback` with each content delta as it arrives (SSE `data:` lines).
 *
 * Correctness fixes over the naive loop:
 *  - checks `response.ok` before reading the body (an error page is not SSE);
 *  - decodes with `{ stream: true }` so multi-byte UTF-8 sequences split
 *    across network chunks are not corrupted;
 *  - buffers the trailing partial line of each chunk so a JSON payload split
 *    across two chunks does not crash `JSON.parse`;
 *  - returns the accumulated full text (previously computed but discarded).
 *
 * @param {string} apiKey - Bearer token for the Authorization header.
 * @param {string} endpoint - Full chat-completions URL.
 * @param {string} model - Model identifier to request.
 * @param {string} prompt - Single user message.
 * @param {(text: string) => void} callback - Receives each non-empty delta;
 *   on failure receives a localized error message instead.
 * @returns {Promise<string|undefined>} Full accumulated text, or undefined on error.
 */
export async function streamCallLLM(apiKey, endpoint, model, prompt, callback) {
  try {
    const response = await fetch(endpoint, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${apiKey}`
      },
      body: JSON.stringify({
        model: model,
        messages: [{ role: "user", content: prompt }],
        stream: true // request server-sent-event streaming
      })
    });

    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }

    const reader = response.body.getReader();
    const decoder = new TextDecoder("utf-8");
    let partialText = "";
    let buffer = ""; // holds an unterminated SSE line between reads

    while (true) {
      const { done, value } = await reader.read();
      if (done) break;

      // stream:true keeps incomplete multi-byte sequences pending in the decoder.
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      buffer = lines.pop(); // last piece may be a partial line — keep for next read

      for (const line of lines) {
        if (line.trim() === "") continue;
        const message = line.replace(/^data: /, "");
        if (message === "[DONE]") continue; // end-of-stream sentinel

        const data = JSON.parse(message);
        // delta may carry only a role (no content) — skip empty callbacks.
        const text = data.choices[0]?.delta?.content || "";
        if (text) {
          partialText += text;
          callback(text); // deliver the delta in real time
        }
      }
    }

    return partialText;
  } catch (error) {
    console.error("API 请求失败:", error);
    callback(`错误: ${error.message}`);
  }
}

// GLM
/**
 * One-shot GLM (Zhipu BigModel) chat completion.
 * SECURITY: the API key is hard-coded below — move it to configuration/env.
 *
 * @param {string} prompt - User message.
 * @returns {Promise<string|undefined>} Reply text, or undefined on failure.
 */
export async function callGlmAPI(prompt) {
    const endpoint = 'https://open.bigmodel.cn/api/paas/v4/chat/completions';
    const key = '734c6ee4a5524052a1fc44580660dc38.F5A02rjHWxocAsaT';
    const model = 'glm-4-flash';

    // `$` is never declared or imported in this module; call the sibling export directly.
    return callOpenAICompatibleAPI(key, endpoint, model, prompt);
}

/**
 * Streaming GLM (Zhipu BigModel) chat completion.
 * SECURITY: the API key is hard-coded below — move it to configuration/env.
 *
 * @param {string} prompt - User message.
 * @param {(text: string) => void} callback - Receives each streamed delta.
 * @returns {Promise<string|undefined>} Resolves when the stream ends.
 */
export async function streamGlmApi(prompt, callback) {
    const endpoint = 'https://open.bigmodel.cn/api/paas/v4/chat/completions';
    const key = '734c6ee4a5524052a1fc44580660dc38.F5A02rjHWxocAsaT';
    const model = 'glm-4-flash';

    // `$` is never declared or imported in this module; call the sibling export directly.
    return streamCallLLM(key, endpoint, model, prompt, callback);
}


//Hunyuan
/**
 * One-shot Tencent Hunyuan chat completion.
 * SECURITY: the API key is hard-coded below — move it to configuration/env.
 *
 * @param {string} prompt - User message.
 * @returns {Promise<string|undefined>} Reply text, or undefined on failure.
 */
export async function callHunyuanAPI(prompt) {
    const endpoint = 'https://api.hunyuan.cloud.tencent.com/v1/chat/completions';
    const key = 'sk-D7a4oH67FQOS2CwbwPJ2j0BgJzTULrGiP7f3XZ2d0Lkvox97';
    const model = 'hunyuan-lite';

    // `$` is never declared or imported in this module; call the sibling export directly.
    return callOpenAICompatibleAPI(key, endpoint, model, prompt);
}

/**
 * One-shot Baidu Qianfan chat completion (DeepSeek-R1 deployment).
 * SECURITY: the API key is hard-coded below — move it to configuration/env.
 *
 * @param {string} prompt - User message.
 * @returns {Promise<string|undefined>} Reply text, or undefined on failure.
 */
export async function callQianfanAPI(prompt) {
    const endpoint = 'https://qianfan.baidubce.com/v2/chat/completions';
    const key = 'bce-v3/ALTAK-C8XfhToFk6gwdkfrB4mJF/0efb847335867c8dad3df660bfe5ce873b55cf97';
    const model = 'deepseek-r1';

    // `$` is never declared or imported in this module; call the sibling export directly.
    return callOpenAICompatibleAPI(key, endpoint, model, prompt);
}


//siliconflow

/**
 * One-shot SiliconFlow chat completion (DeepSeek-R1 distill, 1.5B).
 * SECURITY: the API key is hard-coded below — move it to configuration/env.
 *
 * @param {string} prompt - User message.
 * @returns {Promise<string|undefined>} Reply text, or undefined on failure.
 */
export async function callSiliconflowAPI(prompt) {
    const endpoint = 'https://api.siliconflow.cn/v1/chat/completions';
    const key = 'sk-mduiudxrtwhptysueaghbhhoycbzhdfeqiubqrmmejgjgvwx';
    const model = 'deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B';

    // `$` is never declared or imported in this module; call the sibling export directly.
    return callOpenAICompatibleAPI(key, endpoint, model, prompt);
}

/**
 * Streaming SiliconFlow chat completion (DeepSeek-R1 distill, 7B).
 * The original declared the 1.5B model and immediately overwrote it with the
 * 7B model — the dead first assignment is removed; effective model unchanged.
 * SECURITY: the API key is hard-coded below — move it to configuration/env.
 *
 * @param {string} prompt - User message.
 * @param {(text: string) => void} callback - Receives each streamed delta.
 * @returns {Promise<string|undefined>} Resolves when the stream ends.
 */
export async function streamSiliconflowAPI(prompt, callback) {
    const endpoint = 'https://api.siliconflow.cn/v1/chat/completions';
    const key = 'sk-mduiudxrtwhptysueaghbhhoycbzhdfeqiubqrmmejgjgvwx';
    const model = 'deepseek-ai/DeepSeek-R1-Distill-Qwen-7B';

    // `$` is never declared or imported in this module; call the sibling export directly.
    return streamCallLLM(key, endpoint, model, prompt, callback);
}


//qwq

/**
 * One-shot QwQ-32B chat completion via the suanli.cn free tier.
 * SECURITY: the API key is hard-coded below — move it to configuration/env.
 *
 * @param {string} prompt - User message.
 * @returns {Promise<string|undefined>} Reply text, or undefined on failure.
 */
export async function callQwQAPI(prompt) {
    const endpoint = 'https://api.suanli.cn/v1/chat/completions';
    const key = 'sk-W0rpStc95T7JVYVwDYc29IyirjtpPPby6SozFMQr17m8KWeo';
    const model = 'free:QwQ-32B';

    // `$` is never declared or imported in this module; call the sibling export directly.
    return callOpenAICompatibleAPI(key, endpoint, model, prompt);
}

/**
 * Streaming QwQ-32B chat completion via the suanli.cn free tier.
 * SECURITY: the API key is hard-coded below — move it to configuration/env.
 *
 * @param {string} prompt - User message.
 * @param {(text: string) => void} callback - Receives each streamed delta.
 * @returns {Promise<string|undefined>} Resolves when the stream ends.
 */
export async function streamQwQAPI(prompt, callback) {
    const endpoint = 'https://api.suanli.cn/v1/chat/completions';
    const key = 'sk-W0rpStc95T7JVYVwDYc29IyirjtpPPby6SozFMQr17m8KWeo';
    const model = 'free:QwQ-32B';

    // `$` is never declared or imported in this module; call the sibling export directly.
    return streamCallLLM(key, endpoint, model, prompt, callback);
}