#include <cstdlib>
#include <iostream>
#include <string>

#include "openai/openai.hpp"
#include "tools/ReadAndWeite.hpp"

class Deepseek_call {
public:
  Deepseek_call() {
    // 读取api_key
    nlohmann::json json;
    read_from_file("resource/api_key.json", json);

    // 安全读取 JSON 字段，防止 null 值导致异常
    api_key = json.contains("api_key") && !json["api_key"].is_null()
                  ? json["api_key"].get<std::string>()
                  : "";

    web_curl_1 = json.contains("web_curl_1") && !json["web_curl_1"].is_null()
                     ? json["web_curl_1"].get<std::string>()
                     : "https://api.deepseek.com/v1"; // 默认值

    web_curl_2 = json.contains("web_curl_2") && !json["web_curl_2"].is_null()
                     ? json["web_curl_2"].get<std::string>()
                     : "https://api.deepseek.com/v1"; // 默认值

    selected_model =
        json.contains("selected_model") && !json["selected_model"].is_null()
            ? json["selected_model"].get<std::string>()
            : "web_curl_1";

    std::string api_base =
        (selected_model == "web_curl_2") ? web_curl_2 : web_curl_1;
    setenv("OPENAI_API_KEY", api_key.c_str(), 1); // 设置环境变量
    setenv("OPENAI_API_BASE", api_base.c_str(), 1);
  }
  std::string call_api(std::string &prompt) {
    try {
      openai::start();
      nlohmann::json req = {
          {"model", "deepseek-chat"},
          {"messages", {{{"role", "user"}, {"content", prompt}}}},
          {"max_tokens", 8192},
          {"temperature", 0.7}};
      auto chat = openai::chat().create(req);
      return chat["choices"][0]["message"]["content"];
    } catch (const std::exception &e) {
      std::cerr << "Error: " << e.what() << std::endl;
      return "sorry call api error";
    }
  }

private:
  std::string api_key;
  std::string web_curl_1;
  std::string web_curl_2;
  std::string selected_model;
};