#pragma once
#include "Ai_Server.hpp"
#include <fstream>

class Customer_Service : public Ai_Server
{
public:
    /// Stationery e-commerce customer-service chat endpoint.
    /// On construction it seeds the inherited `json` request payload with a
    /// system prompt built from the product manual, so the first API call is
    /// already grounded in store/product knowledge.
    Customer_Service(boost::asio::io_context& io_context, ssl::context& context, const std::string& server, const std::string& port)
        : Ai_Server(io_context, context, server, port)
    {
        // NOTE(review): a virtual call inside a constructor dispatches to
        // Customer_Service::initjson, never to a further-derived override —
        // fine today, but a trap if this class is ever subclassed.
        initjson();
    }

    // Initialize the corpus/prompt payload sent to the chat-completions API.
    // Reads the product manual ("说明书.txt") from the working directory and
    // embeds it into the system message. If the file cannot be opened, the
    // payload is left cleared (best-effort: no throw).
    virtual void initjson()
    {
        json.clear();
        // std::ifstream, not std::fstream: we only read the manual, and the
        // fstream default (in|out) mode can fail where read-only would not.
        std::ifstream f("说明书.txt");
        if (!f.is_open()) {
            return;
        }
        // Slurp the whole manual into one string.
        std::string fileContent((std::istreambuf_iterator<char>(f)), std::istreambuf_iterator<char>());
        // No explicit close(): the stream is closed by RAII at scope exit.

        // System prompt = role instructions + the manual as grounding text.
        std::string str;
        str += "初始化一个文具电商人工客服。";
        str += "根据产品说明书，回答用户关于产品和店铺的问题，拒绝回答无关的提问。\n";
        str += fileContent;

        boost::json::object o1;
        o1["content"] = str;
        o1["role"] = "system";
        boost::json::object o2;
        o2["content"] = "你好";
        o2["role"] = "user";

        boost::json::array ar;
        ar.push_back(o1);
        ar.push_back(o2);
        json["messages"] = ar;
        json["model"] = "deepseek-chat";
        json["frequency_penalty"] = 0;
        json["max_tokens"] = 2048;
        json["presence_penalty"] = 0;
        boost::json::object o3;
        o3["type"] = "text";
        json["response_format"] = o3;
        // The bare subscripts below are intentional: boost::json's
        // operator[] inserts a JSON null for a missing key, which is how
        // the API expects these optional fields to be sent when unused.
        json["stop"];
        json["stream"] = false;
        json["stream_options"];
        json["temperature"] = 1;
        json["top_p"] = 1;
        json["tools"];
        json["tool_choice"] = "none";
        json["logprobs"] = false;
        json["top_logprobs"];
    }
};