// Copyright (C) 2024 Kumo inc.
// Author: Jeff.li lijippy@163.com
// All rights reserved.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published
// by the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program.  If not, see <https://www.gnu.org/licenses/>.
//

#include <kllm/tools/oai/oai.h>
#include <kllm/tools/service_context.h>
#include <turbo/bootstrap/servlet.h>
#include <turbo/flags/flag.h>
#include <krpc/utility/logging.h>
#include <krpc/rpc/server.h>
#include <krpc/rpc/restful.h>
#include <krpc/json2pb/pb_to_json.h>
#include <krpc/rpc/restful_service.h>
#include <kllm/openai/oai_processor.h>

// Command-line flag read in run_oai_server() and forwarded to
// krpc::ServerOptions::idle_timeout_sec. The default of -1 presumably disables
// idle-connection closing (brpc-style convention) — confirm against krpc docs.
TURBO_FLAG(int32_t, idle_timeout_s, -1, "Connection will be closed if there is no "
                                        "read/write operations during the last `idle_timeout_s'");

namespace kllm {
    // Process-wide context for the OpenAI-compatible service.
    // File-local (static) on purpose: only this translation unit touches it.
    static ServiceContext oai_context;

    // Defined below; installed as the `oai` subcommand callback in setup_oai_service().
    static void run_oai_server();
    /// @brief Registers the `oai` subcommand on the given CLI application and
    ///        wires it to run_oai_server().
    /// @param app CLI application to extend (not owned).
    /// @return Always turbo::OkStatus(); registration itself cannot fail here.
    turbo::Status setup_oai_service(turbo::cli::App *app) {
        auto cmd = app->add_subcommand("oai", "openai service");
        // Bind the shared parameter set to this subcommand's options.
        oai_context.params_context =
                ParamsContext::setup_app_context(cmd, ServiceContext::params, LLAMA_EXAMPLE_SERVER);
        // Attach the common logging options, then hook up the entry point.
        turbo::Servlet::setup_log_option(cmd);
        cmd->callback(run_oai_server);
        return turbo::OkStatus();
    }

    // Entry point for the `oai` subcommand: finalizes parsed parameters, brings
    // up the inference context and HTTP endpoints, then blocks until the
    // process is asked to quit (Ctrl-C), shutting the context down afterwards.
    void run_oai_server() {
        // Apply any post-parse fixups to the shared parameter set.
        ServiceContext::call_after_parse();
        // Generally you only need one Server.
        // Expose krpc's builtin status/debug services on this server.
        turbo::set_flag(&FLAGS_enable_rpc_builtin, true);
        // load the model
        LOG_INF("%s: loading model\n", __func__);
        // NOTE(review): state flips to READY immediately with no work between
        // the two log lines — presumably the real model load happens inside
        // start_context_async()/setup_oai_api(); confirm the logs reflect reality.
        oai_context.state.store(SERVER_STATE_READY);
        LOG_INF("%s: model loaded\n", __func__);
        krpc::Server server;

        //auto *ins = krpc::RestfulService::instance();

        // Register the OpenAI-compatible REST endpoints ...
        setup_oai_api(&ServiceContext::ctx_server);

        // ... and the bundled web UI.
        setup_oai_ui(&ServiceContext::ctx_server);

        // Spin up the inference context worker(s) before accepting traffic.
        oai_context.start_context_async();

        // Start the server.
        krpc::ServerOptions options;
        // Negative flag value presumably disables the idle timeout — see the
        // FLAGS_idle_timeout_s description.
        options.idle_timeout_sec = turbo::get_flag(FLAGS_idle_timeout_s);
        if(!ServiceContext::params.ssl_file_cert.empty()) {
            // TLS is enabled when a certificate path is supplied. Assumes
            // ssl_file_key is set alongside it — TODO confirm both are
            // validated at option-parsing time.
            options.mutable_ssl_options()->default_cert.certificate = ServiceContext::params.ssl_file_cert;
            options.mutable_ssl_options()->default_cert.private_key = ServiceContext::params.ssl_file_key;
            // NOTE(review): empty string overrides the library's default cipher
            // list — verify this falls back to OpenSSL defaults rather than
            // disabling all ciphers.
            options.mutable_ssl_options()->ciphers = "";
        }

        if (server.Start(ServiceContext::params.port, &options) != 0) {
            LOG(ERROR) << "Fail to start HttpServer";
            // Tear down the context started above before bailing out.
            oai_context.stop_context_async();
            return;
        }

        // Wait until Ctrl-C is pressed, then Stop() and Join() the server.
        server.RunUntilAskedToQuit();
        oai_context.stop_context_async();
    }
}  // namespace kllm
