#ifndef __EXTERN_CPP
#define __EXTERN_CPP

// #include "chatglm.h"
// #include "chatglm.cpp"
// #include "chatglm_cpp_sys/src/lib.rs.h"
#include "extern.h"

#include <algorithm>
#include <cctype>
#include <memory>
#include <string>
#include <vector>

#include "rust/cxx.h"

namespace chatglm
{
    // reference: https://stackoverflow.com/questions/216823/how-to-trim-a-stdstring

    // trim from start (in place)
    // In-place left trim: erase every leading whitespace character from `s`.
    static inline void lltrim(std::string &s)
    {
        auto first_non_space = std::find_if(s.begin(), s.end(), [](unsigned char ch)
                                            { return std::isspace(ch) == 0; });
        s.erase(s.begin(), first_non_space);
    }
    std::unique_ptr<GenerationConfig> generation_config_new(int max_length, int max_new_tokens, int max_context_length,
                                                            bool do_sample, int top_k, float top_p, float temperature,
                                                            float repetition_penalty, int num_threads)
    {
        return std::unique_ptr<GenerationConfig>(new GenerationConfig(max_length, max_new_tokens, max_context_length,
                                                                      do_sample, top_k, top_p, temperature,
                                                                      repetition_penalty, num_threads));
    }
    /// Build an ExternPipeline from a model path and a generation config,
    /// returning ownership to the Rust caller.
    /// `gen_config` is dereferenced and copied — it must not be null.
    std::unique_ptr<ExternPipeline> extern_pipeline_new(rust::Str path, std::unique_ptr<GenerationConfig> gen_config)
    {
        // std::string(path) copies exactly data()..data()+size(); rust::Str is
        // length-delimited, not NUL-terminated.
        return std::make_unique<ExternPipeline>(std::string(path), *gen_config);
    }
    // Default-construct the streamer; members keep their declared initial
    // values. Callbacks are installed later via set_callback().
    ExternStreamer::ExternStreamer()
    {
    }
    /// Receive newly generated token ids, re-decode the accumulated cache, and
    /// forward the newly printable portion of the text to the Rust-side
    /// process callback. Output is held back while the decoded text ends in
    /// punctuation or an incomplete multi-byte character, since the next token
    /// may still change that tail.
    void ExternStreamer::put(const std::vector<int> &output_ids)
    {
        if (is_prompt_)
        {
            // skip prompt
            is_prompt_ = false;
            return;
        }

        // Trailing punctuation after which output is withheld until more
        // tokens arrive.
        static const std::vector<char> puncts{',', '!', ':', ';', '?'};

        // Append the new ids and decode the whole cache each call; decoding a
        // suffix in isolation could split multi-byte characters.
        token_cache_.insert(token_cache_.end(), output_ids.begin(), output_ids.end());
        std::string text = tokenizer_->decode(token_cache_);
        if (is_first_line_)
        {
            // Strip leading whitespace only on the first emitted line.
            lltrim(text);
        }
        if (text.empty())
        {
            return;
        }

        std::string printable_text;
        if (text.back() == '\n')
        {
            // flush the cache after newline
            printable_text = text.substr(print_len_);
            is_first_line_ = false;
            token_cache_.clear();
            print_len_ = 0;
        }
        else if (std::find(puncts.begin(), puncts.end(), text.back()) != puncts.end())
        {
            // last symbol is a punctuation, hold on
        }
        else if (text.size() >= 3 && text.compare(text.size() - 3, 3, "�") == 0)
        {
            // ends with an incomplete token, hold on
            // ("�" is the 3-byte UTF-8 replacement character the tokenizer
            // emits for a partially decoded sequence)
        }
        else
        {
            // Emit only the part not yet delivered to the callback.
            printable_text = text.substr(print_len_);
            print_len_ = text.size();
        }

        // os_ << printable_text << std::flush;
        // NOTE(review): the callback also fires with an empty string on the
        // "hold on" branches above; harmless, but the Rust side should
        // tolerate empty chunks.
        (*_process_callback)(rust::Str(printable_text));
    }
    /// Flush any still-buffered text to the process callback, reset all
    /// streamer state for the next generation round, then fire the end
    /// callback so Rust knows generation is complete.
    void ExternStreamer::end()
    {
        // Decode whatever remains in the cache and emit the not-yet-printed tail.
        std::string text = tokenizer_->decode(token_cache_);
        if (is_first_line_)
        {
            lltrim(text);
        }
        (*_process_callback)(rust::Str(text.substr(print_len_)));
        // Reset state so the streamer can be reused for the next prompt.
        is_prompt_ = true;
        is_first_line_ = true;
        token_cache_.clear();
        print_len_ = 0;

        (*_end_callback)();
    }
    /// Install the Rust-side callbacks: `process_callback` receives each
    /// printable text chunk during generation; `end_callback` fires once when
    /// generation finishes.
    void ExternStreamer::set_callback(rust::Fn<void(rust::Str s)> process_callback, rust::Fn<void()> end_callback)
    {
        _end_callback = end_callback;
        _process_callback = process_callback;
    }
    // ExternPipeline::ExternPipeline(Pipeline pipeline, ExternStreamer streamer, GenerationConfig gen_config)
    // {
    //     _pipeline = pipeline;
    //     _extern_streamer = streamer;
    //     _gen_config = gen_config;
    // }
    /// Forward the Rust callbacks to the owned streamer.
    /// NOTE(review): a const method mutating state through _extern_streamer —
    /// presumably the member is a pointer (pointee mutation is allowed);
    /// confirm the constness here is intentional.
    void ExternPipeline::set_callback(rust::Fn<void(rust::Str s)> process_callback, rust::Fn<void()> end_callback) const
    {
        _extern_streamer->set_callback(process_callback, end_callback);
    }
    /// Encode `prompt`, run generation, and return the decoded completion as a
    /// Rust string. When `use_callback` is true, the registered ExternStreamer
    /// additionally streams incremental output to the Rust callbacks.
    rust::String ExternPipeline::generate_string(const rust::Str prompt,
                                 bool use_callback = false) const
    {
        // BUG FIX: rust::Str is length-delimited, NOT NUL-terminated.
        // std::string(prompt.data()) read until the first stray NUL byte,
        // over-reading past the slice (or truncating at an embedded NUL).
        // std::string(prompt) copies exactly data()..data()+size().
        std::string _prompt = std::string(prompt);
        std::vector<int> input_ids = _pipeline->tokenizer->encode(_prompt, _gen_config.max_context_length);
        std::vector<int> new_output_ids;
        if (use_callback)
        {
            // Stream tokens through the registered ExternStreamer.
            new_output_ids = _pipeline->generate(input_ids, _gen_config, _extern_streamer);
        }
        else
        {
            new_output_ids = _pipeline->generate(input_ids, _gen_config, nullptr);
        }
        std::string output = _pipeline->tokenizer->decode(new_output_ids);
        return output;
    }
}

#endif