#pragma once

#include <LibDL/Tensor/Tensor.h>
#include <LibDL/serialize/module_keys.h>
#include <LibDL/serialize/module_structs.h>
#include <LibDL/nn/Modules/Modules.h>
#include <boost/any.hpp>
#include <boost/filesystem.hpp>
#include <boost/stacktrace.hpp>
#include <boost/archive/binary_iarchive.hpp>
#include <boost/archive/binary_oarchive.hpp>
#include <boost/serialization/string.hpp>
#include <boost/serialization/vector.hpp>
#include <torch/torch.h>
#include <torch/script.h>
#include <LibDL/ordered_dict/boost_any_wrapper.h>

/**
 * Serialization — save/load utilities for LibDL modules on top of libtorch
 * archives, mirroring PyTorch's Python state_dict / load_state_dict protocol.
 *
 * Tensors are written through torch::serialize::OutputArchive; the key layout
 * (and optionally the module structure) is persisted side-by-side in a
 * boost-serialized "<filename>.keys" file so the nested Ordered_dict can be
 * rebuilt on load. State-dict values are stored as std::pair<Tensor, bool>,
 * where the bool marks "is buffer" (as opposed to parameter).
 */
class Serialization {
private:
    /**
     * Collects this module's own (non-recursive) parameters and buffers into
     * `destination` under `prefix`.
     *
     * Tensors containing any NaN are skipped — they are treated as
     * uninitialized placeholders. When `keep_vars` is false the detached
     * data tensor is stored instead of the autograd variable.
     */
    static void
    _save_to_state_dict(const torch::nn::Module &self, Ordered_dict<std::string, any_wrapper> &destination,
                        const std::string &prefix,
                        bool keep_vars) {
        for (auto &pair : self.named_parameters(false)) {
            if (!at::any(at::isnan(pair.value())).item().toBool())
#ifdef Torch_Version_Less_13
                destination.insert(prefix + pair.key(),
                                   std::pair<Tensor, bool>(Tensor(keep_vars ? pair.value() : *pair->data<at::Tensor>()),
                                                           false));
#else
                destination.insert(prefix + pair.key(),
                                   std::pair<Tensor, bool>(Tensor(keep_vars ? pair.value() : pair.value().data()),
                                                           false));
#endif
        }
        for (auto &pair : self.named_buffers(false)) {
            if (!at::any(at::isnan(pair.value())).item().toBool())
#ifdef Torch_Version_Less_13
                destination.insert(prefix + pair.key(),
                                   std::pair<Tensor, bool>(Tensor(keep_vars ? pair.value() : *pair->data<at::Tensor>()),
                                                           true));
#else
                destination.insert(prefix + pair.key(),
                                   std::pair<Tensor, bool>(Tensor(keep_vars ? pair.value() : pair.value().data()),
                                                           true));
#endif
        }
    }

    /**
     * Copies the entries of `state_dict` belonging to this module (those whose
     * key starts with `prefix`) into the module's own parameters/buffers.
     *
     * Keys present in the module but missing from the state_dict are recorded
     * in `missing_keys` (strict mode); keys present in the dict but unknown to
     * the module go to `unexpected_keys`; shape/copy failures are described in
     * `error_msgs`. If the module itself is empty and strict mode is off, the
     * module is re-created from the saved `moduleStructs` description and the
     * load is retried once.
     */
    static void
    _load_from_state_dict(std::shared_ptr<torch::nn::Module> &self,
                          Ordered_dict<std::string, any_wrapper> state_dict,
                          const std::string &prefix, const Ordered_dict<std::string, any_wrapper> &local_metadata,
                          bool strict, std::vector<std::string> &missing_keys,
                          std::vector<std::string> &unexpected_keys,
                          std::vector<std::string> &error_msgs, module_structs &moduleStructs) {
        //todo hooks
        //  for hook in self._load_state_dict_pre_hooks.values():
        //      hook(state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs)

        // Snapshot of this module's current parameters/buffers; NaN tensors
        // are skipped, consistent with _save_to_state_dict.
        auto local_state = Ordered_dict<std::string, Tensor>();
        for (auto &pair : self->named_parameters()) {
            if (!at::any(at::isnan(pair.value())).item().toBool())
                local_state.insert(pair.key(), Tensor(pair.value()));
        }
        for (auto &pair : self->named_buffers()) {
            if (!at::any(at::isnan(pair.value())).item().toBool())
                local_state.insert(pair.key(), Tensor(pair.value()));
        }

        if (!local_state.core.is_empty()) {
            for (auto &pair : local_state.core) {
                std::string key = prefix + pair.key();
                Tensor param = pair.value();
                if (state_dict.contains(key)) {
                    auto input_param = state_dict[key].content<Tensor>();

                    // NOTE(review): only the rank is compared here; tensors
                    // with equal rank but different sizes are caught by the
                    // copy_ failure below instead.
                    if (input_param.dim() != param.dim()) {
                        std::ostringstream buffer;
                        buffer << "size mismatch for " << key << ": copying a param with shape " << input_param.sizes()
                               << " from checkpoint, the shape in current model is " << param.sizes() << ".";
                        error_msgs.push_back(buffer.str());
                    }

#ifdef Torch_Version_Less_14
                    // Pre-1.4: unwrap autograd variables to their backing data.
                    if (input_param.core.type().is_variable()) {
#ifdef Torch_Version_Less_13
                        input_param = *input_param.core.data<Tensor>();
#else
                        input_param.core = input_param.core.data();
#endif
                    }
#endif

                    try {
                        //copy_ function can't be used here for undefined tensor
                        param.core.data().copy_(input_param.core.clone());
                    } catch (const std::exception &) {
                        std::ostringstream buffer;
                        buffer << "While copying the parameter named \"" << key
                               << "\", whose dimensions in the model are " << param.sizes()
                               << " and whose dimensions in the checkpoint are " << input_param.sizes() << ".";
                        error_msgs.push_back(buffer.str());
                    }
                } else if (strict) {
                    missing_keys.push_back(key);
                }
            }
        } else {
            std::cerr << "Empty Module Given!" << std::endl;
            if (strict) {
                std::ostringstream backtrace;
                backtrace << boost::stacktrace::stacktrace();
                throw c10::Error("Load Module Failed\n", backtrace.str());
            } else {
                //init module with saved module struct, then retry the load once
                if (!moduleStructs.name.empty()) {
                    register_module_struct(self, moduleStructs);
                    _load_from_state_dict(self, state_dict, prefix, local_metadata, strict, missing_keys,
                                          unexpected_keys, error_msgs, moduleStructs);
                }
            }
        }
        if (strict) {
            for (const auto &key : state_dict.keys()) {
                if (key.find(prefix) == 0) {
                    // Strip the prefix and keep only the first dotted segment:
                    // that is the child-module or tensor name at this level.
                    auto input_name = key.substr(prefix.length());
                    if (input_name == "_metadata") continue;
                    auto index = input_name.find('.');
                    input_name = input_name.substr(0, index);
                    if (!self->named_modules().contains(input_name) && !local_state.contains(input_name)) {
                        unexpected_keys.push_back(key);
                    }
                }
            }
        }
    }

    /**
     * Rebuilds a bare module tree from a saved module_structs description.
     * Parameters/buffers are registered with random values of the recorded
     * sizes; the real values are filled in by the subsequent state-dict load.
     * Dotted (recursive) names that belong to a registered submodule are
     * skipped at this level.
     */
    static void
    register_module_struct(std::shared_ptr<torch::nn::Module> &module, module_structs &moduleStructs) {
        // NOTE(review): `module` is reassigned here, so the caller must hold
        // the shared_ptr by reference for the new tree to be observed.
        auto new_module = torch::nn::Module(moduleStructs.name);
        module = std::make_shared<torch::nn::Module>(new_module);
        for (auto &item : moduleStructs.submodules) {
            auto sub_module = std::make_shared<torch::nn::Module>();
            register_module_struct(sub_module, item);
            module->register_module(item.name, sub_module);
        }
        for (auto &item : moduleStructs.parameters) {
            //check if contains recurse parameters
            auto key = item.name.substr(0, item.name.find('.'));
            if (!moduleStructs.contains(key))
                module->register_parameter(item.name, torch::rand(item.get_sizes()));
        }
        for (auto &item : moduleStructs.buffers) {
            //check if contains recurse buffers
            auto key = item.name.substr(0, item.name.find('.'));
            if (!moduleStructs.contains(key))
                module->register_buffer(item.name, torch::rand(item.get_sizes()));
        }
    }

    /**
     * Recursive driver for load_state_dict: loads this module's entries, then
     * descends into each named child with the child's dotted prefix.
     */
    static void
    load_state_dict_inner_load(std::shared_ptr<torch::nn::Module> &module, const std::string &prefix,
                               const Ordered_dict<std::string, any_wrapper> &state_dict,
                               const Ordered_dict<std::string, any_wrapper> &metadata, bool strict,
                               std::vector<std::string> &missing_keys,
                               std::vector<std::string> &unexpected_keys,
                               std::vector<std::string> &error_msgs, module_structs &moduleStructs) {
        Ordered_dict<std::string, any_wrapper> local_metadata;
        if (metadata.is_empty()) {
            local_metadata = Ordered_dict<std::string, any_wrapper>();
        } else {
            // Metadata is keyed by the prefix without its trailing '.'.
            // (length() is unsigned, so guard the empty case explicitly
            // instead of comparing `length() - 1 < 0`, which is always false.)
            auto key = prefix.empty() ? std::string() : prefix.substr(0, prefix.length() - 1);
            if (metadata.contains(key))
                local_metadata = metadata.findRef(key).content<Ordered_dict<std::string, any_wrapper>>();
        }
        _load_from_state_dict(module, state_dict, prefix, local_metadata, strict,
                              missing_keys, unexpected_keys, error_msgs, moduleStructs);
        for (auto pair : module->named_children()) {
            if (pair.value() != nullptr) {
                load_state_dict_inner_load(pair.value(), prefix + pair.key() + '.', state_dict, metadata, strict,
                                           missing_keys, unexpected_keys, error_msgs, moduleStructs);
            }
        }
    }

    /**
     * Writes a nested state_dict into a torch output archive.
     * Tensor pairs are written as (possibly-buffer) tensors; plain strings and
     * per-child "<key>_meta" markers are only written on libtorch >= 1.3,
     * whose archives support string values.
     */
    static void save_recursive(torch::serialize::OutputArchive &archive,
                               const Ordered_dict<std::string, any_wrapper> &ordered_dict) {
        for (auto &i : ordered_dict.items()) {
            auto &value = i.value();
            if (value.is_type<std::pair<Tensor, bool>>()) {
                auto tmp = value.content<std::pair<Tensor, bool>>();
                archive.write(i.key(), tmp.first.core, tmp.second);
            }
            if (value.is_type<std::string>()) {
                // BUGFIX: the original relied on `continue;` inside an
                // unbraced if under Torch_Version_Less_13, which made the
                // string write below execute unconditionally for every
                // non-string entry (bad any-cast). Braces + #ifndef keep the
                // write string-only and skip it entirely on old libtorch.
#ifndef Torch_Version_Less_13
                archive.write(i.key(), value.content<std::string>());
#endif
            }
            if (value.is_type<Ordered_dict<std::string, any_wrapper>>()) {
                save_recursive(archive, value.content<Ordered_dict<std::string, any_wrapper>>());
#ifdef Torch_Version_Less_13
                continue;
#endif
                archive.write(i.key(), i.key() + "_meta");
            }
        }
    }

    /**
     * Flattens the state_dict's key layout into a module_keys tree so the
     * nested structure can be reconstructed when reading the archive back.
     */
    static void
    save_keys_recursive(const Ordered_dict<std::string, any_wrapper> &ordered_dict, std::vector<module_keys> &keys) {
        for (auto &i : ordered_dict.items()) {
            module_keys moduleKeys;
            moduleKeys.key = i.key();
            moduleKeys.is_buffer = false;
            if (i.value().is_type<std::pair<Tensor, bool>>())
                moduleKeys.is_buffer = i.value().content<std::pair<Tensor, bool>>().second;
            moduleKeys.has_children = false;
            if (i.value().is_type<Ordered_dict<std::string, any_wrapper>>()) {
                moduleKeys.has_children = true;
                std::vector<module_keys> children;
                save_keys_recursive(i.value().content<Ordered_dict<std::string, any_wrapper>>(), children);
                moduleKeys.children = children;
            }
            keys.emplace_back(moduleKeys);
        }
    }

    /**
     * Records the module's structure (names and tensor sizes of parameters,
     * buffers, and submodules, recursively) so an empty module can be rebuilt
     * on load via register_module_struct.
     */
    static module_structs
    save_module_struct(const std::shared_ptr<torch::nn::Module> &module, const std::string &module_name) {
        module_structs moduleStructs;
        moduleStructs.name = module_name;
        for (const auto &item : module->named_parameters(false)) {
            moduleStructs.parameters.emplace_back(module_structs::tensor_struct(item.key(), item.value().sizes()));
        }
        for (const auto &item : module->named_buffers(false)) {
            moduleStructs.buffers.emplace_back(module_structs::tensor_struct(item.key(), item.value().sizes()));
        }
        for (const auto &item : module->named_children()) {
            moduleStructs.submodules.emplace_back(save_module_struct(item.value(), item.key()));
        }
        return moduleStructs;
    }

    /**
     * Serializes the key layout (and saved prefix) to "<filename>.keys" with
     * Boost binary archives. Falls back to a non-binary stream if the binary
     * open fails.
     */
    static void save_keys(const Ordered_dict<std::string, any_wrapper> &ordered_dict, const std::string &filename) {
        std::vector<module_keys> keys;
        std::string prefix = ordered_dict["_metadata"].content<Ordered_dict<std::string, any_wrapper>>().find(
                "prefix")->content<std::string>();
        save_keys_recursive(ordered_dict, keys);
        auto file = filename + ".keys";
        std::ofstream outfile(file, std::ios::binary | std::ios::out | std::ios::trunc);
        if (!outfile.good()) {
            outfile.open(file, std::ios::out | std::ios::trunc);
        }
        boost::archive::binary_oarchive out_archive(outfile);
        out_archive << keys << prefix;
        outfile.close();
    }

    /**
     * Overload that additionally serializes the module structure, enabling
     * structure reconstruction on load (see load_keys / register_module_struct).
     */
    static void save_keys(const Ordered_dict<std::string, any_wrapper> &ordered_dict,
                          const std::shared_ptr<torch::nn::Module> &module, const std::string &filename) {
        std::vector<module_keys> keys;
        std::string prefix = ordered_dict["_metadata"].content<Ordered_dict<std::string, any_wrapper>>().find(
                "prefix")->content<std::string>();
        save_keys_recursive(ordered_dict, keys);
        module_structs moduleStructs = save_module_struct(module, module->name());
        auto file = filename + ".keys";
        std::ofstream outfile(file, std::ios::binary | std::ios::out | std::ios::trunc);
        if (!outfile.good()) {
            outfile.open(file, std::ios::out | std::ios::trunc);
        }
        boost::archive::binary_oarchive out_archive(outfile);
        out_archive << keys << prefix << moduleStructs;
        outfile.close();
    }

    /**
     * Reads "<filename>.keys": the key tree, saved prefix, and (if present)
     * the module structure. A missing module structure is tolerated with a
     * warning, for .keys files written by the two-argument save_keys.
     * @throws std::invalid_argument if the .keys file does not exist.
     */
    static std::tuple<std::vector<module_keys>, std::string, module_structs> load_keys(const std::string &filename) {
        std::vector<module_keys> keys;
        std::string prefix;
        module_structs moduleStructs;
        auto file = filename + ".keys";
        std::ifstream infile(file, std::ios::binary | std::ios::in);
        if (!infile.good()) {
            throw std::invalid_argument("Key file not Found");
        }
        boost::archive::binary_iarchive in_archive(infile);
        try {
            in_archive >> keys >> prefix >> moduleStructs;
        } catch (const std::exception &) {
            std::cerr << "[Warning] Not find module struct defined. You should declare the\n\t"
                      << "saved module type to load serialized module." << std::endl;
            moduleStructs = module_structs();
        }
        return std::make_tuple(keys, prefix, moduleStructs);
    }

    /**
     * Rebuilds the nested Ordered_dict from a torch input archive, guided by
     * the key tree loaded from the .keys file. Children may be stored either
     * as sub-archives or (libtorch >= 1.3) flattened next to a "_meta" marker.
     */
    static void
    load_recursive(torch::serialize::InputArchive &archive, Ordered_dict<std::string, any_wrapper> &ordered_dict,
                   std::vector<module_keys> &keys) {
        for (auto &key : keys) {
            if (key.has_children) {
                torch::serialize::InputArchive child_archive;
                if (archive.try_read(key.key, child_archive)) {
                    Ordered_dict<std::string, any_wrapper> ordered_dict_children;
                    load_recursive(child_archive, ordered_dict_children, key.children);
                    ordered_dict.insert(key.key, ordered_dict_children);
                } else {
#ifdef Torch_Version_Less_13
                    continue;
#endif
                    c10::IValue value;
                    archive.read(key.key, value);
                    std::string str = value.toStringRef();
                    // "_meta" marker: children were flattened into this same
                    // archive rather than nested in a sub-archive.
                    if (str.find("_meta") != std::string::npos) {
                        Ordered_dict<std::string, any_wrapper> ordered_dict_children;
                        load_recursive(archive, ordered_dict_children, key.children);
                        ordered_dict.insert(key.key, ordered_dict_children);
                    }
                }
            } else {
                Tensor tensor;
                if (archive.try_read(key.key, tensor, key.is_buffer)) {
                    ordered_dict.insert(key.key, tensor);
                } else {
#ifdef Torch_Version_Less_13
                    continue;
#endif
                    c10::IValue str;
                    archive.read(key.key, str);
                    ordered_dict.insert(key.key, str.toStringRef());
                }
            }
        }
    }

public:
    /**
     * Fills `destination` with the module's full state (parameters, buffers,
     * children, recursively) under `prefix`, maintaining a "_metadata" entry
     * with the per-module version and the root prefix. Returns `destination`.
     */
    static Ordered_dict<std::string, any_wrapper>
    state_dict(nn::Module &self, Ordered_dict<std::string, any_wrapper> &destination,
               const std::string &prefix = "",
               bool keep_vars = false) noexcept(false) {
        if (!destination.find("_metadata"))
            destination.insert("_metadata", Ordered_dict<std::string, any_wrapper>());
        Ordered_dict<std::string, any_wrapper> local_metadata = destination["_metadata"].content<Ordered_dict<std::string, any_wrapper>>();
        auto meta_value = Ordered_dict<std::string, any_wrapper>();
        meta_value.insert("version", self.version());
        // Metadata is keyed by the prefix without its trailing '.'; guard the
        // empty case explicitly (length() is unsigned, so `length() - 1 < 0`
        // could never be true).
        local_metadata.insert(prefix.empty() ? std::string() : prefix.substr(0, prefix.length() - 1), meta_value);
        if (!local_metadata.contains("prefix"))
            local_metadata.insert("prefix", prefix);
        destination["_metadata"] = local_metadata;
        _save_to_state_dict(*self.get_core(), destination, prefix, keep_vars);
        for (const auto &pair : self.named_children()) {
            if (pair.second.get_core() != nullptr) {
                auto child = nn::Module(pair.second);
                state_dict(child, destination, prefix + pair.first + ".", keep_vars);
            }
        }
        // todo add hooks
        // for hook in self._state_dict_hooks.values():
        //     hook_result = hook(self, destination, prefix, local_metadata)
        //     if hook_result is not None:
        //         destination = hook_result
        return destination;
    }

    /**
     * Loads `state_dict` into `self` (recursively). Returns the pair
     * (missing_keys, unexpected_keys). A prefix stored in the dict's metadata
     * overrides a mismatching `prefix` argument with a warning. Accumulated
     * error messages are reported on stderr.
     */
    static std::pair<std::vector<std::string>, std::vector<std::string>>
    load_state_dict(nn::Module &self, Ordered_dict<std::string, any_wrapper> state_dict, const std::string &prefix = "",
                    bool strict = true) {
        std::string prefix_ = prefix;
        std::vector<std::string> missing_keys, unexpected_keys, error_msgs;
        Ordered_dict<std::string, any_wrapper> metadata;
        module_structs moduleStructs;
        if (state_dict.contains("_metadata")) {
            metadata = state_dict.findRef("_metadata").content<Ordered_dict<std::string, any_wrapper>>();
            auto _prefix = metadata.find("prefix")->content<std::string>();
            if (_prefix != prefix_) {
                std::cerr << "[Warning] Incorrect prefix found in state_dict: '" << _prefix << "'" << std::endl;
                prefix_ = _prefix;
            }
        }
        if (metadata.is_empty()) {
            state_dict["_metadata"] = metadata = Ordered_dict<std::string, any_wrapper>();
        }
        if (state_dict.contains("_module_struct")) {
            moduleStructs = state_dict.findRef("_module_struct").content<module_structs>();
        }
        auto core = self.get_core();
        load_state_dict_inner_load(core, prefix_, state_dict, metadata, strict, missing_keys,
                                   unexpected_keys, error_msgs, moduleStructs);
        // Rewrap: the core may have been replaced by register_module_struct.
        self = nn::Module(core);
        if (strict) {
            if (!unexpected_keys.empty()) {
                std::ostringstream buffer;
                buffer << "Unexpected key(s) in state_dict: ";
                for (const auto &key : unexpected_keys) {
                    buffer << key << ", ";
                }
                error_msgs.insert(error_msgs.begin(), buffer.str());
            }
            if (!missing_keys.empty()) {
                std::ostringstream buffer;
                buffer << "Missing key(s) in state_dict: ";
                for (const auto &key : missing_keys) {
                    buffer << key << ", ";
                }
                error_msgs.insert(error_msgs.begin(), buffer.str());
            }
        }
        if (!error_msgs.empty()) {
            std::ostringstream buffer;
            buffer << "Error(s) in loading state_dict for " << typeid(self).name() << ": \n\t";
            for (const auto &msg : error_msgs) {
                buffer << msg << "\n\t";
            }
            std::cerr << buffer.str() << std::endl;
        }
        return std::make_pair(missing_keys, unexpected_keys);
    }

    /**
     * Saves `module` to `filename`.
     * @param use_native use libtorch's own torch::save instead of the
     *                   key-file-based format (no ".keys" sidecar written).
     */
    static void save(nn::Module &module, const std::string &filename, bool use_native = false) {
        if (use_native) {
            torch::save(module.get_core(), filename);
        } else {
            Ordered_dict<std::string, any_wrapper> orderedDict;
            state_dict(module, orderedDict);
#ifdef Torch_Version_Less_13
            torch::serialize::OutputArchive archive;
#else
            torch::serialize::OutputArchive archive(
                    std::make_shared<torch::jit::script::CompilationUnit>());
#endif
            save_recursive(archive, orderedDict);
            archive.save_to(filename);
            save_keys(orderedDict, module.get_core(), filename);
        }
    }

    /**
     * Saves a prepared state_dict to `filename` plus its ".keys" sidecar
     * (without the module-structure record).
     */
    static void save(const Ordered_dict<std::string, any_wrapper> &ordered_dict, const std::string &filename) {
#ifdef Torch_Version_Less_13
        torch::serialize::OutputArchive archive;
#else
        torch::serialize::OutputArchive archive(
                std::make_shared<torch::jit::script::CompilationUnit>());
#endif
        save_recursive(archive, ordered_dict);
        archive.save_to(filename);
        save_keys(ordered_dict, filename);
    }

    /**
     * Loads `filename` into `module`. With use_native, delegates to
     * torch::load; otherwise requires the ".keys" sidecar file.
     * @throws std::invalid_argument if the sidecar is missing (non-native).
     */
    static void load(nn::Module &module, const std::string &filename, bool strict = true, bool use_native = false) {
        if (!boost::filesystem::exists(filename + ".keys") && !use_native) {
            throw std::invalid_argument("Invalid file name.\nCan't find key defined file " + filename + ".keys");
        }
        if (use_native) {
            auto core = module.get_core();
            torch::load(core, filename);
            module = nn::Module(core);
        } else {
            load(filename, module, strict);
        }
    }

    /** Loads only the state_dict from `filename` (no module required). */
    static Ordered_dict<std::string, any_wrapper> load(const std::string &filename) {
        auto result = load_keys(filename);
        torch::serialize::InputArchive archive;
        archive.load_from(filename);
        Ordered_dict<std::string, any_wrapper> ordered_dict;
        load_recursive(archive, ordered_dict, std::get<0>(result));
        return ordered_dict;
    }

    /**
     * Loads `filename` into `module` via load_state_dict, injecting the saved
     * module structure so an empty module can be rebuilt. Returns the module.
     * @throws c10::Error if any keys are missing or unexpected.
     */
    static nn::Module load(const std::string &filename, nn::Module &module, bool strict = true) {
        auto result = load_keys(filename);
        torch::serialize::InputArchive archive;
        archive.load_from(filename);
        Ordered_dict<std::string, any_wrapper> ordered_dict;
        load_recursive(archive, ordered_dict, std::get<0>(result));
        if (module.get_core() == nullptr)
            module = nn::Module();
        ordered_dict.insert("_module_struct", std::get<2>(result));
        auto tuple = load_state_dict(module, ordered_dict, std::get<1>(result), strict);
        if (!tuple.first.empty() || !tuple.second.empty()) {
            std::ostringstream backtrace;
            backtrace << boost::stacktrace::stacktrace();
            throw c10::Error("Load Module Failed\n", backtrace.str());
        }
        return module;
    }
};

