#ifndef __WORDS_H
#define __WORDS_H

#include <cstddef>
#include <cstdint>
#include <map>
#include <string>
#include <unordered_map>
#include <vector>

#ifdef USE_TURGLEM

#include <turglem/lemmatizer.hpp>
#include <turglem/russian/adapter.hpp>

#else

#include <unitypes.h>
#include <unistr.h>
#include <unictype.h>
#include <unicase.h>

#endif

#include "io.h"

// Append-only, file-backed dictionary mapping words <-> numeric ids.
// Id 0 is a sentinel ("~X-(") so that has() can return 0 for "unknown".
struct Words {

    std::vector<std::string> w;                 // id -> word
    std::unordered_map<std::string, size_t> s;  // word -> id

    Sink fout;  // append-mode sink; words added via add() are persisted here

    // Loads the word list from `file` (one word per line) and keeps the
    // same file open for appending words added later.
    Words(const std::string& file) : fout(file, true) {

        Source fin(file);

        // Reserve id 0 as the invalid/sentinel entry.
        w.push_back("~X-(");

        try {
            while (1) {

                std::string word;

                if (!std::getline(fin.inp, word))
                    break;

                // w.size() is the index this word receives after push_back.
                s[word] = w.size();
                w.push_back(word);
            }
        } catch (...) {
            // Best-effort load: a missing or unreadable file simply
            // yields a dictionary with only the sentinel entry.
        }
    }

    // Returns the word stored under id `i`.
    // Returns by const reference (the original returned a copy), and uses
    // at() so an invalid id throws std::out_of_range instead of being UB.
    const std::string& get(size_t i) const {
        return w.at(i);
    }

    // Returns the id of `str`, or 0 (the sentinel id) if it is unknown.
    size_t has(const std::string& str) const {

        auto i = s.find(str);

        if (i == s.end()) {
            return 0;

        } else {
            return i->second;
        }
    }

    // Returns the id of `str`, inserting it (and writing it to the backing
    // file) first if it is new.
    size_t add(const std::string& str) {

        auto i = s.find(str);

        if (i != s.end())
            return i->second;

        size_t ret = w.size();
        s[str] = ret;
        w.push_back(str);

        fout << str;

        fout.flush();

        return ret;
    }
};

// Singleton word dictionary backed by "words.dat".
// `inline` so that including this header from several translation units
// does not produce multiple-definition (ODR) link errors.
inline Words& words() {
    static Words ret("words.dat");
    return ret;
}

// Singleton file-name dictionary backed by "files.dat".
// `inline` so that including this header from several translation units
// does not produce multiple-definition (ODR) link errors.
inline Words& files() {
    static Words ret("files.dat");
    return ret;
}


/****************************/


#ifdef USE_TURGLEM

// Russian lemmatizer built on turglem: tokenizes UTF-8 text into
// ASCII-alnum / Cyrillic tokens and maps each token to a word id via the
// lemma dictionary in words().
struct Lemmatizer {

    tl::lemmatizer lem;

    // Loads the Russian dictionaries; paths are relative to the CWD.
    Lemmatizer() {

        lem.load_lemmatizer("data/dict_russian.auto",
                            "data/paradigms_russian.bin",
                            "data/prediction_russian.auto");
    }

    // Hand-rolled UTF-8 scan: appends tokens to `out`.
    //  - ASCII digits/letters accumulate into the current token;
    //  - other printable ASCII becomes a single-char token;
    //  - 2-byte sequences in the Cyrillic range (U+0400..U+04FF) accumulate;
    //  - anything else terminates the current token.
    // Empty tokens may be produced; callers filter by length.
    void tokenize(const std::string& s, std::vector<std::string>& out) {

        out.push_back(std::string());

        const u_char* i = (const u_char *)s.data();
        const u_char* e = i + s.size();

        uint32_t cur_letter;

        // Bounds check BEFORE dereferencing. The original tested
        // `*i && i < e`, which relied on the terminating NUL and read
        // out of bounds once `i` had been advanced past `e` by a
        // truncated multi-byte sequence (see below).
        while (i < e && *i) {

            if (i[0] <= '~') {

                if (i[0] >= '!') {

                    if ((i[0] >= '0' && i[0] <= '9') ||
                        (i[0] >= 'a' && i[0] <= 'z') ||
                        (i[0] >= 'A' && i[0] <= 'Z') /*|| i[0] == '-'*/
                        ) {

                        out.back() += i[0];

                    } else {

                        // Punctuation: emit it as its own 1-char token.
                        if (out.back().empty()) {
                            out.back() += i[0];

                        } else {
                            out.push_back(std::string(1, i[0]));
                        }

                        out.push_back(std::string());
                    }

                } else if (!out.back().empty()) {
                    // Control char / space: close the current token.
                    out.push_back(std::string());
                }

                ++i;

            } else {

                if ((i[0] & 0xC0) == 0x80) {
                    // Stray continuation byte: skip it.
                    ++i;

                } else if ((i[0] & 0xE0) == 0xC0) {

                    // Two-byte sequence: make sure the continuation byte
                    // is actually inside the buffer before reading it.
                    if (i + 1 >= e) {
                        // Truncated sequence at end of input: stop.
                        break;
                    }

                    cur_letter = ((i[0] & 0x1F) << 6) | (i[1] & 0x3F);

                    if (cur_letter >= 0x0400 && cur_letter <= 0x04FF) {
                        // Cyrillic letter: keep its raw UTF-8 bytes.
                        out.back() += i[0];
                        out.back() += i[1];

                    } else if (!out.back().empty()) {
                        out.push_back(std::string());
                    }

                    i += 2;

                } else {

                    // 3+-byte lead byte: treat as a separator.
                    if (!out.back().empty()) {
                        out.push_back(std::string());
                    }

                    ++i;
                }
            }
        }
    }

    // Tokenizes `s`, lemmatizes each token (>2 bytes), and accumulates
    // word-id -> count into `out`; `N` is incremented per counted token.
    // Known lemmas use their existing id; otherwise the first lemma
    // candidate (or the raw token if no candidate) is added to words().
    void operator()(const std::string& s, std::map<size_t, size_t>& out, size_t& N) {

        std::vector<std::string> tokens;
        tokenize(s, tokens);

        for (const auto& z : tokens) {

            if (z.size() <= 2)
                continue;

            tl::lem_result lr;
            size_t sz_lem = lem.lemmatize<russian_utf8_adapter>(z.data(), lr);

            std::string first;
            bool found = false;

            for (size_t i = 0; i < sz_lem; i++) {

                // Renamed from `tmp`, which shadowed the token vector above.
                std::string lemma = lem.get_text<russian_utf8_adapter>(lr, i, 0);

                if (first.empty())
                    first = lemma;

                size_t nw = words().has(lemma);
                if (nw > 0) {
                    out[nw] += 1;
                    N++;
                    found = true;
                    break;
                }
            }

            if (!found && !first.empty()) {
                size_t nw = words().add(first);
                out[nw] += 1;
                N++;
            }
        }
    }
};

#else 

// Fallback "lemmatizer" built on libunistring: no actual lemmatization,
// just Unicode-aware tokenization with uppercasing.
struct Lemmatizer {

    // Splits `s` into tokens: maximal runs of Unicode letters (category L),
    // each letter uppercased; everything else acts as a separator.
    // Leading/empty tokens may be produced; callers filter by length.
    void tokenize(const std::string& s, std::vector<std::string>& out) {

        out.push_back(std::string());

        bool in_word = false;

        const uint8_t* utf = (const uint8_t*)s.data();

        // u8_next returns NULL at the terminating NUL, which
        // std::string::data() guarantees is present.
        while (utf != nullptr) {

            ucs4_t chr;

            utf = u8_next(&chr, utf);

            if (uc_is_general_category_withtable(chr, UC_CATEGORY_MASK_L)) {

                if (!in_word) {
                    out.push_back(std::string());
                    in_word = true;
                }

                chr = uc_toupper(chr);

                // Per-call buffer: the original used `static`, which made
                // this method non-reentrant and thread-unsafe for no gain.
                uint8_t tmp[6];
                int tmpi = u8_uctomb(tmp, chr, 6);

                if (tmpi > 0) {
                    out.back().append((const char*)tmp, tmpi);
                }

            } else {
                in_word = false;
            }
        }
    }

    // Tokenizes `s` and accumulates word-id -> count into `out` for every
    // token longer than 2 bytes; `N` is incremented per counted token.
    void operator()(const std::string& s, std::map<size_t, size_t>& out, size_t& N) {

        std::vector<std::string> tmp;
        tokenize(s, tmp);

        for (const auto& z : tmp) {

            if (z.size() <= 2)
                continue;

            size_t nw = words().add(z);

            out[nw] += 1;
            N++;
        }
    }
};

#endif

// Singleton lemmatizer (loads dictionaries once, on first use).
// `inline` so that including this header from several translation units
// does not produce multiple-definition (ODR) link errors.
inline Lemmatizer& lemm() {
    static Lemmatizer ret;
    return ret;
}


#endif
