from fastNLP.io.loader import Conll2003NERLoader
from fastNLP.io.pipe import Conll2003NERPipe
from fastNLP import Vocabulary
import os
import sys

sys.path.append("../")

from pathes import conll03_paths
from fastNLP import cache_results


def gen_chars(entry):
    """Split each word of *entry* into its individual characters.

    Args:
        entry: an iterable of word strings (one instance's token list).

    Returns:
        dict with two parallel fields:
          - "chars": per-word lists of single characters
          - "char_seq_len": the length of each character list (word length)
    """
    char_lists = [list(word) for word in entry]
    return {
        "chars": char_lists,
        "char_seq_len": [len(cl) for cl in char_lists],
    }


@cache_results("../caches/conll03.pkl", _refresh=False)
def load_and_process():
    """Load the CoNLL-2003 NER splits, run the standard NER pipe, and attach
    character-level fields plus a character vocabulary.

    Returns:
        A fastNLP DataBundle with "chars"/"char_seq_len" fields added from
        "raw_words", a "chars" vocabulary built on the train split, and a
        "chars_index" field indexed on every split.

    NOTE: the result is cached to ../caches/conll03.pkl by @cache_results;
    delete the cache (or set _refresh=True) to force reprocessing.
    """
    # Load the raw splits and apply the standard preprocessing (lower=True
    # lowercases the word field used for word-level indexing).
    bundle = Conll2003NERLoader().load(conll03_paths)
    bundle = Conll2003NERPipe(lower=True).process(bundle)

    # Derive the character-level fields from the untouched raw words.
    bundle.apply_field_more(gen_chars, field_name="raw_words")

    # Build the character vocabulary from the train split only, register it
    # on the bundle, then index every split with it.
    char_vocab = Vocabulary()
    char_vocab.from_dataset(bundle.get_dataset("train"), field_name="chars")
    bundle.set_vocab(char_vocab, "chars")
    char_vocab.index_dataset(
        bundle.datasets.values(),
        field_name="chars",
        new_field_name="chars_index",
    )

    return bundle


if __name__ == "__main__":
    # Removed stale commented-out vocabulary-inspection code; keep only the
    # sanity check below.
    data_bundle = load_and_process()
    # Print one processed training example to verify the pipeline output.
    print(data_bundle.get_dataset("train")[0].items())
