# -*- coding: utf-8 -*-

import torch
from typing import Dict, Any

"""
Create on 2020/05/27 15:35

Author: GT

=================================
加载转换后的训练集和测试集数据.
"""


def __to_tensor(sentences, ner_type_tags, pad_idx, device):
    """Turn batched index lists into tensors sorted by true length (descending).

    ``sentences`` is a list of equal-length id lists padded with ``pad_idx``;
    ``ner_type_tags`` is a parallel list of tag-id lists or None (test data).
    Returns ``(sentences, ner_type_tags, masks)`` on ``device``, where
    ``masks`` marks the non-pad positions; ``ner_type_tags`` stays None when
    no tags were given.
    """
    sent_tensor = torch.LongTensor(sentences)
    tag_tensor = None if ner_type_tags is None else torch.LongTensor(ner_type_tags)

    # True (un-padded) length of each sequence drives the sort order.
    pad_mask = sent_tensor != pad_idx
    lengths = pad_mask.sum(dim=1).long()
    _, order = lengths.sort(dim=0, descending=True)

    sent_tensor = sent_tensor[order].to(device)
    if tag_tensor is not None:
        tag_tensor = tag_tensor[order].to(device)
    return sent_tensor, tag_tensor, pad_mask[order].to(device)


def __label_split(item):
    """Split a ``"char/label"`` token at its LAST slash.

    Returns ``(char_part, label_part)``. Uses ``str.rfind`` instead of the
    old hand-rolled reverse scan. When no slash is present, the split
    position falls back to 0 — exactly what the original scan did — so the
    head is ``""`` and the tail is ``item[1:]``.
    """
    pos = item.rfind("/")
    if pos < 0:
        pos = 0  # no separator: keep the legacy fallback of the reverse scan
    return item[:pos], item[pos + 1:]


def training_iterator(train_file: str,
                      batch_size: int,
                      max_len: int,
                      char_vocabs: Dict[str, int],
                      ner_type_vocabs: Dict[str, int],
                      unk_idx: int = 1,
                      pad_idx: int = 0,
                      device=torch.device("cuda:0")):
    """Yield training batches ``(sentences, ner_type_tags, masks)``.

    Each line of ``train_file`` holds whitespace-separated ``"char/label"``
    tokens. Unknown chars map to ``unk_idx``; sequences are padded with
    ``pad_idx`` (or truncated) to ``max_len``. The final partial batch is
    also yielded. Raises ``KeyError`` for a label missing from
    ``ner_type_vocabs``.
    """
    sentences = []
    ner_type_tags = []

    # encoding fixed to utf-8: the corpus is Chinese text and the default
    # locale encoding is not reliable across platforms.
    with open(train_file, 'r', encoding='utf-8') as f:
        for line in f:
            sentence = []
            tags = []
            for item in line.split():
                char, ner_type = __label_split(item)
                sentence.append(char_vocabs.get(char, unk_idx))
                tags.append(ner_type_vocabs[ner_type])

            if not sentence:  # blank line
                continue

            if len(sentence) < max_len:
                # NOTE(review): tags are padded with the *char* pad index;
                # confirm it coincides with the tag vocabulary's pad id.
                sentence.extend([pad_idx] * (max_len - len(sentence)))
                tags.extend([pad_idx] * (max_len - len(tags)))
            else:
                sentence = sentence[:max_len]
                tags = tags[:max_len]

            sentences.append(sentence)
            ner_type_tags.append(tags)

            if len(sentences) == batch_size:
                yield __to_tensor(sentences, ner_type_tags, pad_idx, device)
                sentences = []
                ner_type_tags = []

        # Flush the final partial batch. The old `f.seek(0, 0)` here was dead
        # code (nothing re-read the file afterwards), so it has been removed;
        # epoch looping is handled by re-creating the generator.
        if sentences:
            yield __to_tensor(sentences, ner_type_tags, pad_idx, device)


def testing_iterator(
        test_file: str,
        batch_size: int,
        max_len: int,
        char_vocabs: Dict[str, int],
        unk_idx: int = 1,
        pad_idx: int = 0,
        device=torch.device("cuda:0")):
    """Yield test batches ``(sentences, None, masks)``.

    Each non-empty line of ``test_file`` is a raw sentence; every character
    is looked up in ``char_vocabs`` (falling back to ``unk_idx``), then
    padded with ``pad_idx`` (or truncated) to ``max_len``. The final partial
    batch is also yielded.
    """
    sentences = []

    # encoding fixed to utf-8 for the same reason as training_iterator.
    with open(test_file, 'r', encoding='utf-8') as f:
        for line in f:
            line = line.strip()
            if not line:  # a str from file iteration is never None
                continue

            sentence = [char_vocabs.get(char, unk_idx) for char in line]

            if len(sentence) < max_len:
                sentence.extend([pad_idx] * (max_len - len(sentence)))
            else:
                sentence = sentence[:max_len]

            sentences.append(sentence)

            if len(sentences) == batch_size:
                yield __to_tensor(sentences, None, pad_idx, device)
                sentences = []

        # BUG FIX: the last partial batch used `return data` inside a
        # generator, which raises StopIteration and silently DROPS the batch.
        # Yield it, matching training_iterator.
        if sentences:
            yield __to_tensor(sentences, None, pad_idx, device)


class TrainIteratorWrapper(object):
    """Re-iterable view over ``training_iterator``.

    Each ``__iter__`` call opens a fresh pass over the training file, so the
    wrapper can be looped once per epoch.
    """

    def __init__(self, *args):
        # Positional args are forwarded verbatim to training_iterator().
        self.args = args
        self._iterator = None  # current generator, created lazily by __next__

    def __iter__(self):
        # `*self.args` replaces the brittle args[0]..args[7] indexing; the
        # forwarded signature is unchanged.
        return training_iterator(*self.args)

    def __next__(self):
        # BUG FIX: the old code built a brand-new generator on every call,
        # so next(wrapper) always returned the FIRST batch. Keep one
        # generator and advance it; reset on exhaustion.
        if self._iterator is None:
            self._iterator = self.__iter__()
        try:
            return next(self._iterator)
        except StopIteration:
            self._iterator = None
            raise


class TestIteratorWrapper(object):
    """Re-iterable view over ``testing_iterator``.

    Each ``__iter__`` call opens a fresh pass over the test file.
    """

    def __init__(self, *args):
        # Positional args are forwarded verbatim to testing_iterator().
        self.args = args
        self._iterator = None  # current generator, created lazily by __next__

    def __iter__(self):
        # `*self.args` replaces the brittle args[0]..args[6] indexing; the
        # forwarded signature is unchanged.
        return testing_iterator(*self.args)

    def __next__(self):
        # BUG FIX: the old code built a brand-new generator on every call,
        # so next(wrapper) always returned the FIRST batch. Keep one
        # generator and advance it; reset on exhaustion.
        if self._iterator is None:
            self._iterator = self.__iter__()
        try:
            return next(self._iterator)
        except StopIteration:
            self._iterator = None
            raise


def load_vocabs(vocab_file: str) -> Dict[str, int]:
    """Load a vocabulary file (one token per line) into ``token -> index``.

    Blank lines are skipped and do not consume an index; indices are assigned
    in file order starting from 0.
    """
    vocabs = {}
    index = 0
    with open(vocab_file, 'r', encoding='utf-8') as f:
        for line in f:
            token = line.strip()
            # The old `line is None` test was dead code: iterating a file
            # never yields None. Only blank lines need skipping.
            if not token:
                continue
            vocabs[token] = index
            index += 1
    return vocabs


def reverse_k_v(vocabs: Dict[Any, Any]):
    """Invert a mapping: ``{k: v}`` becomes ``{v: k}``.

    If several keys share a value, the last one seen wins (same as the
    original loop-based version).
    """
    return {value: key for key, value in vocabs.items()}


if __name__ == "__main__":
    import os
    import sys

    if len(sys.argv) < 5:
        print("python3 load_data.py train_file test_file char_vocabs ner_type_vocabs")
        exit(0)

    UNK, PAD = "<UNK>", "<PAD>"

    train_file = sys.argv[1]
    test_file = sys.argv[2]
    char_vocabs_file = sys.argv[3]
    ner_type_vocabs_file = sys.argv[4]

    char_vocabs = load_vocabs(char_vocabs_file)
    ner_type_vocabs = load_vocabs(ner_type_vocabs_file)
    idx2char = reverse_k_v(char_vocabs)

    unk_idx = char_vocabs.get(UNK)
    pad_idx = char_vocabs.get(PAD)

    epochs = 5
    batch_size = 128
    max_len = 300
    device = torch.device("cuda:0")

    train_iter = TrainIteratorWrapper(train_file, batch_size, max_len, char_vocabs, ner_type_vocabs, unk_idx, pad_idx,
                                 device)
    test_iter = TestIteratorWrapper(test_file, batch_size, max_len, char_vocabs, unk_idx, pad_idx, device)

    count = 0
    for epoch in range(1, epochs+1):
        print("[Epoch]: %d" % epoch)
        for data in train_iter:
            sent, tags, masks = data
            count += 1
            print(epoch, " : ", count, " : ",  sent)

    print('\n\n')
    for data in test_iter:
        sent, _, masks = data
        print(sent)
