from common.configs.tools import logger, init_logger
from common.configs.path import paths
from common.configs.stopwords import punc, stopwords
from argparse import ArgumentParser
from pathlib import Path
import re
import json
from gensim.models import Word2Vec
from gensim.models.fasttext import FastText
import pickle
import numpy as np


class LineIterator:
    """Re-iterable stream of tokenized documents from a JSON-lines file.

    Each line of the file must be a JSON object with 'title' and 'content'
    keys; the two fields are concatenated and tokenized via ``split_punc``.
    Because instances are re-iterable (unlike a one-shot generator), they
    can be handed directly to gensim trainers, which scan the corpus
    multiple times.
    """

    def __init__(self, filepath, gram):
        # filepath: path to a JSON-lines corpus file.
        # gram: n-gram size forwarded to split_punc (1, 2 or 3).
        self.filepath = filepath
        self.gram = gram

    def __iter__(self):
        # Context manager guarantees the handle is closed even if the
        # consumer abandons iteration early (the original leaked it).
        with open(self.filepath) as fh:
            for line in fh:
                record = json.loads(line.strip())
                yield split_punc(record['title'] + ' ' + record['content'],
                                 self.gram)


def filter_stopwords(text_list):
    """Return *text_list* with every stop word removed, order preserved."""
    return [token for token in text_list if token not in stopwords]


def split_punc(text_, gram):
    """Split *text_* on punctuation and build per-fragment n-gram lists.

    The text is cut at every punctuation mark listed in ``punc``; each
    fragment is whitespace-tokenized and stop-word filtered, and fragments
    with fewer than two tokens are dropped.  For ``gram`` > 1, consecutive
    tokens are joined with '_' into overlapping n-grams (sliding window of
    size ``gram``, step 1, trailing incomplete windows trimmed).

    Args:
        text_: raw text to tokenize.
        gram: n-gram size; any positive integer (the original hard-coded
            only 1, 2 and 3 — behavior for those values is unchanged).

    Returns:
        A list of token lists, one per retained fragment.

    Raises:
        ValueError: if ``gram`` < 1 (original silently returned None).
    """
    fragments = [filter_stopwords(piece.strip().split(' '))
                 for piece in re.split('|'.join(punc), text_)]
    fragments = [tokens for tokens in fragments if len(tokens) > 1]
    if gram == 1:
        return fragments
    if gram < 1:
        raise ValueError('gram must be a positive integer, got %r' % gram)
    # Trimming the last (gram - 1) windows reproduces the original
    # hard-coded [:-1] (gram == 2) and [:-2] (gram == 3) exactly, and
    # generalizes to any n.
    trim = gram - 1
    return [['_'.join(tokens[i:i + gram])
             for i in range(len(tokens))][:-trim]
            for tokens in fragments]


def main():
    """CLI entry point: train embedding models on 1/2/3-gram corpora.

    Loads the pre-built corpus from ``corpus_w2v_tt.json`` and, depending
    on ``--model``, trains one Word2Vec or FastText model per n-gram level,
    saving each under ``common/output/c2v_model/``.  All flag names,
    defaults, input paths and output filenames match the original script.

    NOTE(review): ``--model glove`` is advertised in the help text but has
    no implementation here — with it, the script loads the corpus and exits
    silently (original behavior, preserved).
    """
    parser = ArgumentParser(description='')
    parser.add_argument('--vs', type=int, help='vector size',
                        default=100)
    parser.add_argument('--window', type=int, help='window size',
                        default=5)
    parser.add_argument('--mincount', type=int, help='min count',
                        default=2)
    parser.add_argument('--workers', type=int, help='workers',
                        default=6)
    parser.add_argument('--model', type=str, help='word2vec or glove or fasttext',
                        default='fasttext')
    parser.add_argument('--sg', type=int, help='CBOW: 0, Skip-gram: 1',
                        default=0)
    args = parser.parse_args()

    with open(paths['output'] / 'corpus/corpus_w2v_tt.json', 'r') as f:
        corpus = json.load(f)

    # Shared hyper-parameters for every model trained below.
    hyper = dict(vector_size=args.vs,
                 window=args.window,
                 min_count=args.mincount,
                 workers=args.workers,
                 sg=args.sg)
    gram_keys = ('1gram', '2gram', '3gram')

    if args.model == 'word2vec':
        logger.info(
            'training with word2vec:\n\tvector size: {}\n\twindow: {}'
            '\n\tmin_count: {}\n\tworkers: {}'.format(
                args.vs, args.window, args.mincount, args.workers))
        for key in gram_keys:
            model = Word2Vec(corpus[key], **hyper)
            model.save('common/output/c2v_model/w2v_{}.model'.format(key))
        logger.info('w2v_model Done.')

    if args.model == 'fasttext':
        # Use the module logger (not bare print) for consistency with the
        # word2vec branch.
        logger.info(
            'training with fasttext:\n\tvector size: {}\n\twindow: {}'
            '\n\tmin_count: {}\n\tworkers: {}'.format(
                args.vs, args.window, args.mincount, args.workers))
        for key in gram_keys:
            model = FastText(corpus[key], **hyper)
            model.save('common/output/c2v_model/ftt_{}.model'.format(key))
        logger.info('ftt_model Done.')

# Run the CLI entry point only when executed as a script (not on import).
if __name__ == '__main__':
    main()