#! /usr/bin/env python3
from libs.models import (
        DocumentClassifier, LayoutClassifier, TypeClassifier, AreaDetect, TextDeskew)
from libs.recall import DeepMNBClassifier, MNBClassifier
from libs.tools import poolImages2
import fire

from urllib.request import urlopen
import os
from datetime import datetime

import jieba
from tqdm import tqdm
from glob import glob

import json
# Load the service configuration from config.json in the current working
# directory; the rest of the script expects at least 'PORT' and 'TOKEN' keys.
with open('config.json', 'r', encoding='utf-8') as f:
    config = json.load(f)

def updateModel(model_name, host=None, model_type='classifier'):
    """Ask the serving host to hot-reload *model_name*.

    Performs a GET on
    ``http://<host>:<PORT>/update_<model_type>/<model_name>/<TOKEN>``
    (PORT/TOKEN come from the module-level ``config``) and returns the
    decoded response body, or the string ``'fail'`` on any error.
    The outcome is also printed for operator feedback.
    """
    host = host or '127.0.0.1'
    try:
        url = 'http://{}:{}/update_{}/{}/{}'.format(
            host, config['PORT'], model_type, model_name, config['TOKEN'])
        flag = urlopen(url).read().decode()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any network/config problem is reported as 'fail'.
        flag = 'fail'
    print('Update({}-{}) {}!'.format(host, model_name, flag))
    return flag

def dumpLogs(model_filename, actual_epoch, flag, start_time):
    """Append one training-log line to ``logs.txt`` next to the model file.

    The line records the training window (*start_time* -> now), the model
    path, the epoch count actually run, and the update *flag* returned by
    :func:`updateModel`.
    """
    stamp = '%y-%m-%d %H:%M:%S'
    log_path = os.path.join(
        os.path.dirname(os.path.abspath(model_filename)), 'logs.txt')
    entry = 'Time: {} -> {}, Model: {}, Epoch: {}, Update: {}\n'.format(
        start_time.strftime(stamp),
        datetime.today().strftime(stamp),
        model_filename, actual_epoch, flag)
    with open(log_path, 'a') as logfile:
        logfile.write(entry)

class TrainShell:
    """python-fire CLI: one subcommand per model-training/deployment task.

    Every ``*clf``/``*detect``/``*deskew`` command follows the same shape:
    train the corresponding model, then (if ``isupdate``) ping the serving
    host to hot-reload it and append a line to the training log.
    """

    def _notify_and_log(self, model_name, model_filename, actual_epoch,
                        start_time, host, isupdate, model_type='classifier'):
        # Shared post-training step (fire ignores underscore-prefixed
        # methods, so this is not exposed as a subcommand).
        if isupdate:
            flag = updateModel(model_name, host=host, model_type=model_type)
            dumpLogs(model_filename, actual_epoch, flag, start_time)

    def poolimages(self, src, dst=None, size=(128, 128)):
        """Pool the images under *src* to *size*, writing to *dst*.

        *dst* defaults to *src* (in-place).
        """
        dst = dst or src
        poolImages2(src, dst, size)

    def cleantexts(self, dirname, txtname, chi_sim):
        """Normalize every ``*.txt`` under *dirname* into one segmented
        line each, appended to *txtname*.

        NOTE(review): the *chi_sim* argument is ignored — the character
        whitelist is always re-read from ``./chi_sim.txt`` (original
        behavior preserved; parameter kept for CLI compatibility).
        """
        with open('./chi_sim.txt') as f:
            # First character of each of the first 3500 lines; a set gives
            # O(1) membership instead of rebuilding a list per character.
            allowed = {line[0] for line in f.readlines()[:3500]}
        allowed.add('\n')  # keep newlines so they can be mapped to spaces
        paths = glob(os.path.join(dirname, '*.txt'))
        with open(txtname, 'w') as out:
            for path in tqdm(paths, ncols=80):
                with open(path) as f:
                    txt = f.read()
                # Strip non-whitelisted characters, flatten newlines,
                # word-segment with jieba, and collapse whitespace runs.
                txt = ''.join(ch for ch in txt if ch in allowed)
                txt = txt.replace('\n', ' ')
                seg = ' '.join(jieba.lcut(txt))
                seg = ' '.join(seg.split())
                out.write(seg + '\n')

    def documentclf(self, datasets, model, clean=False, train=True,
                    batch_size=128, epochs_num=32, time=None, host=None,
                    monitor=None, test_steps=10, isupdate=True, date=None):
        """Train the document classifier, then deploy/log it."""
        start_time = datetime.today()
        model_filename, actual_epoch = DocumentClassifier(
            datasets, model, clean=clean, train=train,
            batch_size=batch_size, epochs_num=epochs_num).run(
                time=time, remote_monitor=monitor,
                test_max_steps=test_steps, date=date)
        self._notify_and_log('document_clf', model_filename, actual_epoch,
                             start_time, host, isupdate)

    def recallclf(self, datasets, model, clean=False, train=True,
                  batch_size=16, epochs_num=64, time=None, host=None,
                  monitor=None, test_steps=10, isupdate=True, date=None):
        """Train the deep recall classifier, then deploy/log it.

        The served model name is the *model* filename without extension.
        """
        start_time = datetime.today()
        model_filename, actual_epoch = DeepMNBClassifier(
            datasets, model, clean=clean, train=train,
            batch_size=batch_size, epochs_num=epochs_num).run(
                time=time, remote_monitor=monitor,
                test_max_steps=test_steps, date=date)
        model_name = os.path.splitext(os.path.basename(model))[0]
        self._notify_and_log(model_name, model_filename, actual_epoch,
                             start_time, host, isupdate, model_type='recall')

    def nlprecallclf(self, txt0, txt1, model, host=None, isupdate=True):
        """Train the Naive-Bayes recall classifier on two text corpora,
        save it to *model*, then deploy/log it (epoch logged as -1 since
        MNB training is not epoch-based).
        """
        start_time = datetime.today()
        classifier = MNBClassifier()
        classifier.train(txt0, txt1)
        classifier.save(model)
        model_name = os.path.splitext(os.path.basename(model))[0]
        self._notify_and_log(model_name, model, -1,
                             start_time, host, isupdate, model_type='recall')

    def layoutclf(self, datasets, model, clean=False, train=True,
                  batch_size=128, epochs_num=32, time=None, host=None,
                  monitor=None, isupdate=True, date=None):
        """Train the layout classifier, then deploy/log it."""
        start_time = datetime.today()
        model_filename, actual_epoch = LayoutClassifier(
            datasets, model, clean=clean, train=train,
            batch_size=batch_size, epochs_num=epochs_num).run(
                time=time, remote_monitor=monitor, date=date)
        self._notify_and_log('layout_clf', model_filename, actual_epoch,
                             start_time, host, isupdate)

    def typeclf(self, datasets, model, clean=False, train=True,
                batch_size=128, epochs_num=32, time=None, host=None,
                monitor=None, isupdate=True, date=None):
        """Train the type classifier, then deploy/log it."""
        start_time = datetime.today()
        model_filename, actual_epoch = TypeClassifier(
            datasets, model, clean=clean, train=train,
            batch_size=batch_size, epochs_num=epochs_num).run(
                time=time, remote_monitor=monitor, date=date)
        self._notify_and_log('type_clf', model_filename, actual_epoch,
                             start_time, host, isupdate)

    def areadetect(self, datas, labels, colors, model, clean=False, train=True,
                   batch_size=50, epochs_num=300, time=None, host=None,
                   monitor=None, isupdate=True, date=None):
        """Train the area-detection model, then deploy/log it.

        The served model name is the *model* filename without extension.
        """
        start_time = datetime.today()
        model_filename, actual_epoch = AreaDetect(
            datas, labels, colors, model, clean=clean, train=train,
            batch_size=batch_size, epochs_num=epochs_num).run(
                time=time, remote_monitor=monitor, date=date)
        model_name = os.path.splitext(os.path.basename(model))[0]
        self._notify_and_log(model_name, model_filename, actual_epoch,
                             start_time, host, isupdate,
                             model_type='areadetect')

    def textdeskew(self, datasets, model, clean=False, train=True,
                   batch_size=50, epochs_num=200, time=None, host=None,
                   monitor=None, isupdate=True, date=None):
        """Train the text-deskew model, then deploy/log it."""
        start_time = datetime.today()
        model_filename, actual_epoch = TextDeskew(
            datasets, model, clean=clean, train=train,
            batch_size=batch_size, epochs_num=epochs_num).run(
                time=time, remote_monitor=monitor, date=date)
        self._notify_and_log('deskew_model', model_filename, actual_epoch,
                             start_time, host, isupdate)

if __name__ == '__main__':
    # Expose TrainShell's public methods as CLI subcommands via python-fire.
    fire.Fire(TrainShell)
