import os
os.environ["MXNET_CUDNN_AUTOTUNE_DEFAULT"] = "0"
import mxnet.autograd as ag
import mxnet as mx
import numpy as np
import mxnet.ndarray as nd
import logging
import tqdm
import gluoncv, cv2,gluonnlp
from  mxnet.gluon.data import DataLoader
from models.res_ctc import Resnet_CTC
from lib.data.latex import latex_dataset


def str_sum(l):
    """Concatenate the elements of *l* with ``+`` and return the result.

    Works where the builtin ``sum`` does not (e.g. a list of strings),
    because the accumulator is seeded with the first element instead of 0.

    Returns the empty string for an empty list instead of raising
    IndexError (robustness fix; the callers join decoded token strings).
    """
    if not l:
        return ""
    total = l[0]
    for item in l[1:]:
        total += item
    return total
def main():
    """Train Resnet_CTC on the latex dataset and save a checkpoint per epoch.

    Uses a single GPU (id 2), Adam at lr 1e-4, and CTC loss summed over the
    batch. Checkpoints are written to ``./output/res_ctc``.
    """
    LOG_DIR = "./output/res_ctc"
    # Create the checkpoint directory up front; save() below fails otherwise.
    os.makedirs(LOG_DIR, exist_ok=True)
    logging.basicConfig(level=logging.INFO)
    gpu_ids = [2]
    ctx_list = [mx.gpu(x) for x in gpu_ids]
    dataset = latex_dataset(resize=False)
    train_loader = DataLoader(dataset=dataset, batch_size=16, shuffle=True,
                              num_workers=16, last_batch="discard")
    net = Resnet_CTC(alphabet_size=dataset.words_count, sequence_len=dataset.max_len)
    net.collect_params().reset_ctx(ctx=ctx_list)
    params = dict(net.collect_params())
    # Exclude the backbone's original classification head from the update.
    del params['resnetv1b0_dense0_weight']
    del params['resnetv1b0_dense0_bias']
    trainer = mx.gluon.Trainer(params,
                               'adam',
                               {'learning_rate': 1e-4,
                                'clip_gradient': None,
                                'multi_precision': True,
                                },
                               )
    criterion = mx.gluon.loss.CTCLoss()

    for nepoch in range(100):
        epoch_loss = 0.0
        loss_average = 0.0
        batches_since_log = 0  # denominator for the running average
        for nbatch, batch in enumerate(train_loader):
            batch = [mx.nd.array(x, ctx=ctx_list[0]) for x in batch]
            batch_img, batch_label = batch
            with ag.record():
                logits = net(batch_img)
                loss = criterion(logits, batch_label)
                loss = mx.nd.sum(loss)
            ag.backward(loss)
            trainer.step(1)
            # asscalar() blocks on the GPU; call it once and reuse the value
            # (the original synchronized twice per batch).
            loss_scalar = loss.asscalar()
            loss_average += loss_scalar
            epoch_loss += loss_scalar
            batches_since_log += 1
            if nbatch % 100 == 0:
                # Average over the batches actually accumulated since the
                # last print (the original divided by a constant 101).
                print(loss_average / batches_since_log)
                loss_average = 0.0
                batches_since_log = 0
        filename = LOG_DIR + "/weights-%d-%d-[%.4f].params" % (
            nepoch, len(train_loader), epoch_loss / len(train_loader))
        net.collect_params().save(filename)
        logging.info("saving checkpoint to {}".format(filename))
def demo():
    """Run greedy CTC inference on the validation split and show each image.

    Loads a trained checkpoint, argmax-decodes the per-timestep softmax,
    strips blank tokens, prints the predicted token string, and displays
    the source image with matplotlib.
    """
    import matplotlib.pyplot as plt
    logging.basicConfig(level=logging.INFO)
    dataset = latex_dataset(resize=False, images_filter="validate_filter.lst")
    gpu_ids = [2]
    ctx_list = [mx.gpu(x) for x in gpu_ids]
    net = Resnet_CTC(alphabet_size=dataset.words_count, sequence_len=dataset.max_len)
    net.collect_params().reset_ctx(ctx=ctx_list)
    net.collect_params().load("output/res_ctc/weights-99-609-[5.6544].params")
    # The last class index is used as the CTC blank; register the mapping
    # once, outside the loop (the original re-assigned it every iteration).
    dataset.index2words[dataset.words_count - 1] = "blank"
    for idx in range(len(dataset)):
        img, label = dataset[idx]
        # Add the batch axis; the network expects a leading batch dimension.
        img_float = img.astype(np.float32)[np.newaxis]
        logits = net(nd.array(img_float, ctx=mx.gpu(gpu_ids[0])))
        logits_softmax = mx.nd.softmax(logits, axis=2)
        assert logits_softmax.shape[0] == 1
        logits_softmax = mx.nd.squeeze(logits_softmax)
        # Greedy decode: argmax per timestep. Cast to int so the values are
        # valid lookup keys/indices for index2words (argmax yields floats).
        # NOTE(review): this does not collapse repeated symbols as full CTC
        # decoding would — confirm whether that is intended.
        pred_indices = mx.nd.argmax(logits_softmax, axis=1).asnumpy().astype(np.int64)
        print(pred_indices)
        sentence = [dataset.index2words[i] for i in pred_indices
                    if dataset.index2words[i] != "blank" and i != -1]
        print(str_sum(sentence))
        plt.imshow(dataset.at_with_image_path(idx)[0])
        plt.show()
# Script entry point: runs inference/visualization by default; switch the
# call to main() to train instead.
if __name__ == "__main__":
    demo()