import sys
sys.path.append('./')

import argparse
import shutil
import time
import yaml
import json
import re
import math
import numpy as np
from easydict import EasyDict as edict
import logging

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
from torch.utils.tensorboard import SummaryWriter


from pq.prune import replace_prune_module
from pq.modules.mconv import mapping
from utils import *
from utils.train_utils import accuracy, tb_write_scalars, save_checkpoint

# Let cuDNN auto-tune convolution algorithms; pays off when input shapes are fixed.
cudnn.benchmark = True
# NOTE(review): `assert` is stripped under `python -O`; an explicit
# RuntimeError would be a more robust guard -- confirm before relying on it.
assert torch.cuda.is_available()

parser = argparse.ArgumentParser(description='PyTorch Image Classification Training')
# Positional but optional (nargs='?'): falls back to the default config path.
parser.add_argument('config', default='configs/res18_nm_gradual.yaml', type=str, nargs='?', help='config file path')
parser.add_argument('--resume', type=str, help='ckpt file path')
parser.add_argument('--gpus', default="0,1", type=str, help='GPUs id to use. separated by ,')
parser.add_argument('--name', default='exp_res18_nm_gradual', type=str, help='experiment name')


# Module-level state shared across functions, populated by main():
#   args     -- YAML config as an EasyDict
#   cmd_args -- parsed CLI arguments as an EasyDict
gvar = edict({
    'args': None,
    'cmd_args': None,
})

def main():
    """Entry point: parse CLI args and YAML config, build the validation
    loader and model, optionally resume from a checkpoint, then run one
    validation pass."""
    cmd_args = parser.parse_args()
    cmd_args = edict(cmd_args.__dict__)
    cmd_args.gpus = [int(x) for x in cmd_args.gpus.split(',')]

    # Fix: close the config file deterministically -- the original leaked the
    # handle via yaml.load(open(...)).  yaml.Loader can construct arbitrary
    # Python objects; acceptable only because configs are trusted local files.
    with open(cmd_args.config) as f:
        args = edict(yaml.load(f, yaml.Loader))

    logging.info("cmd_args: " + json.dumps(cmd_args, indent=4, sort_keys=True))
    logging.info("args: " + json.dumps(args, indent=4, sort_keys=True))

    gvar.args = args
    gvar.cmd_args = cmd_args
    # .get() tolerates configs that omit 'seed' (EasyDict raises
    # AttributeError on a missing attribute).
    if args.get('seed') is not None:
        set_seeds(args.seed)

    print("Use GPU: {}".format(cmd_args.gpus))

    # Data loading code
    # NOTE(review): the val loader reuses the *train* batch size and worker
    # count -- presumably the config has no separate val section; confirm.
    val_dataset = build_dataset(args.dataset, 'val')
    val_loader = torch.utils.data.DataLoader(
        val_dataset,
        batch_size=args.train.batch_size,
        shuffle=False,
        num_workers=args.train.workers,
        pin_memory=True
    )

    # create model
    model = build_model(args.model)

    torch.cuda.set_device(cmd_args.gpus[0])
    model = model.cuda()
    if len(cmd_args.gpus) > 1:
        # model is already on cmd_args.gpus[0]; the original's extra
        # .cuda() call here was redundant.
        model = torch.nn.DataParallel(model, device_ids=cmd_args.gpus)

    # Swap plain modules for their prunable counterparts in-place.
    replace_prune_module(model, mapping)

    criterion = build_loss(args.loss)

    # optionally resume from a checkpoint (optimizer=None: eval only)
    if cmd_args.resume is not None:
        logging.info(f'resuming from ckpt: {cmd_args.resume}')
        resume_from_ckpt(model, None, cmd_args.resume, device=torch.device(f'cuda:{cmd_args.gpus[0]}'))

    validate(val_loader, model, criterion)

def validate(val_loader, model, criterion):
    """Run one full pass over `val_loader` in eval mode and return the
    average top-1 accuracy.

    Logs Acc@1, Acc@5, mean loss, and the model's current weight sparsity.
    """
    batch_time = AverageMeter('Time', ':6.3f')
    losses = AverageMeter('Loss', ':.4e')
    top1 = AverageMeter('Acc@1', ':6.2f')
    top5 = AverageMeter('Acc@5', ':6.2f')
    # (Removed the unused ProgressMeter: it was built but never displayed.)

    # switch to evaluate mode
    model.eval()

    with torch.no_grad():
        end = time.time()
        for images, target in val_loader:
            images = images.cuda(non_blocking=True)
            target = target.cuda(non_blocking=True)

            # compute output
            output = model(images)
            loss = criterion(output, target)

            # measure accuracy and record loss
            acc1, acc5 = accuracy(output, target, topk=(1, 5))
            losses.update(loss.item(), images.size(0))
            top1.update(acc1[0], images.size(0))
            top5.update(acc5[0], images.size(0))

            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()

    # Fix: gvar never gets an 'epoch' key in this eval-only script, so the
    # original `gvar.epoch` raised AttributeError (EasyDict) right here.
    epoch = gvar.get('epoch', 0)
    logging.info(f'Test Epoch {epoch} Acc@1 {top1.avg:.3f} Acc@5 {top5.avg:.3f} Loss {losses.avg:.6f} sparsity {get_sparsity(model):.3f}')
    return top1.avg

def get_sparsity(model, pattern='.*'):
    """Return the fraction of zeroed (masked-out) weight entries among
    prunable modules whose qualified name matches `pattern` (regex search).

    Returns 0.0 when no module matches, instead of dividing by zero.
    """
    total = 0  # total weight elements across matching prunable modules
    # Renamed from the original's `NZ  # nonzero`: the expression counts
    # entries whose mask is exactly zero, i.e. the *pruned* weights.
    zeros = 0
    for name, mod in model.named_modules():
        # `mapping.values()` holds the prunable module classes; exact type
        # match (not isinstance) mirrors how replace_prune_module swaps them.
        if type(mod) in mapping.values() and re.search(pattern, name):
            total += np.prod(mod.weight.shape)
            zeros += (mod.mask.abs() == 0).float().sum().item()
    return zeros / total if total else 0.0