import sys
sys.path.append('./')

import argparse
import numpy as np
from collections import OrderedDict
from pathlib import Path

import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn

from utils import *

# Runtime setup: enable the cuDNN autotuner and require a CUDA device.
cudnn.benchmark = True
# Explicit check instead of `assert` — asserts are stripped under `python -O`.
if not torch.cuda.is_available():
    raise RuntimeError('CUDA device is required to run this conversion script')

parser = argparse.ArgumentParser(description='PyTorch Image Classification Training')
parser.add_argument(
    'input_ckpt',
    default='runs/exp_p2conv_2021-12-01-17:11:21/last.pt',
    type=str, nargs='?', help='input ckpt file path')

args = parser.parse_args()

# Output path sits next to the input: `<stem>_converted<suffix>`.
in_ckpt_path = Path(args.input_ckpt)
_name = in_ckpt_path.stem + '_converted' + in_ckpt_path.suffix
out_ckpt_path = in_ckpt_path.parent / _name
# Refuse to clobber an existing converted checkpoint (explicit raise, not assert).
if out_ckpt_path.exists():
    raise FileExistsError(f'output checkpoint already exists: {out_ckpt_path}')

# The parent dir normally exists already (it contains the input ckpt);
# this is a no-op safety net.
Path(out_ckpt_path).parent.mkdir(parents=True, exist_ok=True)


# NOTE(review): torch.load unpickles arbitrary objects — only run this on
# trusted checkpoints (consider `weights_only=True` on newer torch).
ckpt = torch.load(str(in_ckpt_path))
print(f'ckpt has keys: {list(ckpt.keys())}')
state_dict = ckpt['state_dict']

# Keep only real parameters/buffers; everything else (e.g. `.mask` entries,
# `num_batches_tracked`) is dropped from the converted state dict.
new_state_dict = OrderedDict()
for k, v in state_dict.items():
    if k.endswith(('.weight', '.bias', '.running_mean', '.running_var')):
        new_state_dict[k] = v

# Bake each pruning mask into its corresponding weight tensor so the
# converted checkpoint needs no mask support at load time.
mask_n = 0
for k, v in state_dict.items():
    if k.endswith('.mask'):
        # `layer.mask` -> `layer.weight` (avoid the magic `-5` slice).
        w_k = k[:-len('.mask')] + '.weight'
        assert w_k in new_state_dict, f'{w_k} should be in state_dict'
        # Fraction of surviving (positive) mask entries; plain Python float,
        # no numpy/torch mixing needed.
        _s = (v > 0).sum().item() / v.numel()
        print(f'find mask {w_k} with sparsity {_s:.3f}')
        # In-place multiply zeroes out the pruned weights.
        new_state_dict[w_k] *= v
        print()
        mask_n += 1
print(f'total mask number: {mask_n}')

ckpt['state_dict'] = new_state_dict

# Sparsity statistics. `weight_N` restricts to `.weight` tensors whose key
# starts with 'layer' — presumably the backbone blocks (e.g. `layer1.*` in
# ResNet-style models), excluding stem/head; TODO confirm against the model.
# Count zeros directly in torch — no `.cpu().numpy()` round-trip, and no
# throwaway lists inside sum() (flake8-comprehensions C4xx).
total_N = sum(v.numel() for v in new_state_dict.values())
weight_N = sum(v.numel() for k, v in new_state_dict.items()
               if k.endswith('.weight') and k.startswith('layer'))
z_total = sum((v == 0).sum().item() for v in new_state_dict.values())
z_weight = sum((v == 0).sum().item() for k, v in new_state_dict.items()
               if k.endswith('.weight') and k.startswith('layer'))

print(f'total_sparsity: {z_total / total_N}')
print(f'weight_sparsity: {z_weight / weight_N}')

torch.save(ckpt, str(out_ckpt_path))
print(f'saved to {out_ckpt_path}')