import os
import argparse
from tqdm import tqdm
from PIL import Image
import numpy as np
import torch
import torch.nn.functional as F
from model.SupCon import resnet
from model.resnet3d import resnet3d
import torchvision.transforms as T
import data.transforms as MT
from torchvision.datasets import ImageFolder
from data.datasets import AortaDataset3DCenter, AortaDataset
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt
import seaborn as sns

class HookModule:
    """Capture a sub-module's forward output via a hook and expose gradients
    of an arbitrary scalar with respect to that captured activation."""

    def __init__(self, model, module):
        # Keep the parent model so grads() can clear its parameter grads.
        self.model = model
        # The forward hook records the module's output on every forward pass.
        self.handle = module.register_forward_hook(self._get_output)

    def _get_output(self, module, inputs, outputs):
        # Hook callback: remember the most recent forward activation.
        self.outputs = outputs

    def grads(self, outputs, retain_graph=True, create_graph=True):
        """Return d(outputs)/d(hooked activation) computed by autograd."""
        result = torch.autograd.grad(
            outputs=outputs,
            inputs=self.outputs,
            retain_graph=retain_graph,
            create_graph=create_graph,
        )
        # Discard any parameter gradients accumulated as a side effect.
        self.model.zero_grad()
        return result[0]

    def remove(self):
        # Detach the forward hook from the module.
        self.handle.remove()

def view_grads(grads, fig_path, fig_w=30, fig_h=5):
    """Render a 2-D gradient matrix (categories x kernels) as a heatmap
    and save the figure to ``fig_path``."""
    figure, axis = plt.subplots(ncols=1, figsize=(fig_w, fig_h))
    axis.set_xlabel('convolutional kernel')
    axis.set_ylabel('category')
    sns.heatmap(grads, ax=axis, annot=False)
    plt.savefig(fig_path, bbox_inches='tight')
    # Close the figure to release memory when called repeatedly.
    plt.close()

def get_args():
    """Parse command-line options for channel-gradient-pattern generation."""
    parser = argparse.ArgumentParser(
        description='Generate channel gradient pattern',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument('--device', '-d', type=int, default=0, help='GPU index')
    parser.add_argument('--weight', '-w', type=str, help='Model checkpoint')
    parser.add_argument('--flag3d', '-f', action='store_true', help='3D Net')
    parser.add_argument('--pos', '-p', action='store_true', help='Positive grad or abs grad')
    # NOTE: remember to update the data directories (hard-coded in __main__) before running.
    return parser.parse_args()

if __name__ == '__main__':
    opt = get_args()
    # Gradient patterns should be derived from the best checkpoint only.
    assert 'best' in opt.weight, f'Should use best model weight, but got {opt.weight}'

    # Fixed experiment configuration (not exposed on the command line).
    opt.image_size = 81
    opt.batch_size = 128
    opt.n_classes = 2
    opt.cate = list(range(opt.n_classes))
    opt.sobel = True   # assume a 2-channel (image + Sobel edge) checkpoint first
    opt.top = 500      # highest-confidence correct samples kept per class
    opt.data = '/nfs3-p1/zsxm/dataset/aorta_classify_cta_-100_500/train/'
    opt.data3d = '/nfs3-p1/zsxm/dataset/aorta_classify_cta_-100_500/center/train/'

    device = torch.device(f'cuda:{opt.device}')

    if opt.flag3d:
        # 3D branch: try the 2-channel (Sobel) model first; if the checkpoint
        # shape does not match, fall back to the plain 1-channel model.
        net = resnet3d(34, n_channels=2, n_classes=opt.n_classes, conv1_t_size=3)
        try:
            net.load_state_dict(torch.load(opt.weight, map_location=device))
        except RuntimeError:
            opt.sobel = False
            net = resnet3d(34, n_channels=1, n_classes=opt.n_classes, conv1_t_size=3)
            net.load_state_dict(torch.load(opt.weight, map_location=device))

        t_list = [
            MT.Resize3D(opt.image_size),
            MT.ToTensor3D(),
        ]
        if opt.sobel:
            t_list.append(MT.SobelChannel(3, flag_3d=True))
        transform = T.Compose(t_list)

        dataset = AortaDataset3DCenter(opt.data3d, opt.cate, transform, depth=7, step=1)

    else:
        # 2D branch: same 2-channel-then-1-channel fallback as above.
        net = resnet(34, n_channels=2, n_classes=opt.n_classes)
        try:
            net.load_state_dict(torch.load(opt.weight, map_location=device))
        except RuntimeError:
            opt.sobel = False
            net = resnet(34, n_channels=1, n_classes=opt.n_classes)
            net.load_state_dict(torch.load(opt.weight, map_location=device))

        t_list = [
            T.Resize(opt.image_size),
            T.ToTensor(),
        ]
        if opt.sobel:
            t_list.append(MT.SobelChannel(3))
        transform = T.Compose(t_list)

        dataset = AortaDataset(opt.data, opt.cate, transform)

    dataloader = DataLoader(dataset, batch_size=opt.batch_size, shuffle=False, drop_last=False, num_workers=8, pin_memory=True)

    net.to(device)
    net.eval()
    # Hook the last conv layer of the final residual stage; its per-channel
    # gradients form the "channel pattern" of each category.
    hook = HookModule(net, net.encoder.layer4[2].conv2)

    # Pass 1: score every sample; for each correctly predicted sample keep
    # (confidence, dataset index), bucketed by ground-truth class. The
    # dataloader has shuffle=False so `idx` tracks positions in `dataset`.
    idx = 0
    score_list = [[] for _ in range(opt.n_classes)]
    for imgs, labels in tqdm(dataloader, total=len(dataloader), desc='Dataset', unit='batch', leave=False):
        imgs = imgs.to(device=device, dtype=torch.float32)
        labels = labels.to(device=device, dtype=torch.long)

        with torch.no_grad():
            preds = torch.softmax(net(imgs), dim=1)
            labels_pred = preds.argmax(dim=1)
            for i in range(len(labels)):
                if labels_pred[i] == labels[i]:
                    score_list[labels[i].item()].append((preds[i, labels_pred[i]].item(), idx))
                idx += 1

    # Pass 2: for each class, re-run the top-`opt.top` most confident samples
    # and aggregate the gradient of the class score w.r.t. the hooked layer.
    grad_list = []
    mask_list = []
    for i in range(opt.n_classes):
        score_list[i].sort(key=lambda x: x[0], reverse=True)
        idx_list = [score[1] for score in score_list[i][0: opt.top]]
        tensor_list = [dataset[sample_idx][0] for sample_idx in idx_list]
        batch = torch.stack(tensor_list, dim=0)
        pred = net(batch.to(device))
        # NOTE(review): `pred` holds raw logits (no log_softmax), so this
        # "nll_loss" reduces to -mean(logit[target]); the gradient below is
        # therefore the Grad-CAM-style gradient of the class logit.
        # Presumably intentional — confirm against the training code.
        nll_loss = F.nll_loss(pred, (torch.ones(batch.size(0))*i).to(device, dtype=torch.long))
        # The graph is used exactly once, so it need not be retained.
        # (A redundant `nll_loss.backward()` was removed here: it only filled
        # parameter .grad buffers that were never read, and HookModule.grads
        # already extracts the activation gradient via torch.autograd.grad.)
        grads = hook.grads(-nll_loss, False, False)
        if opt.flag3d:
            # Collapse depth and spatial dims, then average over samples,
            # yielding one value per channel.
            if opt.pos:
                grads = F.relu(grads).sum(dim=(2,3,4)).mean(dim=0)
            else:
                grads = torch.abs(grads).sum(dim=(2,3,4)).mean(dim=0)
        else:
            if opt.pos:
                grads = F.relu(grads).sum(dim=(2,3)).mean(dim=0)
            else:
                grads = torch.abs(grads).sum(dim=(2,3)).mean(dim=0)
        grad_list.append(grads.cpu().numpy())
        # Binary channel mask: 1 where the channel gradient is above average.
        mask = (grads > grads.mean()).long()
        mask_list.append(mask.cpu().numpy())

    grads = np.array(grad_list)
    masks = np.array(mask_list)

    weight_dir = os.path.dirname(opt.weight)

    # Min-max normalise the gradient matrix before plotting the heatmap.
    hm = grads - np.min(grads)
    hm = hm / np.max(hm)
    view_grads(hm, os.path.join(weight_dir, f'grad_heatmap_{"pos" if opt.pos else "abs"}.png'))

    view_grads(masks, os.path.join(weight_dir, f'channel_pattern_{"pos" if opt.pos else "abs"}.png'))

    np.save(os.path.join(weight_dir, f'grad_pattern_{"pos" if opt.pos else "abs"}.npy'), masks)

    hook.remove()