import numpy as np
from dataset.liver_data import get_loader_liver

import torch 
import torch.nn as nn 
from monai.networks.nets.basic_unet import BasicUNet
from monai.networks.nets.unetr import UNETR
from monai.networks.nets.swin_unetr import SwinUNETR
from monai.inferers import SlidingWindowInferer
from light_training.evaluation.metric import dice, hausdorff_distance_95
from light_training.trainer import Trainer
from monai.utils import set_determinism
from light_training.utils.lr_scheduler import LinearWarmupCosineAnnealingLR
from light_training.utils.files_helper import save_new_model_and_delete_last
from models.uent2d import UNet2D
from models.uent3d import UNet3D
from medpy.metric import dc, hd95
from monai.networks.nets.segresnet import SegResNet
from models.transbts.TransBTS_downsample8x_skipconnection import TransBTS
from models.nestedformer.nested_former import NestedFormer
from models.swinunet2d.swinunet import SwinUnet
from einops import rearrange
from monai.networks.nets.vnet import VNet
from models.modelgenesis.unet3d import UNet3DModelGen
from models.transvw.models.ynet3d import UNet3DTransVW
from monai.networks.nets.attentionunet import AttentionUnet
set_determinism(123)
import os

os.environ["CUDA_VISIBLE_DEVICES"] = "0,1"

# Experiment output directory. The commented alternatives are the log
# directories used for other backbones/datasets in past runs; keep exactly
# one `logdir` assignment active and match it to the model built in
# BraTSTrainer.__init__.
# logdir = "./logs_liver/swinunetr/"
# logdir = "./logs_brats/unet2d/"
# logdir = "./logs_brats/unet3d/"
logdir = "./logs_liver/unetr/"
# logdir = "./logs_brats/tranbts/"
# logdir = "./logs_liver/segresnet/"

# logdir = "./logs_liver/transvw"

# logdir = "./logs_brats/swinunet2d"
# logdir = "./logs_brats/vnet"
# logdir = "./logs_liver/modelsgenesis"
# logdir = "./logs_brats/transvw"
# logdir = "./logs_liver/attentionUNet"

model_save_path = os.path.join(logdir, "model")  # checkpoint directory
max_epoch = 600     # total training epochs
batch_size = 2      # loader batch size -- presumably per process under DDP; TODO confirm against Trainer
val_every = 50      # run validation every N epochs
num_gpus = 2        # must agree with CUDA_VISIBLE_DEVICES above
env = "DDP"         # distributed data-parallel environment type for Trainer
device = "cuda:0"   # primary device -- presumably rank-local under DDP; TODO confirm

class BraTSTrainer(Trainer):
    """Trainer for 3-class liver segmentation (background / class 1 / class 2).

    ``__init__`` builds the UNETR backbone, AdamW optimizer, warmup+cosine
    LR schedule and cross-entropy loss. Training is a plain supervised step;
    validation runs sliding-window inference over full volumes and reports
    per-class Dice and 95th-percentile Hausdorff distance.
    """

    def __init__(self, env_type, max_epochs, batch_size, device="cpu", val_every=1, num_gpus=1, logdir="./logs/", master_ip='localhost', master_port=17750, training_script="train.py"):
        super().__init__(env_type, max_epochs, batch_size, device, val_every, num_gpus, logdir, master_ip, master_port, training_script)
        # Patch-wise inference over full volumes: 96^3 windows, two windows
        # per forward pass, 50% overlap between neighbouring windows.
        self.window_infer = SlidingWindowInferer(roi_size=[96, 96, 96],
                                                 sw_batch_size=2,
                                                 overlap=0.5)

        # Backbone: UNETR with 1 input channel and 3 output classes on a
        # 96^3 patch. To evaluate a different backbone (SwinUNETR,
        # SegResNet, TransBTS, ...), swap the constructor here and point the
        # module-level `logdir` at a matching directory.
        self.model = UNETR(1, 3, [96, 96, 96], mlp_dim=1024, pos_embed="conv", norm_name="instance")

        self.best_mean_dice = 0.0
        self.optimizer = torch.optim.AdamW(self.model.parameters(), lr=2e-4, weight_decay=1e-3)

        # Linear warmup for the first 50 epochs, cosine annealing afterwards.
        self.scheduler = LinearWarmupCosineAnnealingLR(self.optimizer,
                                                       warmup_epochs=50,
                                                       max_epochs=max_epochs)

        self.loss_func = nn.CrossEntropyLoss()

    def training_step(self, batch):
        """Run one supervised step and return the cross-entropy loss."""
        image, label = self.get_input(batch)

        pred = self.model(image)
        loss = self.loss_func(pred, label)
        self.log("train_loss", loss, step=self.global_step)
        return loss

    def get_input(self, batch):
        """Extract ``(image, label)`` tensors from a loader batch.

        A 5-D label (with a singleton channel axis) is squeezed to 4-D so it
        matches the class-index target layout CrossEntropyLoss expects, and
        the label is cast to ``long``.
        """
        image = batch["image"]
        label = batch["label"]
        if len(label.shape) == 5:
            label = label.squeeze(dim=1)

        return image, label.long()

    def validation_step(self, batch):
        """Evaluate one case; return ``[dice_1, dice_2, hd95_1, hd95_2]``.

        A class absent from the ground truth yields NaN for both metrics so
        it can be excluded when averaging over the validation set
        (presumably the base Trainer uses a NaN-aware mean -- TODO confirm).
        """
        image, label = self.get_input(batch)

        output = self.window_infer(image, self.model).argmax(dim=1).cpu().numpy()
        target = label.cpu().numpy()

        num_classes = 3  # background + 2 foreground classes
        dices = []
        hds = []
        for cls in range(1, num_classes):  # skip background (class 0)
            pred_c = (output == cls)
            target_c = (target == cls)
            if target_c.sum() == 0:
                # Metric undefined when the class is missing from GT.
                dices.append(float("nan"))
                hds.append(float("nan"))
            else:
                dices.append(dice(pred_c, target_c))
                hds.append(hausdorff_distance_95(pred_c, target_c))

        # Order matters: validation_end unpacks dices first, then HD95s.
        return dices + hds

    def validation_end(self, mean_val_outputs):
        """Log epoch-mean metrics and write best/latest checkpoints.

        ``mean_val_outputs`` holds the per-metric means over the validation
        set, in the order produced by ``validation_step``.
        """
        dice_1, dice_2, hd_1, hd_2 = mean_val_outputs
        print(dice_1, dice_2, hd_1, hd_2)

        self.log("dice_1", dice_1, step=self.epoch)
        self.log("dice_2", dice_2, step=self.epoch)
        self.log("hd1", hd_1, step=self.epoch)
        self.log("hd2", hd_2, step=self.epoch)

        mean_dice = (dice_1 + dice_2) / 2
        mean_hd = (hd_1 + hd_2) / 2
        self.log("mean_dice", mean_dice, step=self.epoch)
        self.log("mean_hd", mean_hd, step=self.epoch)

        # Keep the best checkpoint (by mean Dice) plus the most recent one;
        # the helper removes the previous file carrying the same tag.
        if mean_dice > self.best_mean_dice:
            self.best_mean_dice = mean_dice
            save_new_model_and_delete_last(self.model,
                                           os.path.join(model_save_path,
                                                        f"best_model_{mean_dice:.4f}.pt"),
                                           delete_symbol="best_model")

        save_new_model_and_delete_last(self.model,
                                       os.path.join(model_save_path,
                                                    f"final_model_{mean_dice:.4f}.pt"),
                                       delete_symbol="final_model")

        print(f" mean_dice is {mean_dice}")

if __name__ == "__main__":
    
    trainer = BraTSTrainer(env_type=env,
                            max_epochs=max_epoch,
                            batch_size=batch_size,
                            device=device,
                            logdir=logdir,
                            val_every=val_every,
                            num_gpus=num_gpus,
                            master_port=17751,
                            training_script=__file__)

    train_ds, val_ds = get_loader_liver(batch_size=batch_size, fold=0, cache=True)

    # for data in val_ds:
    #     image = data["image"]
    #     labels = data["label"]

    #     print(image.shape)
    #     print(labels.shape)
    #     print((labels == 2).sum())
    #     import matplotlib.pyplot as plt 
    #     for i in range(image.shape[2]):
    #         if labels[0, i].sum() != 0:
    #             print(i)
    #             plt.subplot(1, 2, 1)
    #             plt.imshow(image[0, i].cpu().numpy(), cmap="gray")
    #             plt.subplot(1, 2, 2)
    #             plt.imshow(labels[0, i].cpu().numpy(),cmap="gray")
    #             plt.show()

    trainer.train(train_dataset=train_ds, val_dataset=val_ds)
