import os

from models.Loss import *
from models.Modified3DUNet import Modified3DUNet
from models.UNet3DOfficial.UNet_3D import ResidualUNet3D
from models.VNetOfficial import VNet


class UNet3DConfigure:
    """Training configuration for the residual 3D U-Net on KiPA2022.

    All settings are class-level attributes read directly by the training
    pipeline. ``epoch_size`` starts at 0 (unknown before the dataset is
    inspected); call :meth:`set_interval` afterwards to recompute the
    step-based intervals.
    """
    threshold = 0.5   # probability cutoff for binarizing predictions
    class_num = 5     # number of segmentation classes
    # 1 is random crop   2 is direct crop  3 is cropping to 1 patch  4 is no crop
    crop_method = 2
    if_debug = True

    # Skip train-log recording while debugging.
    # NOTE(review): the original class body later re-assigned
    # `if_record_train_log = True` unconditionally, silently overriding this
    # debug-dependent choice; that stale duplicate has been removed so the
    # `if_debug` flag is actually honored.
    if if_debug:
        if_record_train_log = False
    else:
        if_record_train_log = True

    model = ResidualUNet3D(1, class_num)  # 1 input channel -> class_num outputs

    if_load_all_data = True
    if_data_augmentation = False
    if_mask = False
    aug_probability = 0.3
    if_hybrid = False  # This configure only works if if_data_augmentation is true
    # A single os.path.join suffices; the original double-wrapped it.
    log_root_dir = os.path.join(os.curdir, "..", "run_logs")
    weight_root_dir = os.path.join(os.curdir, "..", "weights")

    # train_data_root_path = r"D:\dataset\vessel\Parse_2022_train_data"
    # old_weights_dir = r"D:\gs\code\parse2022-draft\weights\UNet3D(1653186419147698\epoch_14"
    old_weights_dir = None
    # Resume epoch is parsed from a weights directory named ".../epoch_<n>".
    if old_weights_dir is not None:
        old_epoch = int(os.path.basename(old_weights_dir).split("_")[1])
    else:
        old_epoch = 0
    sample_prefix = "sample"
    # Smaller batch and local dataset path on the development machine.
    if os.environ.get("COMPUTERNAME") == "DESKTOP-16QO4BT":
        batch_size = 1
        train_data_root_path = r"D:\dataset\other\KiPA2022\train"
    else:
        batch_size = 4
        train_data_root_path = r"C:\dataset\other\KiPA2022"
    valid_batch_size = 1
    learning_rate = 5e-3
    training_epochs = 2000
    if_valid = True
    if_cuda = True
    if_lazy_load = True
    train_val_rate = 0.8
    if_shuffle = True
    channel_number = 1
    # patch_shape = (64, 256, 256)
    patch_size = (128, 128, 160)  # 256 256 128 is too big for some box
    group_size = 4
    epoch_size = 0  # unknown until the dataset is loaded; see set_interval()
    save_model_interval = epoch_size * 3
    lr_schedule_interval = epoch_size
    valid_interval = int(epoch_size / 2)  # kept consistent with set_interval()
    recrop_data_interval = epoch_size * 5

    # loss_function = [DiceLoss(), FocalLoss()]

    @classmethod
    def set_interval(cls):
        """Recompute step-based intervals once ``cls.epoch_size`` is known."""
        cls.save_model_interval = cls.epoch_size * 3
        cls.lr_schedule_interval = cls.epoch_size
        cls.valid_interval = int(cls.epoch_size / 2)
        cls.recrop_data_interval = cls.epoch_size * 5


class ModifiedUNet3DConfigure(UNet3DConfigure):
    """Overrides for training the Modified3DUNet variant on KiPA2022."""
    dataset_name = "KiPA2022"
    run_type = "train"
    if_debug = True
    class_num = 5
    patch_size = (112, 112, 144)
    # Combined Dice + multi-class Hausdorff distance-transform loss.
    loss_function = [
        DiceLoss(UNet3DConfigure.class_num),
        MultiClassHausdorffDTLoss(),
    ]
    model = Modified3DUNet(1, class_num)


class CPFNetConfigure:
    """Training configuration for CPFNet on the Parse 2022 vessel dataset.

    All settings are class-level attributes read directly by the training
    pipeline.
    """
    crop_method = 1  # 1 is random crop (see UNet3DConfigure's legend)
    class_num = 1    # single foreground class (vessel)
    # model = CPFNet(class_num)
    if_debug = False
    # Skip train-log recording while debugging.
    # NOTE(review): the original class body later re-assigned
    # `if_record_train_log = True` unconditionally, which would have silently
    # overridden this conditional had `if_debug` ever been set True; the
    # duplicate has been removed (no behavior change while if_debug is False).
    if if_debug:
        if_record_train_log = False
    else:
        if_record_train_log = True

    if_data_augmentation = True
    if_hybrid = False  # This configure only works if if_data_augmentation is true
    # A single os.path.join suffices; the original double-wrapped it.
    log_root_dir = os.path.join(os.curdir, "../..", "run_logs")
    weight_root_dir = os.path.join(os.curdir, "../..", "weights")

    # old_weights_dir = r"D:\gs\code\parse2022-draft\weights\1652583900700248\epoch_35"
    old_weights_dir = None
    # Resume epoch is parsed from a weights directory named ".../epoch_<n>".
    if old_weights_dir is not None:
        old_epoch = int(os.path.basename(old_weights_dir).split("_")[1])
    else:
        old_epoch = 0
    sample_prefix = "sample"
    # Smaller batch and local dataset path on the development machine.
    if os.environ.get("COMPUTERNAME") == "DESKTOP-16QO4BT":
        batch_size = 2
        train_data_root_path = r"D:\dataset\vessel\Parse_2022_train_data"
    else:
        batch_size = 8
        train_data_root_path = r"C:\dataset\vessel\Parse_2022_train_data"
    valid_batch_size = 2
    learning_rate = 5e-3
    training_epochs = 25
    if_valid = True
    if_cuda = True
    if_lazy_load = True
    save_model_interval = 300
    valid_interval = 50
    train_val_rate = 0.8
    if_shuffle = True
    channel_number = 1
    # patch_shape = (64, 256, 256)
    patch_shape = (128, 128, 64)  # 256 256 128 is too big for some box
    group_size = 4


class UNetTransformerConfigure:
    """Hyper-parameters for the UNETR (UNet-Transformer) training run."""
    # Filesystem locations
    log_root_dir = os.path.join(os.getcwd(), "../run_logs", "UNETR")
    weight_root_dir = os.path.join(os.getcwd(), "../weights", "UNETR")
    train_data_root_path = r"D:\mimics_data\train_data"
    sample_prefix = "sample"

    # Network input/output shape
    train_data_size = [64, 128, 128]
    in_channel_num = 1
    out_channel_num = 1
    mlp_dim = 3072
    dropout_rate = 0.0

    # Optimization
    max_epochs_num = 5000
    batch_size = 2
    optim_lr = 5e-4
    optim_name = 'adam'
    momentum = 0.99
    reg_weight = 1e-5

    # Dice-loss smoothing terms
    dice_loss_smooth_dr = 1e-6
    dice_loss_smooth_nr = 0.0

    # Validation / checkpointing cadence
    train_val_rate = 0.8
    save_model_interval = 500
    valid_interval = 60
    valid_batch_size = 2
    valid = True


class VNetConfigure:
    """Training configuration for V-Net on the Parse 2022 vessel dataset.

    All settings are class-level attributes read directly by the training
    pipeline.
    """
    class_num = 1  # single foreground class (vessel)
    # model = VNet3D(1, class_num)
    model = VNet()
    crop_method = 1  # 1 is random crop (see UNet3DConfigure's legend)

    if_debug = False
    # Skip train-log recording while debugging.
    if if_debug:
        if_record_train_log = False
    else:
        if_record_train_log = True

    if_data_augmentation = True
    if_hybrid = False  # This configure only works if if_data_augmentation is true
    # A single os.path.join suffices; the original double-wrapped it.
    log_root_dir = os.path.join(os.curdir, "../..", "run_logs")
    weight_root_dir = os.path.join(os.curdir, "../..", "weights")

    # train_data_root_path = r"D:\dataset\vessel\Parse_2022_train_data"
    # old_weights_dir = r"D:\gs\code\parse2022-draft\weights\1652583900700248\epoch_35"
    old_weights_dir = None
    # Resume epoch is parsed from a weights directory named ".../epoch_<n>".
    if old_weights_dir is not None:
        old_epoch = int(os.path.basename(old_weights_dir).split("_")[1])
    else:
        old_epoch = 0
    sample_prefix = "sample"
    # Smaller batch and local dataset path on the development machine.
    if os.environ.get("COMPUTERNAME") == "DESKTOP-16QO4BT":
        batch_size = 2
        train_data_root_path = r"D:\dataset\vessel\Parse_2022_train_data"
    else:
        batch_size = 8
        train_data_root_path = r"C:\dataset\vessel\Parse_2022_train_data"
    valid_batch_size = 2
    learning_rate = 5e-3
    training_epochs = 20
    if_valid = True
    if_cuda = True

    save_model_interval = 300
    valid_interval = 50
    train_val_rate = 0.8
    if_shuffle = True
    channel_number = 1

    # patch_shape = (64, 256, 256)
    patch_shape = (128, 128, 64)  # 256 256 128 is too big for some box
    group_size = 4


if __name__ == '__main__':
    # print(os.getcwd())
    # Quick sanity check: show the resume epoch parsed for UNet3DConfigure.
    resume_epoch = UNet3DConfigure.old_epoch
    print(resume_epoch)
