from Utils import *
import os


class TrainConfig(object):
    """Container for every training/evaluation setting.

    All options are plain attributes with generic defaults; the
    Default*Config() factory functions elsewhere in this file fill
    them in per dataset and then construct the wandb run name and
    checkpoint directory (see reflash()).
    """

    # Dataset identifiers, stored in `test_forward` to select the
    # evaluation protocol.
    TEST_MYLAB = 0
    TEST_BBBC = 1
    TEST_INCOHERENT = 2
    TEST_FOCUSPATH = 3

    # Per-dataset focus-position sampling interval and maximum
    # position. Units are nanometers unless noted otherwise.
    BBBC_INTERVAL = 2000
    BBBC_MAX_POS = 32000

    INCOHERENT_INTERVAL = 500
    INCOHERENT_MAX_POS = 12000

    FOCUSPATH_INTERVAL = 250
    FOCUSPATH_MAX_POS = 2000

    MYLAB_INTERVAL = 20 # unit is motor pulses (the others are nm); 1 pulse ~= 10 nm
    MYLAB_MAX_POS = 1660

    def __init__(self):
        """Initialize every setting to a generic default."""
        # Model configuration
        self.model_type_name = None
        self.init_from = None
        self.num_classes = 1
        # Device configuration (CUDA device id string)
        self.devices = "0"
        # Dataset configuration
        self.dataset_type_name = None
        self.test_forward = -1
        self.train_dir_list = None
        self.test_dir_list = None
        self.abs_flag = True
        self.random_pair = False
        self.num_workers = 4
        self.batch_size = 8
        self.pin_memory = False
        self.width = None
        self.height = None
        self.test_code_flag = False
        self.update_cache = True
        self.from_cache = True
        # Training hyper-parameters
        self.learning_rate = 1e-4
        self.loss_type_name = None
        self.loss_param_dict = None
        self.train_epochs = 100
        self.scheduler_flag = False
        self.l2_norm = 0
        self.scheduler_interval = 20
        self.scheduler_gamma = 0.1
        self.grad_accumulation_count = 1


        # Testing / checkpointing configuration
        self.test_interval = 1
        self.save_interval = 1
        self.save_dir = "./checkpoints"

        # wandb logging
        self.log_flag = True
        self.wandb_project = None
        self.wandb_run_name = None

        # NOTE(review): "conment" looks like a typo for "comment";
        # kept as-is because external callers may reference it.
        self.conment = ""
        # Optional suffix appended to the wandb run name by reflash().
        self.suffix = ""

    def reflash(self):
        """Recompute wandb_run_name and save_dir from current settings.

        The run name encodes model type, input resolution, batch size,
        gradient-accumulation count and loss name; `suffix` is appended
        when non-empty.

        NOTE(review): assumes wandb_project is set before calling —
        os.path.join raises TypeError if it is still None.
        """
        train_config = self
        train_config.wandb_run_name = "{}_{}_{}_bs{}_gau{}_{}".format(
            train_config.model_type_name,
            train_config.width, train_config.height,
            train_config.batch_size,
            train_config.grad_accumulation_count,
            train_config.loss_type_name)
        if self.suffix:
            train_config.wandb_run_name += "_" + self.suffix

        train_config.save_dir = os.path.join("./checkpoints", train_config.wandb_project, train_config.wandb_run_name)


def get_dataset_base_dir():
    """Return the platform-specific root directory holding all datasets.

    Returns:
        str: the dataset root path for the current operating system.

    Raises:
        RuntimeError: if the host is neither Windows nor Linux.
            (Previously this case fell through and raised a confusing
            UnboundLocalError at the `return` statement.)
    """
    if isWindows():
        return r"D:\data\datasets"
    if isLinux():
        return r"/home/username/datasets"
    raise RuntimeError("Unsupported OS: expected Windows or Linux")

def DefaultIncoherentConfig():
    """Build the default TrainConfig for the Incoherent dataset.

    Returns:
        TrainConfig: fully populated config. The wandb run name and the
        checkpoint directory are derived via TrainConfig.reflash(), so
        the naming rule lives in one place instead of being duplicated
        here (with suffix == "" the result is identical to the old
        inline construction).
    """
    dataset_base_dir = get_dataset_base_dir()

    train_config = TrainConfig()

    train_config.test_code_flag = False
    # Model configuration (concrete model type is filled in by the caller).
    train_config.model_type_name = ""
    train_config.init_from = None
    train_config.num_classes = 1  # single output head (original comment: "?")

    # Device configuration.
    train_config.devices = "0"
    if isWindows():
        train_config.devices = "0"

    # Dataset configuration.
    train_config.dataset_type_name = "FormatDataset"
    train_config.test_forward = TrainConfig.TEST_INCOHERENT
    train_config.train_dir_list = [os.path.join(dataset_base_dir, "incoherent", "train")]
    train_config.test_dir_list = [
        os.path.join(dataset_base_dir, "incoherent", "test", "testRawData_incoherent_diffProtocol_binning_cut"),
        os.path.join(dataset_base_dir, "incoherent", "test", "testRawData_incoherent_sameProtocol_binning_cut"), ]
    train_config.abs_flag = True
    train_config.num_workers = 8
    train_config.batch_size = 32
    train_config.width = 224
    train_config.height = 224

    # Training hyper-parameters.
    train_config.learning_rate = 1e-4
    train_config.loss_type_name = "MSE_loss"
    train_config.train_epochs = 50
    train_config.scheduler_flag = True
    train_config.l2_norm = 0
    train_config.scheduler_interval = 25
    train_config.scheduler_gamma = 0.1
    train_config.grad_accumulation_count = 1

    # wandb logging.
    train_config.log_flag = True
    train_config.wandb_project = "focus_pred_Incoherent"

    # Testing / checkpointing.
    train_config.test_interval = 1
    train_config.save_interval = 10
    train_config.pin_memory = False

    # Derive wandb_run_name and save_dir from the settings above.
    train_config.reflash()
    return train_config

def DefaultFocusPathConfig():
    """Build the default TrainConfig for the FocusPath dataset.

    Returns:
        TrainConfig: fully populated config. The wandb run name and the
        checkpoint directory are derived via TrainConfig.reflash(), so
        the naming rule lives in one place instead of being duplicated
        here (with suffix == "" the result is identical to the old
        inline construction).
    """
    dataset_base_dir = get_dataset_base_dir()
    train_config = TrainConfig()
    train_config.test_code_flag = False

    # Model configuration (concrete model type is filled in by the caller).
    train_config.model_type_name = ""
    train_config.init_from = None
    train_config.num_classes = 1

    # Device configuration.
    train_config.devices = "0"
    if isWindows():
        train_config.devices = "0"

    # Dataset configuration.
    train_config.dataset_type_name = "FormatDataset"
    train_config.test_forward = TrainConfig.TEST_FOCUSPATH
    train_config.train_dir_list = [os.path.join(dataset_base_dir, "FocusPath", "train")]
    train_config.test_dir_list = [os.path.join(dataset_base_dir, "FocusPath", "test")]
    train_config.abs_flag = True
    train_config.num_workers = 4
    train_config.batch_size = 32
    train_config.width = 224
    train_config.height = 224

    # Training hyper-parameters.
    train_config.learning_rate = 1e-3
    train_config.loss_type_name = "MSE_loss"
    train_config.train_epochs = 300
    train_config.scheduler_flag = True
    train_config.l2_norm = 0
    train_config.scheduler_interval = 100
    train_config.scheduler_gamma = 0.1
    train_config.grad_accumulation_count = 1

    # wandb logging.
    train_config.log_flag = True
    train_config.wandb_project = "focus_pred_focusPath"

    # Testing / checkpointing.
    train_config.test_interval = 1
    train_config.save_interval = 100
    train_config.pin_memory = False

    # Derive wandb_run_name and save_dir from the settings above.
    train_config.reflash()
    return train_config


def DefaultBBBCConfig():
    """Build the default TrainConfig for the BBBC dataset.

    Returns:
        TrainConfig: fully populated config. The wandb run name and the
        checkpoint directory are derived via TrainConfig.reflash(), so
        the naming rule lives in one place instead of being duplicated
        here (with suffix == "" the result is identical to the old
        inline construction).
    """
    dataset_base_dir = get_dataset_base_dir()
    train_config = TrainConfig()
    train_config.test_code_flag = False

    # Model configuration (concrete model type is filled in by the caller).
    train_config.model_type_name = ""
    train_config.init_from = None
    train_config.num_classes = 1  # original note suggested 17 classes as an alternative

    # Device configuration.
    train_config.devices = "0"
    if isWindows():
        train_config.devices = "0"

    # Dataset configuration.
    train_config.dataset_type_name = "FormatDataset"
    train_config.test_forward = TrainConfig.TEST_BBBC
    train_config.train_dir_list = [os.path.join(dataset_base_dir, "BBBC", "split111", "train")]
    train_config.test_dir_list = [os.path.join(dataset_base_dir, "BBBC", "split111", "test")]
    train_config.abs_flag = True
    # NOTE(review): the following attributes are not declared in
    # TrainConfig.__init__ — presumably consumed by the dataset/loader
    # for BBBC; verify against the FormatDataset implementation.
    train_config.gray_img_flag = False
    train_config.move_z = 0
    train_config.as_class_flag = False
    train_config.nm_per_class = None
    train_config.class_nm_threshold = None
    train_config.num_workers = 8
    train_config.batch_size = 32
    train_config.width = 224
    train_config.height = 224

    # Training hyper-parameters.
    train_config.learning_rate = 1e-3
    train_config.loss_type_name = "MSE_loss"
    train_config.train_epochs = 100
    train_config.scheduler_flag = True
    train_config.l2_norm = 0
    train_config.scheduler_interval = 50
    train_config.scheduler_gamma = 0.1
    train_config.grad_accumulation_count = 1

    # wandb logging.
    train_config.log_flag = True
    train_config.wandb_project = "focus_pred_BBBC"

    # Testing / checkpointing.
    train_config.test_interval = 1
    train_config.save_interval = 20
    train_config.pin_memory = False

    # Derive wandb_run_name and save_dir from the settings above.
    train_config.reflash()
    return train_config

def DefaultMyLabConfig():
    """Build the default TrainConfig for the in-house ("mylab") dataset.

    Returns:
        TrainConfig: fully populated config. The wandb run name and the
        checkpoint directory are derived via TrainConfig.reflash(), so
        the naming rule lives in one place instead of being duplicated
        here (with suffix == "" the result is identical to the old
        inline construction).
    """
    dataset_base_dir = get_dataset_base_dir()
    train_config = TrainConfig()
    train_config.test_code_flag = False

    # Model configuration (concrete model type is filled in by the caller).
    train_config.model_type_name = ""
    train_config.init_from = None
    train_config.num_classes = 1

    # Device configuration.
    train_config.devices = "0"
    if isWindows():
        train_config.devices = "0"

    # Dataset configuration.
    train_config.dataset_type_name = "FormatDataset"
    train_config.test_forward = TrainConfig.TEST_MYLAB

    train_config.train_dir_list = [
        os.path.join(dataset_base_dir,  "mylab", "train", "2023-10-24 16-53-19_resize_224_224"),
        os.path.join(dataset_base_dir,  "mylab", "train", "2024-04-19 13-59-18_resize_224_224")]

    train_config.test_dir_list = [
        os.path.join(dataset_base_dir,  "mylab", "test", "2023-10-24 15-52-08_resize_224_224"),
        os.path.join(dataset_base_dir,  "mylab", "test", "2024-04-19 11-16-46_resize_224_224")]
    train_config.abs_flag = True
    train_config.num_workers = 8
    train_config.batch_size = 32
    train_config.width = 224
    train_config.height = 224

    # Training hyper-parameters.
    train_config.learning_rate = 1e-3
    train_config.loss_type_name = "MSE_loss"
    train_config.loss_param_dict = None
    train_config.train_epochs = 100
    train_config.scheduler_flag = True
    train_config.l2_norm = 0
    train_config.scheduler_interval = 50
    train_config.scheduler_gamma = 0.1
    train_config.grad_accumulation_count = 1

    # wandb logging.
    train_config.log_flag = True
    train_config.wandb_project = "focus_pred_mylab"

    # Testing / checkpointing.
    train_config.test_interval = 1
    train_config.save_interval = 20
    train_config.pin_memory = False

    # Derive wandb_run_name and save_dir from the settings above.
    train_config.reflash()
    return train_config


