import os.path
import sys

import wandb

from Trainer.TrainConfig import *
from Trainer.Trainer import Trainer, TrainConfig
from Utils import *


# ----- Global experiment settings shared by all run_* entry points -----
g_model_type_name = "DCFNet"   # model architecture name passed to the TrainConfig
g_devices = "0"                # device id string (e.g. CUDA device "0")
g_batch_size = 32
g_grad_accumulation_count = 1  # gradient-accumulation steps per optimizer update

# Per-dataset learning-rate overrides; None means "keep the config default".
g_focusPath_learningRate = None
g_Incorherent_learningRate = None
g_BBBC_learningRate = None
g_Mylab_learningRate = None

# MSE_loss
g_loss_type_name = "MSE_loss"  # loss function name understood by the Trainer
g_suffix = None                # optional run-name suffix; None means no suffix

# 0.1x the default learning rate
# g_focusPath_learningRate = 1e-4
# g_Incorherent_learningRate = 1e-5
# g_BBBC_learningRate = 1e-4
# g_Mylab_learningRate = 1e-4

# 10x the default learning rate
# g_focusPath_learningRate = 1e-2
# g_Incorherent_learningRate = 1e-3
# g_BBBC_learningRate = 1e-2
# g_Mylab_learningRate = 1e-2

# 100x the default learning rate
# g_focusPath_learningRate = 1e-1
# g_Incorherent_learningRate = 1e-2
# g_BBBC_learningRate = 1e-1
# g_Mylab_learningRate = 1e-1

def run_focusPath():
    """Train on the FocusPath dataset.

    Applies the module-level g_* overrides to the default config, trains for
    train_epochs, tests every test_interval epochs (saving a "best"-tagged
    checkpoint whenever the running-minimum test error improves), and saves a
    "latest" checkpoint every save_interval epochs.  Finishes the wandb run
    on completion.
    """
    train_config = DefaultFocusPathConfig()
    train_config.random_pair = False
    train_config.loss_type_name = g_loss_type_name
    train_config.model_type_name = g_model_type_name
    train_config.devices = g_devices
    train_config.batch_size = g_batch_size
    train_config.scheduler_interval = 60
    train_config.grad_accumulation_count = g_grad_accumulation_count
    # GFNet is trained with randomly paired samples.
    if g_model_type_name == "GFNet":
        train_config.random_pair = True
    if g_focusPath_learningRate is not None:
        train_config.learning_rate = g_focusPath_learningRate
    if g_suffix is not None:
        train_config.suffix = g_suffix

    train_config.reflash()
    trainer = Trainer(train_config)
    for epoch in range(1, train_config.train_epochs + 1):
        trainer.train_epoch()
        if epoch % train_config.test_interval == 0:
            # Snapshot the running minimum before testing so an improvement
            # made inside test_epoch() can be detected afterwards.
            prev_best = trainer.min_error_avg_FocusPath
            trainer.test_epoch()
            if trainer.min_error_avg_FocusPath < prev_best:
                save_model(trainer.model, epoch, train_config.save_dir, latest_flag=False, save_tag="best")
        if epoch % train_config.save_interval == 0:
            save_model(trainer.model, epoch, train_config.save_dir)
    wandb.finish()

def run_Incorherent():
    """Train on the Incoherent dataset, tracking three best-error metrics.

    Saves a separately tagged checkpoint ("same_best", "diff_best",
    "all_best") whenever the corresponding running-minimum error improves
    after a test pass, plus periodic "latest" checkpoints.  Finishes the
    wandb run on completion.
    """
    train_config = DefaultIncoherentConfig()
    train_config.random_pair = False
    train_config.loss_type_name = g_loss_type_name
    train_config.model_type_name = g_model_type_name
    train_config.devices = g_devices
    train_config.batch_size = g_batch_size
    train_config.Incoherent_no_Normalize_flag = True
    train_config.grad_accumulation_count = g_grad_accumulation_count
    train_config.scheduler_interval = 15
    # GFNet is trained with randomly paired samples.
    if g_model_type_name == "GFNet":
        train_config.random_pair = True
    if g_Incorherent_learningRate is not None:
        train_config.learning_rate = g_Incorherent_learningRate
    if g_suffix is not None:
        train_config.suffix = g_suffix
    train_config.reflash()

    trainer = Trainer(train_config)
    for epoch in range(1, train_config.train_epochs + 1):
        trainer.train_epoch()
        if epoch % train_config.test_interval == 0:
            # Snapshot the running minima before testing so improvements made
            # inside test_epoch() can be detected afterwards.
            prev_same = trainer.min_error_avg_Incoherent_Same
            prev_diff = trainer.min_error_avg_Incoherent_Diff
            prev_all = trainer.min_error_avg_Incoherent_All
            trainer.test_epoch()

            if trainer.min_error_avg_Incoherent_Same < prev_same:
                save_model(trainer.model, epoch, train_config.save_dir, latest_flag=False, save_tag="same_best")
            if trainer.min_error_avg_Incoherent_Diff < prev_diff:
                save_model(trainer.model, epoch, train_config.save_dir, latest_flag=False, save_tag="diff_best")
            if trainer.min_error_avg_Incoherent_All < prev_all:
                save_model(trainer.model, epoch, train_config.save_dir, latest_flag=False, save_tag="all_best")

        if epoch % train_config.save_interval == 0:
            save_model(trainer.model, epoch, train_config.save_dir)

    wandb.finish()

def run_MyLab():
    """Train on the MyLab dataset.

    Applies the module-level g_* overrides to the default config, trains for
    train_epochs, tests every test_interval epochs (saving a "best"-tagged
    checkpoint whenever the running-minimum test error improves), and saves a
    "latest" checkpoint every save_interval epochs.  Finishes the wandb run
    on completion.
    """
    train_config = DefaultMyLabConfig()
    train_config.loss_type_name = g_loss_type_name
    train_config.model_type_name = g_model_type_name
    train_config.devices = g_devices
    train_config.scheduler_interval = 25
    train_config.batch_size = g_batch_size
    train_config.grad_accumulation_count = g_grad_accumulation_count
    # GFNet is trained with randomly paired samples.
    if g_model_type_name == "GFNet":
        train_config.random_pair = True
    if g_Mylab_learningRate is not None:
        train_config.learning_rate = g_Mylab_learningRate
    if g_suffix is not None:
        train_config.suffix = g_suffix
    train_config.reflash()

    trainer = Trainer(train_config)
    for epoch in range(1, train_config.train_epochs + 1):
        trainer.train_epoch()
        if epoch % train_config.test_interval == 0:
            # Snapshot the running minimum before testing so an improvement
            # made inside test_epoch() can be detected afterwards.
            prev_best = trainer.min_error_avg_MyLab
            trainer.test_epoch()
            if trainer.min_error_avg_MyLab < prev_best:
                save_model(trainer.model, epoch, train_config.save_dir, latest_flag=False, save_tag="best")
        if epoch % train_config.save_interval == 0:
            save_model(trainer.model, epoch, train_config.save_dir)
    wandb.finish()

def run_BBBC():
    """Train on the BBBC dataset.

    Applies the module-level g_* overrides to the default config, trains for
    train_epochs, tests every test_interval epochs (saving a "best"-tagged
    checkpoint whenever the running-minimum test error improves), and saves a
    "latest" checkpoint every save_interval epochs.  Finishes the wandb run
    on completion.
    """
    train_config = DefaultBBBCConfig()
    train_config.random_pair = False
    train_config.loss_type_name = g_loss_type_name
    train_config.model_type_name = g_model_type_name
    train_config.devices = g_devices
    train_config.batch_size = g_batch_size
    train_config.grad_accumulation_count = g_grad_accumulation_count
    train_config.scheduler_interval = 30
    # GFNet is trained with randomly paired samples.
    if g_model_type_name == "GFNet":
        train_config.random_pair = True
    if g_BBBC_learningRate is not None:
        train_config.learning_rate = g_BBBC_learningRate
    if g_suffix is not None:
        train_config.suffix = g_suffix
    train_config.reflash()

    trainer = Trainer(train_config)
    for epoch in range(1, train_config.train_epochs + 1):
        trainer.train_epoch()
        if epoch % train_config.test_interval == 0:
            # Snapshot the running minimum before testing so an improvement
            # made inside test_epoch() can be detected afterwards.
            prev_best = trainer.min_error_avg_BBBC
            trainer.test_epoch()
            if trainer.min_error_avg_BBBC < prev_best:
                save_model(trainer.model, epoch, train_config.save_dir, latest_flag=False, save_tag="best")
        if epoch % train_config.save_interval == 0:
            save_model(trainer.model, epoch, train_config.save_dir)
    wandb.finish()

if __name__ == '__main__':
    # The mode argument may combine letters, e.g. "fm" runs FocusPath then
    # MyLab; the checks are deliberately independent ifs, not elifs.
    if len(sys.argv) < 2:
        print("Usage: python <script> <mode>  (letters: f=FocusPath, m=MyLab, b=BBBC, i=Incoherent)")
        sys.exit(1)
    mode = sys.argv[1].lower()  # evaluate once instead of four times
    if "f" in mode:
        run_focusPath()
    if "m" in mode:
        run_MyLab()
    if "b" in mode:
        run_BBBC()
    if "i" in mode:
        run_Incorherent()