import argparse
import torch
import timm
from ljp.engine import FastTrainer
from ljp.tools import get_data, get_model
from torchmetrics import Accuracy, Precision, Recall, F1Score, AUROC, ConfusionMatrix
from ljp.dataset_dali.dali_imagenet import ImageNet2012

# Reproducibility: force deterministic cuDNN kernels and disable the
# auto-tuner (benchmark mode), which would otherwise pick algorithms
# non-deterministically depending on hardware/timing.
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

# Candidate timm architectures to benchmark. Every entry must be a valid
# name in timm.list_models(); run() asserts this before model creation.
# BUG FIX: two missing commas previously triggered implicit string
# concatenation, producing bogus entries 'resnext101_32x8drepvgg_b3' and
# 'mobilenetv4_hybrid_largefastvit_sa36' while silently dropping
# 'repvgg_b3' and 'fastvit_sa36' from the list.
timm_models = [
    'xception71',
    'ghostnetv2_160',
    'resnet101',
    'resnext101_32x8d',
    'repvgg_b3',
    'mnasnet_140',
    'inception_next_base',
    'mobileone_s4',
    'mobilenetv4_hybrid_large',
    'fastvit_sa36',
    'repvit_m3',
    'convnextv2_large',
]


def run(
        model_name='',
        device='',
        optim=None,
        save_dir='path_to_your_path'  # fix: was an f-string with no placeholders (same value)
):
    """Train ``model_name`` on ImageNet-2012 with the supplied optimizer class.

    Parameters:
        model_name: 'resnet18'/'resnet34'/'resnet50' (built via the project
            ``get_model`` factory) or any name accepted by
            ``timm.list_models()``.
        device: CUDA device string such as 'cuda:0'; the numeric suffix is
            reused as the DALI device id.
        optim: optimizer *class* (not an instance) placed in the compile
            config — presumably instantiated inside FastTrainer.compile;
            TODO confirm.
        save_dir: checkpoint/log directory handed to ``net.fit``; the
            default is a placeholder and must be overridden for real runs.
    """
    # StepLR for a 90-epoch schedule: multiply lr by 0.1 every 30 epochs.
    lr_scheduler_dict = {
        'lr_scheduler': torch.optim.lr_scheduler.StepLR,
        'step_size': 30,
        'gamma': 0.1,
    }

    loss_dict = {
        'loss_func': torch.nn.CrossEntropyLoss,
    }

    # Optimizer hyper-parameters. 'final_lr'/'gamma' are presumably only
    # consumed by optimizers that support them (AdaBound-style) — TODO confirm
    # how FastTrainer.compile dispatches these kwargs.
    optimizer_dict_wd = {
        'optimizer': optim,
        'lr': 1e-3,
        'final_lr': 0.1,
        'gamma': 1e-3,
        'weight_decay': 1e-4,
    }

    data = ImageNet2012(
        dataroot='/data/imagenet/',
        image_size=64,
        batch_size=64,
        batch_size_test=128,
        # 'cuda:3' -> 3; assumes the device string always has the 'cuda:' prefix.
        device_id=int(device.replace('cuda:', '')),
        use_dali=False,
    )

    if model_name in ('resnet50', 'resnet18', 'resnet34'):
        # Locally defined architectures go through the project factory.
        net = get_model(modelname=model_name, data=data)
    else:
        # NOTE(review): assert is stripped under `python -O`; raising
        # ValueError would be sturdier. Kept as-is to preserve behavior.
        assert model_name in timm.list_models()
        timm_model = timm.create_model(model_name,
                                       pretrained=False,
                                       num_classes=data.num_classes,
                                       in_chans=data.inchannels)
        if not hasattr(timm_model, 'desc'):
            # Description label the trainer presumably uses for logging/naming.
            timm_model.desc = model_name
        net = FastTrainer(timm_model)

    net.compile(loss_dict=loss_dict,
                optimizer_dict=optimizer_dict_wd,
                lr_scheduler_dict=lr_scheduler_dict,
                metrics_dict={
                    "acc": Accuracy('multiclass', num_classes=data.num_classes, average='weighted').to(device),
                    # Extra metrics, disabled by default (slower validation):
                    # "precision": Precision('multiclass', num_classes=data.num_classes, average='weighted').to(device),
                    # "recall": Recall('multiclass', num_classes=data.num_classes, average='weighted').to(device),
                    # "f1": F1Score('multiclass', num_classes=data.num_classes, average='weighted').to(device),
                    # "auc": AUROC('multiclass', num_classes=data.num_classes, average='weighted').to(device),
                },
                numpy_metric=False,
                monitor='val_acc',   # metric used to select the best checkpoint
                monitor_mode='max')

    net.fit(data=data,
            epochs=90,
            device=device,
            verbose=2,
            use_compile=False,   # torch.compile off
            use_amp=False,       # mixed precision off
            save_dir=save_dir)


if __name__ == "__main__":
    # Optimizer menu: these imports are intentionally broad so a different
    # optimizer class can be swapped into `optim` below without editing run().
    from torch.optim import SGD, Adam, Adamax, RAdam
    from ljp.optimizers.adabound import AdaBound
    from ljp.optimizers.ranger import Ranger
    from ljp.optimizers.adabelief import AdaBelief
    from ljp.optimizers.diffgrad import DiffGrad
    from ljp.optimizers.adan import Adan
    from ljp.optimizers.adabelief_derivative import AdaDerivative
    from ljp.optimizers.adabelief_fast import FastAdaBelief
    from ljp.optimizers.came import CAME
    from ljp.optimizers.lion import Lion
    from ljp.optimizers.sophia import Sophia
    from ljp.optimizers.adabob import AdaBoB

    # GPU to train on; run() also derives the DALI device id from the suffix.
    device = 'cuda:0'
    # Must be a valid timm model name (or one of the local resnet variants).
    model_name = 'fastvit_sa36'
    # Optimizer class (not an instance) forwarded into the compile config.
    optim = AdaBoB
    run(model_name=model_name, device=device, optim=optim)
