
import torch
import torch.nn as nn
from torch import optim
import torchvision as tv
import torchvision.transforms as transforms
import models
import logging
from .misc import LabelSmoothing, CosAnnealWarmup


def build_model(cfg):
    """Instantiate a model from the project `models` registry.

    Args:
        cfg: config with `type` (key into `models.__dict__`), `args`
            (kwargs for the model constructor), and `pretrain` (falsy, or a
            path to a checkpoint — either a raw state_dict or a dict holding
            one under 'state_dict').

    Returns:
        The constructed ``nn.Module``, optionally warm-started from
        ``cfg.pretrain`` with ``strict=False`` (partial loads allowed).
    """
    model = models.__dict__[cfg.type](**cfg.args)

    if cfg.pretrain:
        logging.info(f'loading pretrain from {cfg.pretrain}')
        # map_location='cpu' lets a GPU-saved checkpoint load on CPU-only
        # hosts; the caller is responsible for moving the model to its device.
        ckpt = torch.load(cfg.pretrain, map_location='cpu')
        state_dict = ckpt['state_dict'] if 'state_dict' in ckpt else ckpt
        try:
            ret = model.load_state_dict(state_dict, strict=False)
        except Exception as e:
            logging.warning(str(e))
        else:
            # `ret` lists missing/unexpected keys from the non-strict load.
            # (Previously this was logged even after a failure, printing "None".)
            logging.warning(str(ret))
    return model


def build_optimizer(cfg, params):
    """Create an optimizer over `params`; only SGD is supported.

    `cfg.type` is matched case-insensitively and `cfg.args` is forwarded
    as keyword arguments to the optimizer constructor.
    """
    opt_type = cfg.type.lower()
    if opt_type != 'sgd':
        raise ValueError(f'Unknown optimizer type: {cfg.type}')
    return optim.SGD(params, **cfg.args)

def build_optimizer_q(cfg, params, params_q):
    """Create a two-group SGD optimizer.

    The first group (`params`) uses `cfg.args`, the second (`params_q` —
    presumably quantization-related parameters; verify against callers)
    uses `cfg.args_q`. Only SGD is supported; `cfg.type` is matched
    case-insensitively.
    """
    if cfg.type.lower() != 'sgd':
        raise ValueError(f'Unknown optimizer type: {cfg.type}')
    groups = [
        dict(params=params, **cfg.args),
        dict(params=params_q, **cfg.args_q),
    ]
    return optim.SGD(groups)



def build_dataset(cfg, split):
    """Build a train or eval dataset per `cfg.type`.

    Supported types (case-insensitive): 'cifar10', 'cifar100', 'imagenet'.
    `split == 'train'` selects the augmented training pipeline; any other
    value selects the deterministic eval pipeline. `cfg.root` is the data
    directory (for ImageNet, '/train' or '/val' is appended).

    NOTE(review): the same ImageNet mean/std normalization is applied to the
    CIFAR datasets as well — preserved as-is from the original behavior.
    """
    kind = cfg.type.lower()
    is_train = split == 'train'
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    # Deterministic eval pipeline shared by both CIFAR variants.
    cifar_eval = transforms.Compose([
        transforms.ToTensor(),
        normalize,
    ])

    if kind == 'cifar10':
        if is_train:
            tfm = transforms.Compose([
                transforms.RandomHorizontalFlip(),
                transforms.RandomCrop(32, 4),
                transforms.ToTensor(),
                normalize,
            ])
        else:
            tfm = cifar_eval
        return tv.datasets.CIFAR10(root=cfg.root, train=is_train, transform=tfm)

    if kind == 'cifar100':
        if is_train:
            # CIFAR-100 additionally applies a random rotation (up to 15 deg).
            tfm = transforms.Compose([
                transforms.RandomHorizontalFlip(),
                transforms.RandomCrop(32, 4),
                transforms.RandomRotation(15),
                transforms.ToTensor(),
                normalize,
            ])
        else:
            tfm = cifar_eval
        return tv.datasets.CIFAR100(root=cfg.root, train=is_train, transform=tfm)

    if kind == 'imagenet':
        if is_train:
            tfm = transforms.Compose([
                transforms.RandomResizedCrop(224),
                transforms.RandomHorizontalFlip(),
                transforms.ToTensor(),
                normalize,
            ])
            return tv.datasets.ImageFolder(cfg.root + '/train', transform=tfm)
        tfm = transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            normalize,
        ])
        return tv.datasets.ImageFolder(cfg.root + '/val', transform=tfm)

    raise ValueError(f'Unknown dataset type: {cfg.type}')


def build_lr_scheduler(cfg, optimizer):
    """Create an LR scheduler for `optimizer`.

    `cfg.type` (case-insensitive) selects 'MultiStepLR' (torch built-in) or
    'CosAnnealWarmup' (project-local, from .misc); `cfg.args` is forwarded
    as constructor kwargs.
    """
    kind = cfg.type.lower()
    if kind == 'multisteplr':
        return optim.lr_scheduler.MultiStepLR(optimizer, **cfg.args)
    if kind == 'cosannealwarmup':
        return CosAnnealWarmup(optimizer, **cfg.args)
    raise ValueError(f'Unknown lr_scheduler type: {cfg.type}')

def build_loss(cfg):
    """Build the training criterion.

    `cfg.type` (case-insensitive) selects 'CrossEntropy' or 'LabelSmoothing';
    `cfg.args` (optional — defaults to {} and is written back onto cfg, as
    before) is forwarded as constructor kwargs.

    Raises:
        ValueError: if `cfg.type` is not a recognized criterion.
    """
    if 'args' not in cfg:
        cfg.args = {}
    kind = cfg.type.lower()
    if kind == 'crossentropy':
        return nn.CrossEntropyLoss(**cfg.args)
    # Bug fix: the original only matched the misspelled 'labelsmooothing'
    # (triple 'o'), so configs spelling it correctly hit the ValueError.
    # The misspelling is kept for backward compatibility with old configs.
    elif kind in ('labelsmoothing', 'labelsmooothing'):
        return LabelSmoothing(**cfg.args)
    else:
        raise ValueError(f'Unknown criterion type: {cfg.type}')
