import torchvision
import torchvision.transforms as transforms

import numpy as np
from fastai import *
import pandas as pd
from fastai.vision import *

from fastai.torch_core import defaults
import torch.nn as nn
from fastai.callbacks import *
from utils import *
from models import ResNet18, ResNet34
from losses import *

import argparse


def parse_arg(argv=None):
    """Parse command-line options for the single-variable search script.

    Args:
        argv: Optional list of argument strings; when ``None`` (the default,
            preserving the old no-argument call), ``sys.argv[1:]`` is parsed.

    Returns:
        The ``(args, unknown)`` pair from ``ArgumentParser.parse_known_args``.
    """
    dataset_options = ['cifar10', 'cifar100']
    parser = argparse.ArgumentParser(description='single variable search')
    parser.add_argument('--loss_func', default='soft',
                        choices=['soft', 'arc1', 'arcface', 'adacos', 'center',
                                 'cosface', 'sphere', 'arc2'])
    # BUG FIX: the old default 'cifar' was not in `choices`; argparse does not
    # validate defaults, so it silently behaved like CIFAR-10 downstream.
    # Make the default an explicit, valid choice.
    parser.add_argument('--ds', default='cifar10', type=str, choices=dataset_options)

    parser.add_argument('-e', '--epochs', default=90, type=int)
    parser.add_argument('--data', default='./data', type=str)
    parser.add_argument('--lr', default=0.1, type=float)
    parser.add_argument('--wd', default=0.0005, type=float)
    parser.add_argument('--bs', default=256, type=int)
    parser.add_argument('-o', '--out', type=str, default='output/')
    parser.add_argument('--data_augmentation', action='store_true', default=False,
                        help='augment data by flipping and cropping')
    parser.add_argument('--device', default='cuda')
    # NOTE(review): -s and -m are presumably the scale/margin hyper-parameters
    # forwarded to get_loss() for the margin-based losses — confirm in losses.py.
    parser.add_argument('-s', default=0.5, type=float)
    parser.add_argument('-m', default=0, type=float)
    parser.add_argument('--seed', default=113, type=int)

    return parser.parse_known_args(argv)


def __post_init__(self) -> None:
    """Patched `Learner.__post_init__`: set up path, metrics and callbacks,
    ensure the model directory exists, and — unlike stock fastai — append the
    loss function to `layer_groups` so its own parameters get trained."""
    # Fall back to the DataBunch's path when the Learner was constructed without one.
    self.path = Path(ifnone(self.path, self.data.path))
    (self.path / self.model_dir).mkdir(parents=True, exist_ok=True)
    self.model = self.model.to(self.data.device)
    self.loss_func = ifnone(self.loss_func, self.data.loss_func)
    self.metrics = listify(self.metrics)
    # Key difference from stock fastai: the loss function can carry trainable
    # weights (e.g. the arcface/cosface heads offered by --loss_func), so it is
    # bundled with the flattened model into the single default layer group.
    if not self.layer_groups: self.layer_groups = [nn.Sequential(*(flatten_model(self.model) + [self.loss_func]))]
    self.callbacks = listify(self.callbacks)
    self.callback_fns = [Recorder] + listify(self.callback_fns)


def _error_rate(criterion):
    def error(features, label):
        return error_rate(criterion(features), label)

    return error


def _error_rate5(criterion):
    def error_5(features, label):
        return 1 - top_k_accuracy(criterion(features), label)

    return error_5


# Monkey-patch: swap in the `__post_init__` defined above so every Learner
# includes the loss function in its layer groups. The stock implementation is
# kept in `__post_init__bac` in case it needs restoring.
__post_init__bac = Learner.__post_init__
Learner.__post_init__ = __post_init__


def main(args):
    """Train a ResNet on CIFAR-10/100 with the loss head selected in `args`.

    Builds the train/valid DataLoaders, wraps them in a fastai DataBunch,
    constructs the model + criterion pair, and runs one-cycle training while
    logging metrics to CSV under `args.out`.
    """
    defaults.device = args.device
    setup_seed(args.seed)
    root = args.data
    # Normalisation is always applied; random crop/flip only with --data_augmentation.
    if args.data_augmentation:
        transform_train = transforms.Compose([
            transforms.RandomCrop(32, padding=4),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize(*cifar_stats),
        ])
    else:
        transform_train = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize(*cifar_stats),
        ])

    DATASET = torchvision.datasets.CIFAR100 if args.ds == 'cifar100' else torchvision.datasets.CIFAR10

    trainset = DATASET(root=root, train=True, download=True, transform=transform_train)
    testset = DATASET(root=root, train=False, download=True, transform=transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(*cifar_stats),
    ]))

    # BUG FIX: the loaders previously hard-coded batch_size=256, silently
    # ignoring the parsed --bs option (whose default is the same 256).
    trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.bs, shuffle=True,
                                              num_workers=4, pin_memory=True)
    validloader = torch.utils.data.DataLoader(testset, batch_size=args.bs, shuffle=False,
                                              num_workers=4, pin_memory=True)

    databunch = DataBunch(trainloader, validloader)

    # Embedding width, class count, and backbone depth depend on the dataset.
    hidden = 512 if args.ds == 'cifar100' else 256
    class_number = 100 if args.ds == 'cifar100' else 10
    cnn = ResNet18(hidden) if args.ds == 'cifar100' else ResNet34(hidden)

    loss_func = args.loss_func
    criterion = get_loss(loss_func, hidden, class_number, args).to(args.device)

    learner = Learner(databunch, cnn,
                      path=args.out,
                      metrics=[_error_rate(criterion), _error_rate5(criterion)],
                      loss_func=criterion,
                      )
    print(loss_func, args)
    # Keyword form of the old positional mkdir(511, True, True); 511 == 0o777.
    learner.path.mkdir(mode=0o777, parents=True, exist_ok=True)

    learner.fit_one_cycle(args.epochs, args.lr, wd=args.wd,
                          callbacks=[CSVLogger(learner)])


if __name__ == '__main__':
    # Discard unrecognised CLI options (parse_known_args returns them second).
    cli_args, _unknown = parse_arg()
    main(cli_args)