import os
import numpy as np
import torch.nn as nn
import torch
from torch.utils.data import DataLoader
import torchvision.transforms as transforms
import torch.optim as optim
from matplotlib import pyplot as plt
import torchvision.models as models
from Baseline.A_AlexNet.tools.my_dataset import CatDogDataset

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Build the assets path portably: the original r'..\..\..' backslash literal
# only resolves on Windows; os.pardir joins work on every OS.
ASSETS_DIR = os.path.join(BASE_DIR, os.pardir, os.pardir, os.pardir, 'Assets')

# torch device strings are case-sensitive lowercase: torch.device("CPU")
# raises RuntimeError, so the original fallback crashed on CPU-only machines.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


def get_model(path_state_dict, vis_model=False):
    """Build an AlexNet, load pretrained weights, and move it to ``device``.

    Args:
        path_state_dict (str): path to a saved ``state_dict`` (.pth file).
        vis_model (bool): if True, print a torchsummary layer summary
            (computed on CPU) before the model is moved to ``device``.

    Returns:
        torch.nn.Module: AlexNet with the pretrained weights, on ``device``.
    """
    model = models.alexnet()
    # map_location='cpu' lets a checkpoint saved on GPU load on a
    # CPU-only machine; model.to(device) below restores GPU placement.
    pretrained_state_dict = torch.load(path_state_dict, map_location='cpu')
    model.load_state_dict(pretrained_state_dict)

    if vis_model:
        # Imported lazily so torchsummary is only required when visualizing.
        from torchsummary import summary
        summary(model, input_size=(3, 224, 224), device='cpu')

    model.to(device)
    return model


if __name__ == '__main__':
    # ---- config ----
    data_dir = os.path.join(ASSETS_DIR, 'dogs-vs-cats-redux-kernels-edition', 'train')
    path_state_dict = os.path.join(ASSETS_DIR, 'pre_train_modules', 'pre_train_alexnet.pth')
    num_classes = 2  # binary classification: cat vs dog

    MAX_EPOCH = 3
    BATCH_SIZE = 128
    LR = 0.001
    log_interval = 1     # presumably: log every N iterations — used by the (unseen) training loop
    val_interval = 1     # presumably: validate every N epochs — used by the (unseen) training loop
    classes = 2
    start_epoch = -1
    lr_decay_step = 1    # StepLR decays the learning rate every epoch (gamma=0.1 below)

    # ---- data ----
    # ImageNet channel statistics, matching the pretrained AlexNet weights.
    norm_mean = [0.485, 0.456, 0.406]
    norm_std = [0.229, 0.224, 0.225]

    # Train-time pipeline: resize shortest edge to 256, center-crop to 256x256,
    # then random 224x224 crop + horizontal flip for augmentation.
    # NOTE(review): Resize((256)) is Resize(256) — the parentheses are redundant,
    # this is an int (shortest-edge resize), not a (256, 256) tuple.
    train_transform = transforms.Compose([
        transforms.Resize((256)),
        transforms.CenterCrop(256),
        transforms.RandomCrop(224),
        transforms.RandomHorizontalFlip(p=0.5),
        transforms.ToTensor(),
        transforms.Normalize(norm_mean, norm_std),
    ])

    # Validation uses TenCrop (4 corners + center, plus mirrored versions),
    # so each sample becomes a stacked tensor of 10 crops; the evaluation
    # code is expected to average predictions over the crop dimension.
    normalizes = transforms.Normalize(norm_mean, norm_std)
    valid_transform = transforms.Compose([
        transforms.Resize((256, 256)),
        transforms.TenCrop(224, vertical_flip=False),
        transforms.Lambda(lambda crops: torch.stack([normalizes(transforms.ToTensor()(crop)) for crop in crops]))
    ])

    # Build the dataset instances
    train_data = CatDogDataset(data_dir=data_dir, mode='train', transforms=train_transform)
    valid_data = CatDogDataset(data_dir=data_dir, mode='valid', transforms=valid_transform)

    # Build the DataLoaders (valid batch is small because TenCrop multiplies
    # each sample into 10 crops)
    train_loader = DataLoader(dataset=train_data, batch_size=BATCH_SIZE, shuffle=True)
    valid_loader = DataLoader(dataset=valid_data, batch_size=4)

    # ---- model ----
    alexnet_model = get_model(path_state_dict, False)

    # Replace the final classifier layer (index '6') with a fresh 2-class head.
    num_ftrs = alexnet_model.classifier._modules['6'].in_features
    alexnet_model.classifier._modules['6'] = nn.Linear(num_ftrs, num_classes)

    alexnet_model.to(device)

    # ---- loss ----
    criterion = nn.CrossEntropyLoss()

    # ---- optimizer: optionally freeze/slow the convolutional backbone ----
    # flag = 1 would train the backbone at LR*0.1 and the classifier at LR;
    # with flag = 0 every parameter trains at the same LR.
    flag = 0
    if flag:
        # Separate the classifier params from the rest by object identity.
        fc_params_id = list(map(id, alexnet_model.classifier.parameters()))
        base_params = filter(lambda p: id(p) not in fc_params_id, alexnet_model.parameters())
        optimizer = optim.SGD([
            {'params': base_params, 'lr': LR * 0.1},
            {'params': alexnet_model.classifier.parameters(), 'lr': LR}
        ], momentum=0.9)
    else:
        optimizer = optim.SGD(alexnet_model.parameters(), lr=LR, momentum=0.9)

    # Multiply the LR by 0.1 every lr_decay_step epochs.
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=lr_decay_step, gamma=0.1)
