import numpy as np
import random
import torch
import os

import torchvision

os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
from torchvision import datasets, transforms
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt
from ResNet import ResNet,BasicBlock
import torch.nn as nn
import torch.optim as optim

def set_seed(seed):
    """Seed all relevant RNGs (random, numpy, torch) for reproducibility.

    Note: assigning PYTHONHASHSEED at runtime does NOT affect hash
    randomization of the already-running interpreter (it must be set
    before Python starts); it is kept so child processes inherit it.
    """
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    # Also seed every CUDA device when available (no-op on CPU-only setups).
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)

set_seed(42)

# 1. Dataset loading and preprocessing.
# Training-data augmentation pipeline.
train_transforms = transforms.Compose([
    # Random crop + resize to 224x224 (data augmentation).
    transforms.RandomResizedCrop(224),
    # Random horizontal flip, default probability 0.5.
    transforms.RandomHorizontalFlip(),
    # BUG FIX: a CenterCrop(224) used to follow here; it was a no-op
    # because RandomResizedCrop already outputs 224x224, so it was removed.
    # Convert the PIL image to a float tensor in [0, 1].
    transforms.ToTensor(),
    # Normalize with the standard ImageNet statistics expected by
    # pretrained ResNet weights.
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])
# Validation preprocessing: deterministic resize + center crop, no augmentation.
test_transforms = transforms.Compose([
    transforms.Resize(224),
    transforms.CenterCrop(224),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])

# Load the image-folder datasets (one subdirectory per class).
train_datasets = datasets.ImageFolder(root='../dataset/train', transform=train_transforms)
test_datasets = datasets.ImageFolder(root='../dataset/val', transform=test_transforms)

# Wrap in DataLoaders for mini-batch iteration.
# NOTE(review): "trian_dataloader" is a typo for "train_dataloader"; the
# name is kept unchanged because later code in this file references it.
trian_dataloader = DataLoader(train_datasets, batch_size=32, shuffle=True)
test_dataloader = DataLoader(test_datasets, batch_size=32, shuffle=False)

# # Display a few sample images from the training set
# examples = enumerate(trian_dataloader)
# batch_idx, (imgs, lbs) = next(examples)
# fig = plt.figure()
#
# for i in range(4):
#     plt.subplot(2, 2, i + 1)
#     plt.imshow(imgs[i][0], cmap='gray')
#     plt.title(f'Ground Truth: {lbs[i]}')
#     plt.xticks([])
#     plt.yticks([])
# plt.show()

# 2. Instantiate the model.
# model = ResNet(BasicBlock, [2,2,2,2], num_classes=10).to('cpu')

# Transfer learning: start from an ImageNet-pretrained ResNet-18.
model = torchvision.models.resnet18(weights=None).to('cpu')
# Load the pretrained weights from a local checkpoint.
# SECURITY FIX: weights_only=True restricts unpickling to tensor data,
# which is all a state dict needs and avoids arbitrary-code execution
# from a tampered checkpoint file.
model.load_state_dict(torch.load('../model/resnet18-5c106cde.pth', weights_only=True))
# Freeze the backbone so only the new classifier head is trained.
for param in model.parameters():
    param.requires_grad = False
# Replace the final fully-connected layer with a 10-class head.
fc_inputs = model.fc.in_features
model.fc = nn.Sequential(
    nn.Linear(fc_inputs, 256),
    nn.ReLU(),
    nn.Dropout(0.4),
    nn.Linear(256, 10),
    # BUG FIX: the original head ended with nn.LogSoftmax(dim=1), but the
    # loss below is CrossEntropyLoss, which applies log-softmax internally.
    # Feeding log-probabilities into CrossEntropyLoss normalizes twice and
    # distorts the gradients, so the head now outputs raw logits.
).to('cpu')


# Loss and optimizer. CrossEntropyLoss expects raw logits (see above).
criterion = nn.CrossEntropyLoss()
# Only pass the unfrozen (head) parameters to the optimizer.
optimizer = optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=0.001)

# 3. Train the model.
epochs = 5
best_acc = 0.0  # Best accuracy seen so far (updated by the evaluation below).
for epoch in range(1, 1 + epochs):
    model.train()
    total_loss = 0.0  # Running sum of per-batch mean losses for this epoch.
    correct = 0       # Number of correctly classified samples so far.
    total = 0         # Total number of samples seen so far.

    # i is the batch index within the current epoch.
    for i, (images, labels) in enumerate(trian_dataloader):
        images = images.to('cpu')
        labels = labels.to('cpu')

        # Reset gradients accumulated from the previous step.
        optimizer.zero_grad()
        # Forward pass.
        outputs = model(images)
        # Compute the loss for this batch.
        loss = criterion(outputs, labels)

        # Backward pass and parameter update.
        loss.backward()
        optimizer.step()

        # Accumulate the per-batch mean loss.
        total_loss += loss.item()

        # Track running accuracy.
        _, predicted = torch.max(outputs.data, 1)  # Predicted class index.
        total += labels.size(0)
        correct += (predicted == labels).sum().item()

        # Progress report.
        # BUG FIX: the original printed the cumulative sum labelled "Loss"
        # and used len(train_datasets) (sample count) as the batch total;
        # report the current batch loss and the true number of batches.
        print(f'Epoch [{epoch}/{epochs}], Loss: {loss.item():.3f}, Batch: [{i+1}/{len(trian_dataloader)}]')

    # Epoch statistics.
    # BUG FIX: average over the number of batches, not the number of
    # samples — loss.item() is already a mean over the batch.
    avg_loss = total_loss / len(trian_dataloader)

    epoch_acc_new = 100. * correct / total
    if epoch % 5 == 0 or epoch == 1:
        print(f'Average loss: {avg_loss:.3f}, Accuracy: {epoch_acc_new:.2f}%')

# 4. Evaluate the model on the validation set.
model.eval()
total_loss = 0.0
correct = 0
total = 0
with torch.no_grad():  # No gradient tracking needed during evaluation.
    for images, labels in test_dataloader:
        images = images.to('cpu')
        labels = labels.to('cpu')
        outputs = model(images)
        loss = criterion(outputs, labels)
        total_loss += loss.item()

        # Track accuracy.
        _, predicted = torch.max(outputs.data, 1)  # Predicted class index.
        total += labels.size(0)
        correct += (predicted == labels).sum().item()

test_acc = 100. * correct / total
# BUG FIX: total_loss was accumulated but never reported; print the
# average validation loss alongside the accuracy.
avg_test_loss = total_loss / len(test_dataloader)
print(f'Test loss: {avg_test_loss:.3f}, Test accuracy: {test_acc:.2f}%')
# Save a checkpoint when the accuracy improves on the best seen so far.
if test_acc > best_acc:
    best_acc = test_acc
    # Format the accuracy to two decimals so the filename does not embed
    # an arbitrary-precision float.
    torch.save(model.state_dict(), f'../model/ResNet_{best_acc:.2f}%.pth')
