# 导入所需包
import math
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.vision import datasets, transforms
import paddle.utils
from data_load import get_dataloader,get_dataset
# Teacher network definition.
# Conv2D(in_channels, out_channels, kernel_size, stride)
class TeacherNet(nn.Layer):
    """Teacher CNN for 10-class image classification.

    Two 3x3 conv + BN + ReLU stages, a 2x2 max-pool, dropout, then a
    two-layer fully-connected head.  Returns raw logits (no softmax).
    """

    def __init__(self):
        super(TeacherNet, self).__init__()
        # Feature extractor: 3 -> 32 -> 64 channels, 3x3 kernels, stride 1.
        self.conv1 = nn.Conv2D(in_channels=3, out_channels=32, kernel_size=3, stride=1)
        self.bn1 = nn.BatchNorm2D(32)
        self.relu1 = nn.ReLU()
        self.conv2 = nn.Conv2D(in_channels=32, out_channels=64, kernel_size=3, stride=1)
        self.bn2 = nn.BatchNorm2D(64)
        self.relu2 = nn.ReLU()
        # Regularization: spatial dropout after pooling, plain dropout in the head.
        self.dropout1 = nn.Dropout2D(0.3)
        self.dropout2 = nn.Dropout(0.5)
        # 9216 = 64 channels * 12 * 12 spatial — assumes 28x28 inputs
        # (28 -> 26 -> 24 after the two valid convs, then /2 pooling).
        self.fc1 = nn.Linear(9216, 128)
        self.fc2 = nn.Linear(128, 10)
        self.relu3 = nn.ReLU()

    def forward(self, x):
        """Map an image batch (N, 3, 28, 28) to class logits of shape (N, 10)."""
        features = self.relu1(self.bn1(self.conv1(x)))
        features = self.relu2(self.bn2(self.conv2(features)))
        pooled = self.dropout1(F.max_pool2d(features, 2))
        flat = paddle.flatten(pooled, 1)
        hidden = self.dropout2(self.relu3(self.fc1(flat)))
        return self.fc2(hidden)
import paddle
import paddle.nn as nn
# Running-average meter used to track scalar metrics (loss/accuracy) during training.
class AverageMeter():
    """Meter for monitoring running averages of scalar values (e.g. losses).

    Tracks the running sum, sample count and average of values fed in
    through :meth:`update`.
    """

    def __init__(self):
        # reset() performs all initialization; the previous version
        # duplicated the zero-assignments here and then reset again.
        self.reset()

    def reset(self):
        """Reset sum, count and average to zero."""
        self.avg = 0
        self.sum = 0
        self.cnt = 0

    def update(self, val, n=1):
        """Fold in ``val``, the average over ``n`` samples.

        Args:
            val: mean value across the ``n`` samples.
            n: number of samples ``val`` averages over (default 1).
        """
        self.sum += val * n
        self.cnt += n
        # Guard against division by zero when n == 0 on the first update.
        if self.cnt > 0:
            self.avg = self.sum / self.cnt
# Teacher network: train for one epoch.
def teacher_train_one_epoch(model, dataloader, criterion, optimizer, epoch, total_epoch, report_freq=20):
    """Run one training epoch of the teacher network.

    Args:
        model: network to train (switched to train mode here).
        dataloader: iterable yielding (image, label) batches.
        criterion: loss applied to the raw logits.
        optimizer: paddle optimizer updating ``model``'s parameters.
        epoch: 1-based index of the current epoch (logging only).
        total_epoch: total number of epochs (logging only).
        report_freq: print running averages every this many batches.

    Returns:
        Tuple ``(loss, acc1)`` — the LAST batch's loss tensor and top-1
        accuracy tensor (note: not the epoch averages).
    """
    print(f'----- Training Epoch [{epoch}/{total_epoch}]:')
    loss_meter = AverageMeter()
    acc_meter = AverageMeter()
    model.train()
    for batch_idx, (image, label) in enumerate(dataloader):
        out = model(image)
        loss = criterion(out, label)

        # Backprop, apply the update, then clear gradients for the next step.
        loss.backward()
        optimizer.step()
        optimizer.clear_grad()

        # Softmax is only needed for the accuracy metric; the loss uses logits.
        acc1 = paddle.metric.accuracy(F.softmax(out, axis=1), label)

        n_samples = image.shape[0]
        loss_meter.update(loss.cpu().numpy()[0], n_samples)
        acc_meter.update(acc1.cpu().numpy()[0], n_samples)

        if batch_idx > 0 and batch_idx % report_freq == 0:
            print(f'----- Batch[{batch_idx}/{len(dataloader)}], Loss: {loss_meter.avg:.5}, Acc@1: {acc_meter.avg:.4}')

    print(f'----- Epoch[{epoch}/{total_epoch}], Loss: {loss_meter.avg:.5}, Acc@1: {acc_meter.avg:.4}')
    return loss, acc1
# Teacher network: validation pass.
def teacher_validate(model, dataloader, criterion, report_freq=10):
    """Evaluate the teacher network over a validation dataloader.

    Args:
        model: network to evaluate (switched to eval mode here).
        dataloader: iterable yielding (image, label) batches.
        criterion: loss applied to the raw logits.
        report_freq: print running averages every this many batches.

    Returns:
        Tuple ``(loss, acc1)`` — the LAST batch's loss tensor and top-1
        accuracy tensor (note: not the averages, which are only printed).
    """
    print('----- Validation')
    loss_meter = AverageMeter()
    acc_meter = AverageMeter()
    model.eval()
    # Inference only: disable autograd so no backward graph is built,
    # saving memory and time (the original version omitted this).
    with paddle.no_grad():
        for batch_idx, data in enumerate(dataloader):
            image = data[0]
            label = data[1]

            out = model(image)
            loss = criterion(out, label)

            pred = paddle.nn.functional.softmax(out, axis=1)
            acc1 = paddle.metric.accuracy(pred, label)
            batch_size = image.shape[0]
            loss_meter.update(loss.cpu().numpy()[0], batch_size)
            acc_meter.update(acc1.cpu().numpy()[0], batch_size)

            if batch_idx > 0 and batch_idx % report_freq == 0:
                print(f'----- Batch [{batch_idx}/{len(dataloader)}], Loss: {loss_meter.avg:.5}, Acc@1: {acc_meter.avg:.4}')

    print(f'----- Validation Loss: {loss_meter.avg:.5}, Acc@1: {acc_meter.avg:.4}')
    return loss, acc1


# Teacher network: main entry point (training loop, periodic validation and checkpointing).
def teacher_main():
    """Train the teacher network (or only evaluate it when ``eval_mode`` is on).

    Returns:
        ``(model, teacher_history_train, teacher_history_vali)`` where the
        histories are lists of per-epoch ``(loss, acc1)`` tuples; returns
        ``None`` early in eval mode.
    """
    print('开始训练教师模型')
    total_epoch = 20
    batch_size = 256

    model = TeacherNet()

    # Data pipelines for the training and test splits.
    train_dataloader = get_dataloader(get_dataset(mode='train'), batch_size, mode='train')
    val_dataloader = get_dataloader(get_dataset(mode='test'), batch_size, mode='test')

    # SGD with momentum, cosine-annealed learning rate, L2 weight decay.
    criterion = nn.CrossEntropyLoss()
    scheduler = paddle.optimizer.lr.CosineAnnealingDecay(0.02, total_epoch)
    optimizer = paddle.optimizer.Momentum(learning_rate=scheduler,
                                          parameters=model.parameters(),
                                          momentum=0.9,
                                          weight_decay=5e-4)

    eval_mode = False
    if eval_mode:
        # Evaluation only: restore a saved checkpoint and validate once.
        model.set_state_dict(paddle.load('./teacher_ep200.pdparams'))
        teacher_validate(model, val_dataloader, criterion)
        return

    teacher_history_train = []
    teacher_history_vali = []
    save_freq = 5   # checkpoint every N epochs
    test_freq = 1   # validate every N epochs
    for epoch in range(1, total_epoch + 1):
        loss_train, acc1_train = teacher_train_one_epoch(model, train_dataloader, criterion, optimizer, epoch,
                                                         total_epoch)
        scheduler.step()  # advance the cosine schedule once per epoch
        teacher_history_train.append((loss_train, acc1_train))

        if epoch % test_freq == 0 or epoch == total_epoch:
            loss_vali, acc1_vali = teacher_validate(model, val_dataloader, criterion)
            teacher_history_vali.append((loss_vali, acc1_vali))

        if epoch % save_freq == 0 or epoch == total_epoch:
            # Checkpoint both the model weights and the optimizer state.
            paddle.save(model.state_dict(), f'model_out/teacher_ep{epoch}.pdparams')
            paddle.save(optimizer.state_dict(), f'model_out/teacher_ep{epoch}.pdopts')

    return model, teacher_history_train, teacher_history_vali
# Print the network structure summary.
teacher_Net = TeacherNet()
paddle.summary(teacher_Net,(1, 3, 28, 28)) #(1, 3, 28, 28) is the input shape (N, C, H, W)
# Returns the trained model plus (loss, acc) histories for training and validation.
teacher_model,teacher_history_train,teacher_history_vali = teacher_main()

# Visualize the teacher's "dark knowledge" (temperature-softened outputs).
from data_load import get_dataset,get_dataloader
import numpy as np
from matplotlib import pyplot as plt

def softmax_t(x, t):
    """Temperature-scaled softmax over a 1-D logit vector.

    Args:
        x: 1-D array-like of raw logits.
        t: temperature; t > 1 flattens the distribution, exposing the
           teacher's relative confidence on non-target classes.

    Returns:
        numpy array of probabilities summing to 1.
    """
    z = np.asarray(x) / t
    # Subtract the max before exponentiating for numerical stability:
    # avoids overflow/NaN for large logits, mathematically unchanged.
    x_exp = np.exp(z - np.max(z))
    return x_exp / np.sum(x_exp)
# Load the test split with the dataset helpers defined in data_load.
val_dataset_plt = get_dataset(mode='test')
val_dataloader_plt = get_dataloader(val_dataset_plt, batch_size=1, mode='test')

# This section can be re-run multiple times to inspect different samples.
teacher_model.eval()
with paddle.no_grad():
    data, target = next(iter(val_dataloader_plt))
    output = teacher_model(data)

test_x = data.cpu().numpy()
# Raw logits of the single sample in the batch (batch_size=1).
y_out = output.cpu().numpy()
y_out = y_out[0, ::]
print('Output (NO softmax):', y_out)

# Row 1: the input image (first channel only).
plt.subplot(3, 1, 1)
plt.imshow(test_x[0, 0, ::])

# Row 2: softmax at temperature 1 (typically near one-hot).
plt.subplot(3, 1, 2)
plt.bar(list(range(10)), softmax_t(y_out, 1), width=0.3)

# Row 3: softmax at temperature 10 — the softened distribution reveals the
# teacher's relative confidence across the other classes ("dark knowledge").
plt.subplot(3, 1, 3)
plt.bar(list(range(10)), softmax_t(y_out, 10), width=0.3)
plt.show()
