from utils import evaluate
from models import StudentWithoutDistilling
from utils import load_dataset

import torch


def train():
    """Train a StudentWithoutDistilling model from scratch and track test accuracy.

    Runs a fixed 5-epoch Adam training loop over the dataset returned by
    ``load_dataset``. Every 50 batches it prints the current batch accuracy
    and loss; after each epoch it evaluates on the test split and prints the
    epoch's test accuracy alongside the best test accuracy seen so far.
    """
    # Hyper-parameters for this baseline (no-distillation) run.
    epochs = 5
    lr = 0.01
    batch_size = 64

    train_iter, test_iter = load_dataset(batch_size)
    net = StudentWithoutDistilling()
    opt = torch.optim.Adam(net.parameters(), lr=lr)

    best_test_acc = 0
    for epoch in range(epochs):
        for step, (inputs, labels) in enumerate(train_iter):
            # The model returns both the loss and the raw logits in one call.
            loss, logits = net(inputs, labels)
            opt.zero_grad()
            loss.backward()
            opt.step()  # apply the gradient-descent update

            # Periodic progress report on the current mini-batch.
            if step % 50 == 0:
                batch_acc = (logits.argmax(1) == labels).float().mean()
                print(f"Epochs[{epoch + 1}/{epochs}]--batch[{step}/{len(train_iter)}]"
                      f"--Acc: {round(batch_acc.item(), 4)}--loss: {round(loss.item(), 4)}")

        # End-of-epoch evaluation; keep a running maximum over all epochs.
        test_acc = evaluate(test_iter, net)
        best_test_acc = max(best_test_acc, test_acc)
        print(f"Epochs[{epoch + 1}/{epochs}]--Acc on test {test_acc}, max test acc: {best_test_acc}")



if __name__ == '__main__':
    # Script entry point: run one full training session.
    train()
    # NOTE: the comments below are console output pasted from a previous run
    # (final epoch only), kept as a reference for expected baseline accuracy.
    # The last line's format ("---Acc", "max_test_acc:") differs slightly from
    # the current print statement in train(), so it predates a log-format tweak.
    # Epochs[5/5]--batch[400/938]--Acc: 0.8594--loss: 0.5711
    # Epochs[5/5]--batch[450/938]--Acc: 0.6875--loss: 0.7795
    # Epochs[5/5]--batch[500/938]--Acc: 0.7188--loss: 0.7678
    # Epochs[5/5]--batch[550/938]--Acc: 0.7031--loss: 0.7326
    # Epochs[5/5]--batch[600/938]--Acc: 0.7188--loss: 0.6579
    # Epochs[5/5]--batch[650/938]--Acc: 0.7344--loss: 0.733
    # Epochs[5/5]--batch[700/938]--Acc: 0.7344--loss: 0.8884
    # Epochs[5/5]--batch[750/938]--Acc: 0.75--loss: 0.7302
    # Epochs[5/5]--batch[800/938]--Acc: 0.5625--loss: 0.9818
    # Epochs[5/5]--batch[850/938]--Acc: 0.6719--loss: 0.7181
    # Epochs[5/5]--batch[900/938]--Acc: 0.6719--loss: 0.8806
    # Epochs[5/5]---Acc on test 0.8154, max_test_acc:0.8274
