import torch
from config import config
from datam import *
from model import Model,Backbone
from train_lightning import *
from torch.nn import functional as F
if __name__ == '__main__':
    # Seed FIRST so any randomness in data setup (splits, shuffling) is
    # reproducible. Original code seeded after DataM.setup(), which defeats
    # the purpose of seeding.
    pl.seed_everything(1234)

    data_mnist = DataM(config.data_dir, config.BATCH_SIZE, config.AVAIL_GPUS)
    data_mnist.setup()

    # Restore the trained model from its last checkpoint and switch to
    # evaluation mode (disables dropout / uses running batch-norm stats).
    model = Model.load_from_checkpoint(config.modelPATH + "/last.ckpt")
    model.eval()
    data, label = next(iter(data_mnist.test_dataloader()))

    # Inference only: disable autograd so no computation graph is built
    # for the forward pass or the loss comparisons below.
    with torch.no_grad():
        logits = model(data)
        print("logits \n", logits)
        # torch.argmax(x, dim) returns the index of the maximum value
        # along `dim` — here, the predicted class per sample.
        preds_argmax = torch.argmax(logits, dim=1)
        print("preds  argmax \n", preds_argmax)
        preds_softmax = torch.softmax(logits, dim=1)
        print("preds  softmax \n", preds_softmax)

        # Four ways to compute "cross-entropy"; 1, 2 and 3 are equivalent,
        # 4 is a common mistake.
        # 1) cross_entropy expects RAW logits (it applies log_softmax itself).
        loss = F.cross_entropy(logits, label)
        print("F.cross_entropy \n", loss)
        # 2) Equivalent: explicit log_softmax followed by nll_loss.
        loss = F.nll_loss(F.log_softmax(logits, dim=1), label)
        print("F.nll_loss(F.log_softmax(logits, dim=1) \n", loss)
        # 3) Equivalent in exact arithmetic, but log(softmax(x)) is less
        #    numerically stable than log_softmax(x).
        loss = F.nll_loss(torch.log(preds_softmax), label)
        print("F.nll_loss(torch.log(preds_softmax) \n", loss)
        # 4) WRONG: feeding probabilities to cross_entropy applies softmax
        #    twice, producing a different (incorrect) loss value.
        loss = F.cross_entropy(F.softmax(logits, dim=1), label)
        print("F.cross_entropy(F.softmax(logits, dim=1), \n", loss)
        # 1 = 2 = 3 != 4

    # Run the Lightning test loop on the full test set from a fresh
    # checkpoint load.
    trainer = Trainer(gpus=config.AVAIL_GPUS, max_epochs=config.max_epochs)
    model = Model.load_from_checkpoint(config.modelPATH + "/last.ckpt")
    model.eval()
    result = trainer.test(model, data_mnist.test_dataloader())
    print(result)



