# -*- coding: utf-8 -*-
"""
@author: YuHaiyang

"""
import os
import shutil
from pathlib import Path

import torch
from torch import optim
from torch.utils.data import DataLoader
from torch.nn import functional as F
from nets.alex.alex_net import AlexNet
from nets.alex.loader import DataSetLoader


def print_hi(name):
    """Print a greeting for *name* (leftover PyCharm scratch helper)."""
    greeting = f'Hi, {name}'
    print(greeting)


# Script entry point: train AlexNet on the dc1000 dataset for one epoch and
# save a state-dict checkpoint per epoch under out/.
if __name__ == '__main__':
    # Use torch.device consistently for both branches (original mixed a
    # torch.device with a bare "cpu" string; both work, but this is uniform).
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    # Path.home() is robust even when $HOME is unset — os.environ.get("HOME")
    # would return None and make Path(None, ...) raise TypeError.
    src = Path.home() / "workspace" / "dataset" / "dc1000"
    loader: DataLoader = DataSetLoader(src).gen(batch_size=50)

    net = AlexNet()
    net.to(device)
    optimizer = optim.SGD(net.parameters(), lr=0.01, momentum=0.9, weight_decay=0.005)

    net.train()

    # Start from a clean output directory. ignore_errors avoids a crash on the
    # very first run, when out/ does not exist yet.
    shutil.rmtree("out", ignore_errors=True)
    Path("out").mkdir(exist_ok=True)

    for epoch in range(1):
        for index, (data, label) in enumerate(loader):
            data, label = data.to(device), label.to(device)
            optimizer.zero_grad()
            output = net(data)
            loss = F.cross_entropy(output, label)
            loss.backward()
            optimizer.step()

            # Log every 10th batch.
            if index % 10 == 0:
                print('Train Epoch:{}\ttrain loss:{:.6f}'.format(epoch, loss.item()))

        # Checkpoint after each epoch.
        torch.save(net.state_dict(), f'out/model{epoch}.pth')
