import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
from torch.utils.data import DataLoader

from net import NetClassify
import dataset as ds

# Data pipeline: project-local dataset helpers (see dataset.py).
# NOTE(review): get_dataset's signature is assumed to be (split_name, list_file)
# from usage here — verify against dataset.py.
test_dataset = ds.get_dataset('test', 'test.txt')
# Evaluation order is irrelevant, so don't shuffle the test set; this keeps
# evaluation runs reproducible. (Shuffling here only added nondeterminism.)
test_loader = DataLoader(test_dataset, batch_size=10, shuffle=False)

train_dataset = ds.get_dataset('train', 'train.txt')
# Shuffling the training data each epoch is standard practice for SGD.
train_loader = DataLoader(train_dataset, batch_size=10, shuffle=True)

# Model under training/evaluation; architecture defined in net.py.
net = NetClassify()

def train():
    """Train the module-level ``net`` on ``train_loader`` for 50 epochs.

    Uses SGD (lr=0.001, momentum=0.5) with cross-entropy loss and prints
    the mean running loss every 200 mini-batches. Mutates ``net`` in place;
    returns nothing.
    """
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(net.parameters(), lr=0.001, momentum=0.5)
    net.train()  # enable training-mode behavior (dropout/batch-norm, if any)
    for epoch in range(50):  # 50 passes over the training set
        running_loss = 0.0
        # Variable() wrapping (used originally) is deprecated since PyTorch
        # 0.4 — plain tensors carry autograd state, so batches are used as-is.
        for i, (inputs, labels) in enumerate(train_loader):
            optimizer.zero_grad()  # clear gradients accumulated last step
            outputs = net(inputs)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()  # apply the SGD update
            running_loss += loss.item()
            if i % 200 == 199:  # report mean loss of the last 200 batches
                print('[%d %5d] loss: %.3f' % (epoch + 1, i + 1, running_loss / 200))
                running_loss = 0.0
    print('finished training!')

def test():
    """Evaluate the module-level ``net`` on ``test_loader``.

    Computes top-1 accuracy over the whole test loader and prints it as an
    integer percentage. Runs under ``torch.no_grad()`` so no graph is built.
    Returns nothing.
    """
    total = 0
    correct = 0
    net.eval()  # evaluation-mode behavior (dropout/batch-norm, if any)
    with torch.no_grad():
        for images, labels in test_loader:
            # Deprecated Variable() wrapper and `.data` access dropped:
            # plain tensors suffice, and no_grad already disables autograd.
            outputs = net(images)
            _, predicted = torch.max(outputs, dim=1)
            total += labels.size(0)
            # .item() turns the 0-dim tensor from sum() into a Python int,
            # so `correct` stays an int for the %d formatting below.
            correct += (predicted == labels).sum().item()
    print('Accuracy of the network on the 400 test images: %d %%' % (100 * correct / total))
