
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
# from torch.utils.data import DataLoader

class Net(nn.Module):
    """Small LeNet-style CNN for 1-channel 28x28 inputs (e.g. MNIST).

    Produces raw logits for 10 classes; pair with ``F.cross_entropy``
    (as ``Classifier`` does) for training. Removed commented-out dead
    code from ``forward``.
    """

    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
        self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
        self.conv2_drop = nn.Dropout2d()
        self.fc1 = nn.Linear(320, 50)
        self.fc2 = nn.Linear(50, 10)

    def forward(self, x):
        """Return (N, 10) logits for input of shape (N, 1, 28, 28)."""
        x = F.relu(F.max_pool2d(self.conv1(x), 2))
        x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
        # Flatten: for 28x28 inputs the conv stack yields 20 * 4 * 4 = 320.
        x = x.view(-1, 320)
        x = F.relu(self.fc1(x))
        # Dropout only fires in training mode (self.training).
        x = F.dropout(x, training=self.training)
        x = self.fc2(x)
        return x

class Counter:
    """Running tally of how many samples have been fed to ``update``."""

    def __init__(self) -> None:
        self.reset()

    def reset(self):
        """Zero the tally."""
        self.counter = 0

    def update(self, target, *args, **kwargs):
        """Grow the tally by the number of elements in ``target``."""
        self.counter = self.counter + len(target)

    @property
    def value(self):
        """The current tally."""
        return self.counter

    def __repr__(self):
        return f'Counter({self.value})'

class Percentage(Counter):
    """Tracks classification accuracy: correct predictions over total samples.

    Fix vs. original: ``correct.sum()`` is a 0-dim tensor, so ``self.correct``
    accumulated a tensor — ``__repr__`` printed ``tensor(N)/...`` and
    ``percent`` returned a tensor. ``.item()`` keeps it a plain Python int.
    The deprecated ``target.data`` access is also dropped.
    """

    def reset(self):
        super(Percentage, self).reset()
        # Number of correct predictions, kept as a plain Python int.
        self.correct = 0

    def update(self, target, pred, *args, **kwargs):
        """Accumulate matches between ``pred`` and ``target``.

        ``pred`` is reshaped-comparable to ``target`` (e.g. the (N, 1)
        argmax output of ``Classifier.test_batch``).
        """
        super(Percentage, self).update(target, pred)
        correct = pred.eq(target.view_as(pred))
        self.correct += correct.sum().item()

    @property
    def percent(self):
        """Accuracy as a percentage; 0.0 before any samples are seen."""
        if self.counter > 0:
            return 100. * self.correct / self.counter
        else:
            return 0.

    def __repr__(self):
        return f'{self.correct}/{self.counter} ({self.percent:.0f}%)'

class AverageLosses(Counter):
    """History of per-sample batch losses plus cumulative sample counts.

    NOTE(review): ``update`` divides ``loss`` by the batch size. If the
    caller passes a mean loss (``F.cross_entropy`` defaults to
    reduction='mean'), the stored value is normalized twice — confirm
    the intended reduction with the callers.
    """

    def reset(self):
        """Drop both histories."""
        self.losses = []
        self.counters = []

    @property
    def counter(self):
        """Cumulative samples seen so far (0 when no batches recorded)."""
        if self.counters:
            return self.counters[-1]
        return 0

    def update(self, target, loss):
        """Append this batch's normalized loss and the new running count."""
        batch_size = len(target)
        self.losses.append(loss / batch_size)
        self.counters.append(batch_size + self.counter)

    def __repr__(self):
        if not self.losses:
            return '0.0/0'
        return f'{self.losses[-1]:.5f} / {self.counters[-1]:6d}'

class TrainResult:
    """Collects and prints per-epoch training progress.

    Fixes vs. original: the progress percentage was computed as
    ``batch_idx / cumulative_sample_count`` — a meaningless fraction; it
    is now batches-completed over total batches in the loader. The
    'trainig' typos in the log messages are also corrected.
    """

    def __init__(self, log_interval=10):
        # Print a progress line every `log_interval` batches.
        self.log_interval = log_interval
        self.reset()

    def reset(self):
        self.epoch = 0
        self.batch_idx = 0
        self.average_losses = AverageLosses()

    def __repr__(self):
        format_string = self.__class__.__name__ + '('
        format_string += f'AverageLosses={self.average_losses}'
        format_string += ')'
        return format_string

    def start(self, train_loader):
        """Begin a new epoch over ``train_loader``."""
        self.train_loader = train_loader
        self.epoch += 1
        self.batch_idx = 0
        self.average_losses.reset()
        print(f'Start training Epoch: {self.epoch} ')

    def end(self):
        print(f'Training Epoch {self.epoch} Result: \n{self}')

    def update(self, target, output, loss):
        """Record one batch's loss; log every ``log_interval`` batches."""
        self.batch_idx += 1
        self.average_losses.update(target, loss)
        if self.batch_idx % self.log_interval == 0:
            # Progress through the epoch, measured in batches.
            total_batches = len(self.train_loader)
            percent = 100. * self.batch_idx / total_batches
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                self.epoch, self.batch_idx, total_batches, percent, loss))

class TestResult:
    """Collects accuracy and average-loss statistics over one test pass.

    Fix vs. original: 'Accuarcy' typo in the per-batch log message.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        self.accuracy = Percentage()
        self.average_loss = AverageLosses()

    def __repr__(self):
        return f'{self.__class__.__name__}(Accuracy={self.accuracy}, AverageLosses={self.average_loss})'

    def start(self, test_loader):
        """Begin a test pass over ``test_loader``."""
        self.test_loader = test_loader
        self.accuracy.reset()
        self.average_loss.reset()
        print('Start Testing ...')

    def end(self):
        print(f'Testing Result: {self}')

    def update(self, target, pred, loss):
        """Record one batch's predictions and loss, then log running stats."""
        self.accuracy.update(target, pred)
        self.average_loss.update(target, loss)
        print(f'{self.accuracy.counter:06d}: AverageLosses = {self.average_loss}; Accuracy = {self.accuracy}')

class Classifier:
    """Bundles a network, an SGD optimizer and a loss for train/test loops.

    Fixes vs. original:
    - ``network=Net()`` as a default was evaluated once at class-definition
      time, so every default-constructed Classifier shared one module;
      ``None`` sentinel now builds a fresh ``Net`` per instance.
    - ``load_state`` referenced ``self.learning_rate`` / ``self.momentum``,
      which were never stored, raising AttributeError; hyperparameters are
      now kept on the instance and state is loaded into the existing
      network/optimizer.
    - Save and load now use the same file paths.
    """

    def __init__(self, network=None, learning_rate=0.01, momentum=0.5):
        # Fresh default network per instance (avoids the shared mutable
        # default-argument pitfall).
        self.network = Net() if network is None else network
        self.learning_rate = learning_rate
        self.momentum = momentum
        self.optimizer = optim.SGD(self.network.parameters(),
                                   lr=learning_rate, momentum=momentum)
        self.criterion = F.cross_entropy

    def train(self, train_loader, num_epoch=1):
        """Run ``num_epoch`` training epochs; return the TrainResult."""
        result = TrainResult()
        for epoch in range(num_epoch):
            result.start(train_loader)
            for batch_idx, (data, target) in enumerate(train_loader):
                output, loss = self.train_batch(data, target)
                result.update(target, output, loss)
            result.end()
        return result

    def train_batch(self, data, target):
        """One optimization step; return (output, loss value)."""
        self.network.train()
        self.optimizer.zero_grad()
        output = self.network(data)
        loss = self.criterion(output, target)
        loss.backward()
        self.optimizer.step()
        return output, loss.item()

    def test(self, test_loader):
        """Evaluate over ``test_loader`` without gradients; return TestResult."""
        result = TestResult()
        result.start(test_loader)
        with torch.no_grad():
            for data, target in test_loader:
                pred, output, loss = self.test_batch(data, target)
                result.update(target, pred, loss)
        result.end()
        return result

    def test_batch(self, data, target):
        """Forward one batch in eval mode; return (pred, output, loss)."""
        self.network.eval()
        output = self.network(data)
        # Class index with the highest logit, shape (N, 1).
        pred = output.data.max(1, keepdim=True)[1]
        loss = self.criterion(output, target)
        return pred, output, loss

    def save_state(self):
        """Persist network and optimizer state dicts to disk."""
        torch.save(self.network.state_dict(), './model.pth')
        torch.save(self.optimizer.state_dict(), './optimizer.pth')

    def load_state(self):
        """Restore the state saved by ``save_state`` into the existing
        network and optimizer (paths match ``save_state``)."""
        self.network.load_state_dict(torch.load('./model.pth'))
        self.optimizer.load_state_dict(torch.load('./optimizer.pth'))


