import gzip
import os
import pickle

import matplotlib.pyplot as plt
import requests
import torch
import torch.nn as nn
import torch.nn.functional as F


# --- Dataset acquisition -------------------------------------------------
# Download the pickled MNIST dataset (if missing) and load the train /
# validation splits into memory.

DATA_PATH = "../dataset"
DATA = os.path.join(DATA_PATH, "mnist")

# exist_ok avoids the race between the existence check and the mkdir.
os.makedirs(DATA, exist_ok=True)

# NOTE(review): the original host http://deeplearning.net/data/mnist/ is
# defunct; the same mnist.pkl.gz is mirrored by the PyTorch tutorials repo.
URL = "https://github.com/pytorch/tutorials/raw/main/_static/"
FILENAME = "mnist.pkl.gz"
URL = URL + FILENAME
FILEPATH = os.path.join(DATA, FILENAME)

if not os.path.exists(FILEPATH):
    response = requests.get(URL, timeout=60)
    # Fail loudly on HTTP errors instead of caching an error page as data.
    response.raise_for_status()
    with open(FILEPATH, 'wb') as f:
        f.write(response.content)

# SECURITY: pickle.load executes arbitrary code from the file; acceptable
# only because the archive comes from a trusted source.
with gzip.open(FILEPATH, "rb") as f:
    ((x_train, y_train), (x_valid, y_valid), _) = pickle.load(f, encoding="latin-1")
plt.imshow(x_train[0].reshape((28, 28)), cmap="gray")
# plt.show()
print(x_train.shape)
x_train, y_train, x_valid, y_valid = map(torch.tensor, (x_train, y_train, x_valid, y_valid))


class Mnist_Logistic(nn.Module):
    """Two-layer fully connected classifier for flattened 28x28 MNIST images.

    Architecture: 784 -> 784 linear, dropout(p=0.5), 784 -> 10 linear.
    There is no activation between the layers; dropout (active only in
    training mode) is the sole non-linearity.
    """

    def __init__(self):
        super().__init__()
        # Attribute names are kept stable: they define the state_dict keys.
        self.fc1 = nn.Linear(784, 784)
        self.fc2 = nn.Linear(784, 10)
        self.dropout = nn.Dropout(0.5)

    def forward(self, x):
        """Map a (batch, 784) tensor to (batch, 10) unnormalized logits."""
        hidden = self.fc1(x)
        hidden = self.dropout(hidden)
        logits = self.fc2(hidden)
        return logits


# Hyper-parameters and the loss used for training / reporting.
# (torch.nn.functional is imported as F at the top of the file.)
loss_func = F.cross_entropy  # expects raw logits + integer class labels
lr = 0.05    # learning rate for the manual SGD step in fit()
epochs = 5   # full passes over the training set
bs = 64      # mini-batch size
model = Mnist_Logistic()
n = len(x_train)  # number of training examples

# Sanity check: loss of the untrained model on the first mini-batch.
xb = x_train[0:bs]
yb = y_train[0:bs]
print(loss_func(model(xb), yb))


def fit():
    """Train `model` on (x_train, y_train) with plain mini-batch SGD.

    Uses the module-level hyper-parameters (epochs, bs, lr, n), updates the
    parameters in place, and clears the gradients after every step.
    """
    for _ in range(epochs):
        # Walk the training set in contiguous slices of size bs
        # (the final slice may be shorter).
        for start in range(0, n, bs):
            batch_x = x_train[start:start + bs]
            batch_y = y_train[start:start + bs]
            loss = loss_func(model(batch_x), batch_y)
            loss.backward()
            # The update itself must not be recorded by autograd.
            with torch.no_grad():
                for param in model.parameters():
                    param -= lr * param.grad
                model.zero_grad()


fit()
# Loss on the first training batch after training (dropout is still active
# here because the model has not been switched to eval mode yet).
print(loss_func(model(xb), yb))

# Evaluation: disable dropout and skip autograd bookkeeping — only the
# forward pass is needed, so building a graph over the whole validation
# set would waste memory for nothing.
model = model.eval()
with torch.no_grad():
    equal_ = model(x_valid).argmax(dim=1) == y_valid
    print(torch.sum(equal_).cpu().item() / len(y_valid))