import gzip

import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from sklearn.metrics import accuracy_score, confusion_matrix

# from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler


# Function to load MNIST images
def load_mnist_images(filename):
    """Read a gzipped IDX3 file of MNIST images into an (n, 784) uint8 array."""
    with gzip.open(filename, "rb") as fh:
        raw = fh.read()
    # First 16 bytes are the IDX header (magic, count, rows, cols); the rest
    # is one byte per pixel, 28x28 per image.
    pixels = np.frombuffer(raw, np.uint8, offset=16)
    return pixels.reshape(-1, 28 * 28)


# Function to load MNIST labels
def load_mnist_labels(filename):
    """Read a gzipped IDX1 file of MNIST labels into a 1-D uint8 array."""
    with gzip.open(filename, "rb") as fh:
        raw = fh.read()
    # Skip the 8-byte IDX header (magic number + item count).
    return np.frombuffer(raw, np.uint8, offset=8)


# Paths to the MNIST files
# NOTE(review): machine-specific absolute paths — edit these (or make them
# configurable) before running on another machine.
train_images_path = "/home/vaeringjar/Desktop/phd/ml_202403summer/assignments/hw1/mnist/train-images-idx3-ubyte.gz"
train_labels_path = "/home/vaeringjar/Desktop/phd/ml_202403summer/assignments/hw1/mnist/train-labels-idx1-ubyte.gz"
test_images_path = "/home/vaeringjar/Desktop/phd/ml_202403summer/assignments/hw1/mnist/t10k-images-idx3-ubyte.gz"
test_labels_path = "/home/vaeringjar/Desktop/phd/ml_202403summer/assignments/hw1/mnist/t10k-labels-idx1-ubyte.gz"

# Load the data (uint8 pixel intensities and integer class labels)
X_train = load_mnist_images(train_images_path)
y_train = load_mnist_labels(train_labels_path)
X_test = load_mnist_images(test_images_path)
y_test = load_mnist_labels(test_labels_path)

# Normalize the data: per-pixel zero mean / unit variance, with statistics
# fitted on the training set only and reused for the test set.
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Add bias term to the features: prepend a constant-1 column so the model's
# bias can live inside the weight matrix (input_dim becomes 785).
X_train = np.c_[np.ones(X_train.shape[0]), X_train]
X_test = np.c_[np.ones(X_test.shape[0]), X_test]

# Sanity-check the resulting shapes.
print("Training data shape:", X_train.shape)
print("Training labels shape:", y_train.shape)
print("Test data shape:", X_test.shape)
print("Test labels shape:", y_test.shape)


class Perceptron:
    """Multi-class linear classifier trained with per-sample updates.

    NOTE(review): despite the name, the weight update uses the raw
    (continuous) net input rather than a thresholded 0/1 prediction, i.e.
    it is a delta/LMS-style rule — confirm this matches the intended
    perceptron algorithm for the assignment.
    """

    def __init__(self, input_dim, output_dim, learning_rate=0.01, epochs=100):
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.learning_rate = learning_rate
        self.epochs = epochs
        # One weight row per class. Inputs are expected to already carry a
        # bias column (see the np.c_[np.ones(...), X] step above), so no
        # separate bias vector is kept.
        # NOTE(review): unseeded random init — runs are not reproducible.
        self.weights = np.random.randn(output_dim, input_dim)

    def train(self, X, y):
        """Run `self.epochs` full passes of per-sample updates over (X, y).

        For each sample, the target is a one-hot encoding of the label and
        the error is `target - raw_score` for every class simultaneously.
        """
        class_ids = np.arange(self.output_dim)
        for _ in range(self.epochs):
            for xi, yi in zip(X, y):
                outputs = self.predict(xi, raw_output=True)
                # One-hot target for the true class; continuous error.
                errors = (class_ids == yi).astype(float) - outputs
                # Vectorized form of the per-class loop
                # `weights[j] += lr * error_j * xi` (identical numerics,
                # since all errors were computed before any update).
                self.weights += self.learning_rate * np.outer(errors, xi)

    def predict(self, X, raw_output=False):
        """Return raw class scores if `raw_output`, else argmax labels.

        Uses axis=-1 so both a single sample (1-D `X`) and a batch
        (2-D `X`) work; the previous `axis=1` crashed on 1-D input.
        """
        net_inputs = np.dot(X, self.weights.T)
        if raw_output:
            return net_inputs
        return np.argmax(net_inputs, axis=-1)


# Choose learning rate
chosen_learning_rate = 0.01

# Train perceptron with the chosen learning rate
perceptron = Perceptron(
    input_dim=785, output_dim=10, learning_rate=chosen_learning_rate, epochs=40
)
perceptron.train(X_train, y_train)

# Make predictions on test data
y_pred = perceptron.predict(X_test)

# Calculate accuracy
accuracy = accuracy_score(y_test, y_pred)
print(f"Accuracy with learning rate {chosen_learning_rate}: {accuracy * 100:.2f}%")

# Generate confusion matrix
conf_matrix = confusion_matrix(y_test, y_pred)
print(conf_matrix)

# Plot confusion matrix
plt.figure(figsize=(10, 7))
sns.heatmap(conf_matrix, annot=True, fmt="d", cmap="Blues")
plt.xlabel("Predicted")
plt.ylabel("Actual")
plt.title(f"Confusion Matrix (Learning Rate: {chosen_learning_rate})")
plt.show()

# NOTE: Everything below is an earlier, fetch_openml-based version of this
# script, kept commented out for reference only — it is not executed.
# import matplotlib.pyplot as plt
# import numpy as np
# import seaborn as sns
# from sklearn.datasets import fetch_openml
# from sklearn.metrics import accuracy_score, confusion_matrix
# from sklearn.model_selection import train_test_split
# from sklearn.preprocessing import StandardScaler

# # Load MNIST dataset
# mnist = fetch_openml("mnist_784")
# X = mnist.data
# y = mnist.target.astype(int)

# # Normalize the data
# scaler = StandardScaler()
# X = scaler.fit_transform(X)

# # Add bias term to the features
# X = np.c_[np.ones(X.shape[0]), X]

# # Split data into training and test sets
# X_train, X_test, y_train, y_test = train_test_split(
#     X, y, test_size=1 / 7, random_state=42
# )


# class Perceptron:
#     def __init__(self, input_dim, output_dim, learning_rate=0.01, epochs=100):
#         self.input_dim = input_dim
#         self.output_dim = output_dim
#         self.learning_rate = learning_rate
#         self.epochs = epochs
#         self.weights = np.random.randn(output_dim, input_dim)

#     def train(self, X, y):
#         for epoch in range(self.epochs):
#             for i in range(X.shape[0]):
#                 xi = X[i]
#                 yi = y[i]
#                 outputs = self.predict(xi, raw_output=True)
#                 for j in range(self.output_dim):
#                     target = 1 if yi == j else 0
#                     error = target - outputs[j]
#                     self.weights[j] += self.learning_rate * error * xi

#     def predict(self, X, raw_output=False):
#         net_inputs = np.dot(X, self.weights.T)
#         if raw_output:
#             return net_inputs
#         return np.argmax(net_inputs, axis=1)


# # Choose learning rate
# chosen_learning_rate = 0.01

# # Train perceptron with the chosen learning rate
# perceptron = Perceptron(
#     input_dim=785, output_dim=10, learning_rate=chosen_learning_rate, epochs=10
# )
# perceptron.train(X_train, y_train)

# # Make predictions on test data
# y_pred = perceptron.predict(X_test)

# # Calculate accuracy
# accuracy = accuracy_score(y_test, y_pred)
# print(f"Accuracy with learning rate {chosen_learning_rate}: {accuracy * 100:.2f}%")

# # Generate confusion matrix
# conf_matrix = confusion_matrix(y_test, y_pred)
# print(conf_matrix)

# # Plot confusion matrix
# plt.figure(figsize=(10, 7))
# sns.heatmap(conf_matrix, annot=True, fmt="d", cmap="Blues")
# plt.xlabel("Predicted")
# plt.ylabel("Actual")
# plt.title(f"Confusion Matrix (Learning Rate: {chosen_learning_rate})")
# plt.show()
