# %%
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

def getData(n):
    """
    Generate a labelled 2-D data set made of n Gaussian clusters.

    Each cluster holds 100 points drawn from an isotropic Gaussian
    (covariance 20*I) centred on a mean sampled uniformly from
    [-100, 100)^2.  Returns (X, y) where X has shape (n*100, 2) and
    y[i] is the cluster index of row i.
    """
    cov = np.array([[20, 0], [0, 20]])
    size = 100

    clusters = []
    labels = []
    for label in range(n):
        # Same RNG call order as before: one uniform draw for the mean,
        # then one multivariate-normal draw for the cluster samples.
        center = np.random.uniform(-100, 100, 2)
        clusters.append(np.random.multivariate_normal(center, cov, size))
        labels.append(np.full(size, label))
    return np.vstack(clusters), np.concatenate(labels)

# %%
def Homogenization(X):
    """
    Return X in homogeneous form: a copy with a third column of ones.

    The constant coordinate lets a bias term be folded into each
    3-component weight vector.
    """
    ones_column = np.ones(len(X))
    return np.insert(X, 2, values=ones_column, axis=1)

def Perception(X_train, y_train, max_iter=1000):
    """
    Train a multi-class perceptron and return its weight matrix.

    Performs full passes over the homogenized training set.  Whenever a
    sample's true class does not strictly out-score a rival class, the
    true class's weights are rewarded with the sample and the rival's
    are penalized by it.  Training stops after a clean pass (no
    updates) or after max_iter passes.

    Returns Omega of shape (n_classes, 3): one homogeneous weight
    vector per class.
    """
    # Homogenize: append a constant 1 coordinate so the bias lives
    # inside the weight vectors.
    samples = np.insert(X_train, 2, values=np.ones(len(X_train)), axis=1)

    n_classes = len(np.unique(y_train))
    Omega = np.zeros((n_classes, 3))

    for _ in range(max_iter):
        clean_pass = True
        for x_vec, label in zip(samples, y_train):
            for rival in range(n_classes):
                if rival == label:
                    continue
                # Update immediately on each violation; later rivals in
                # this same inner loop see the already-corrected weights.
                if np.dot(Omega[label], x_vec) <= np.dot(Omega[rival], x_vec):
                    clean_pass = False
                    Omega[label] = Omega[label] + x_vec
                    Omega[rival] = Omega[rival] - x_vec
        if clean_pass:
            break
    return Omega

# %%
def checkTest(Omega, X_test, y_test):
    """
    Score the classifier on the test set and plot the predictions.

    Each sample is assigned to the class whose homogeneous weight
    vector gives the highest score; predictions are scattered in
    Figure1, colored by predicted class.

    Returns the fraction of test samples whose predicted class matches
    y_test.
    """
    fig1 = plt.figure("Figure1").add_subplot(111)
    fig1.set_title("Test set classification results")

    colors = ('red', 'blue', 'lightgreen', 'gray', 'cyan', 'yellow', 'green', 'black')

    # Data homogenization
    X_test = Homogenization(X_test)

    # Vectorized prediction: one matrix product scores every sample
    # against every class, then per-row argmax (first max wins, the
    # same tie-break as the former sequential scan).
    scores = X_test @ Omega.T            # shape (n_samples, n_classes)
    kinds = np.argmax(scores, axis=1)

    # One scatter call per predicted class instead of one per point.
    # Wrap the color index so more than len(colors) classes cycle
    # through the palette instead of raising IndexError.
    for k in np.unique(kinds):
        mask = kinds == k
        fig1.scatter(X_test[mask, 0], X_test[mask, 1],
                     c=colors[int(k) % len(colors)])

    correct = int(np.count_nonzero(kinds == np.asarray(y_test)))
    return correct / len(X_test)

# %%
def printOmega(X_train, Omega):
    """
    Plot the training samples together with the class decision lines.

    Each row of Omega is a homogeneous weight vector (w0, w1, b); its
    line w0*x + w1*y + b = 0 is drawn across the span of the data in
    Figure2.  Returns None.
    """
    # Axis limits padded by 1 around the data.
    x_min, x_max = X_train[:, 0].min() - 1, X_train[:, 0].max() + 1
    y_min, y_max = X_train[:, 1].min() - 1, X_train[:, 1].max() + 1

    fig2 = plt.figure('Figure2').add_subplot(111)
    fig2.set_title("Training Set")
    fig2.set_xlim(x_min, x_max)
    fig2.set_ylim(y_min, y_max)

    # BUG FIX: plot the X_train argument, not the module-level global X
    # (the original scattered the full data set, test points included).
    fig2.scatter(X_train[:, 0], X_train[:, 1])

    # Plot each classifier weight vector as y = -(w0*x + b) / w1.
    x = np.linspace(x_min, x_max, 1000)
    for w in Omega:
        if w[1] == 0:
            # Vertical boundary: the y = ... form is undefined; skip it
            # rather than divide by zero.
            continue
        fig2.plot(x, -(w[0] * x + w[2]) / w[1])
    return None

# %%
if __name__ == "__main__":
    # Generate 8 Gaussian clusters of 100 points each (800 samples).
    X, y = getData(8)
    # Split the data into training/testing sets: even-indexed rows
    # train, odd-indexed rows test (50/50 split, interleaved so every
    # cluster contributes equally to both halves).
    X_train, y_train = X[::2], y[::2]
    X_test, y_test = X[1::2], y[1::2]

    # Call the perceptron algorithm to calculate the classifier weight vector
    Omega = Perception(X_train, y_train)
    print("Omega:\n", Omega)

    # Evaluate on the held-out half (also scatters predictions in
    # Figure1), then draw the training set with the decision lines.
    correctRate = checkTest(Omega, X_test, y_test)
    printOmega(X_train, Omega)
    print("Classification accuracy rate: ", correctRate)


