import numpy as np
from sklearn import datasets
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn.model_selection import GridSearchCV
from ai.knn.KNNClassifier import KNNClassifier
import matplotlib
import matplotlib.pyplot as plt

def trainTestSplit(x, y, testRatio = 0.2, seed = None):
    """Randomly split paired arrays into train and test subsets.

    Args:
        x: feature array of shape (nSamples, ...).
        y: label array of shape (nSamples,).
        testRatio: fraction of samples placed in the test set (0.0-1.0).
        seed: optional int seed for numpy's global RNG; pass one for a
            reproducible split.

    Returns:
        (xTrain, xTest, yTrain, yTest) — note the ordering differs from
        sklearn's train_test_split, which returns (xTrain, xTest, yTrain, yTest)
        only for x-then-y argument order; here test arrays sit in the middle.
    """

    assert x.shape[0] == y.shape[0], \
        'the size of x must be equal to the size of y'

    assert 0.0 <= testRatio <= 1.0,\
        'test ratio must be valid'

    # Compare against None explicitly: the old truthiness check (`if seed:`)
    # silently skipped seeding when seed == 0.
    if seed is not None:
        np.random.seed(seed)

    shuffleIndexes = np.random.permutation(len(x))

    testSize = int(len(x) * testRatio)
    testIndexes = shuffleIndexes[:testSize]

    trainIndexes = shuffleIndexes[testSize:]

    xTrain = x[trainIndexes]
    yTrain = y[trainIndexes]

    xTest = x[testIndexes]
    yTest = y[testIndexes]

    return xTrain, xTest, yTrain, yTest

def irisDataTest():
    """Demo: fit sklearn's 3-NN on iris using the hand-rolled split and print accuracy."""
    iris = datasets.load_iris()
    features, labels = iris.data, iris.target

    xTrain,  xTest, yTrain, yTest = trainTestSplit(features, labels, testRatio=0.2, seed=None)
    print(xTrain.shape, yTrain.shape, xTest.shape, yTest.shape)

    clf = KNeighborsClassifier(n_neighbors=3)
    clf.fit(xTrain, yTrain)

    predict = clf.predict(xTest)
    print(predict)
    print(yTest)
    # Accuracy as a ratio, then the raw count of correct predictions.
    print(sum(predict == yTest) / len(yTest))
    print(sum(predict == yTest))

def digitsDataTest():
    """Demo: baseline 3-NN on the digits dataset, then a manual hyper-parameter scan.

    Prints the baseline test accuracy twice (via ``score`` and
    ``accuracy_score``), then searches k in [1, 10] and the Minkowski
    exponent p in [1, 5] with distance weighting, reporting the best
    combination found on the held-out test set.
    """
    print("-------------------digitsDataTest()------------------------")
    digits = datasets.load_digits()
    X = digits.data
    y = digits.target

    xTrain, xTest, yTrain, yTest = train_test_split(X, y, test_size=0.2, random_state=666)
    kneighborsClassifier = KNeighborsClassifier(n_neighbors=3)
    kneighborsClassifier.fit(xTrain, yTrain)
    yPredict = kneighborsClassifier.predict(xTest)
    print(kneighborsClassifier.score(xTest, yTest))
    print(accuracy_score(yTest, yPredict))

    bestScore = 0.0
    bestK = -1
    bestP = 0

    # p is the Minkowski metric exponent; it only influences results here
    # because weights="distance" uses the actual neighbor distances.
    for k in range(1, 11):
        for p in range(1, 6):
            kneighborsClassifier = KNeighborsClassifier(n_neighbors=k, weights="distance", p=p)
            kneighborsClassifier.fit(xTrain, yTrain)
            score = kneighborsClassifier.score(xTest, yTest)

            if score > bestScore:
                bestScore = score
                bestK = k
                bestP = p

    print("best K", bestK)
    print("best score", bestScore)
    print("best P", bestP)

def gridSearchTest():
    """Demo: tune KNN hyper-parameters on the digits dataset with GridSearchCV."""
    digits = datasets.load_digits()
    X = digits.data
    y = digits.target
    xTrain, xTest, yTrain, yTest = train_test_split(X, y, test_size=0.2, random_state=666)

    # Two sub-grids: with uniform weighting p has no effect, so it is only
    # searched in combination with distance weighting.
    paramGrid = [
        {
            'weights': ['uniform'],
            'n_neighbors': list(range(1, 11)),
        },
        {
            'weights': ['distance'],
            'n_neighbors': list(range(1, 11)),
            'p': list(range(1, 6)),
        },
    ]

    searcher = GridSearchCV(KNeighborsClassifier(), paramGrid)
    print(searcher)

    searcher.fit(xTrain, yTrain)

    print(searcher.best_estimator_)

    print(searcher.best_score_)

if __name__ == '__main__':
    # Only the grid-search demo runs by default; the other demos are kept
    # commented out for manual, one-off runs.
    # irisDataTest()
    # digitsDataTest()
    gridSearchTest()