import numpy as np
import matplotlib.pyplot as plt
import knn_model as knn
import time
import knn_kdTree

# Path to the real MNIST .npy files; only used by the commented-out loading
# code below — the active script runs on synthetic clusters instead.
DATASET_PATH = '../DataSet/mnist/'

# Real-MNIST loading path, kept for switching back from the synthetic data:
# loads images/labels, flattens each image to a 1-D vector, and converts
# one-hot label rows to integer class ids via argmax.
# train_images = np.load(DATASET_PATH + 'train_img.npy')
# train_labels = np.load(DATASET_PATH + 'train_label.npy')
#
# test_images = np.load(DATASET_PATH + 'test_img.npy')
# test_labels = np.load(DATASET_PATH + 'test_label.npy')
#
# train_images = train_images.reshape((train_images.shape[0], -1))
# train_labels = [x.argmax() for x in train_labels]
#
# test_images = test_images.reshape((test_images.shape[0], -1))
# test_labels = [x.argmax() for x in test_labels]
#
#
base_num = 1000

# Six well-separated cluster centres in 3-D; each cluster is base_num points
# of uniform integer noise in [-1000, 1000] around its centre.
cluster_offsets = [
    (2000, 0, 0),
    (-2000, 0, 0),
    (0, 2000, 0),
    (0, -2000, 0),
    (2000, 0, 2000),
    (2000, 0, -2000),
]

train_images = np.random.randint(-1000, 1000 + 1, (6 * base_num, 3))
for cls, offset in enumerate(cluster_offsets):
    train_images[cls * base_num:(cls + 1) * base_num] += offset

# Label of each point is the index of its cluster.
train_labels = np.repeat(np.arange(6), base_num)

# Shuffle samples and labels together, then hold out the first base_num
# samples as the test split and keep the rest for training.
shuffle = np.random.permutation(6 * base_num)
train_images = train_images[shuffle]
train_labels = train_labels[shuffle]

test_images, test_labels = train_images[:base_num], train_labels[:base_num]
train_images, train_labels = train_images[base_num:], train_labels[base_num:]


# Benchmark the brute-force kNN: predict `size` test samples and report
# wall-clock time and accuracy.
size = 100  # number of test samples to evaluate
print('slow kNN start predicting...')
# Start the timer after the status print so that, like the kd-tree benchmark
# below, only the prediction loop itself is timed.
t1 = time.time()
correct = 0
for idx in range(size):
    # classify0: brute-force kNN over the whole training set with k=3.
    predict = knn.classify0(test_images[idx], train_images, train_labels, 3)
    if predict == test_labels[idx]:
        correct += 1
# Count hits as integers and divide once — avoids accumulating float
# rounding error from repeated `+= 1./size` additions.
accuracy = correct / size

print('slow kNN', 'time:%.2fs' % (time.time() - t1), 'accuracy:%.1f' % (accuracy * 100) + '%')
print()
# Benchmark the kd-tree kNN on the same `size` test samples; building the
# tree is done outside the timed region so only prediction is measured.
print('kdTree kNN training...')
kd = knn_kdTree.KDTree(train_images, train_labels)

print('kdTree kNN start predicting...')
t1 = time.time()
correct = 0
for idx in range(size):
    # k=3 nearest-neighbour query against the kd-tree.
    predict = kd.predict(test_images[idx], 3)
    if predict == test_labels[idx]:
        correct += 1
# Integer hit count divided once — avoids float drift from `+= 1./size`.
accuracy = correct / size

print('kd kNN', 'time:%.2fs' % (time.time() - t1), 'accuracy:%.1f' % (accuracy * 100) + '%')
print()
