from sklearn import datasets
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
from sklearn.metrics import classification_report, make_scorer, accuracy_score
from sklearn.model_selection import cross_val_score, cross_val_predict, KFold, GridSearchCV
import matplotlib.pyplot as plt

import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler


def grid_search(classifier, params, X=None, y=None):
    """Run a 10-fold grid search and return the best hyperparameters.

    Args:
        classifier: An unfitted sklearn estimator to tune.
        params: Parameter grid (dict of name -> candidate values) for GridSearchCV.
        X: Feature matrix; defaults to the module-level ``data`` for backward
           compatibility with existing callers.
        y: Target vector; defaults to the module-level ``target``.

    Returns:
        dict: ``best_params_`` of the fitted GridSearchCV (accuracy-scored).
    """
    # Fall back to the module globals so the original call sites keep working.
    # (No `global` statement needed: the globals are only read, never rebound.)
    if X is None:
        X = data
    if y is None:
        y = target
    k_fold = KFold(n_splits=10)
    scoring_func = make_scorer(accuracy_score)
    grid = GridSearchCV(classifier, param_grid=params, scoring=scoring_func, cv=k_fold)
    grid = grid.fit(X, y)
    return grid.best_params_


# Import dataset: tab-separated file with no header row.
# Columns 0-2 are features, column 3 is the class label (1/2/3).
raw_data = pd.read_csv('./datingTestSet2.txt', delimiter='\t', header=None)
print("Dataset Shape: " + str(raw_data.shape))

# Preprocess data: drop exact duplicate rows, then impute missing values
# with the per-column mean.
raw_data = raw_data.drop_duplicates()
raw_data = raw_data.fillna(raw_data.mean())
# Normalize the feature columns using z-score standardization.
ss = StandardScaler()
# 0: flight, 1: ice cream, 2: game time
scale_features = [0, 1, 2]
raw_data[scale_features] = ss.fit_transform(raw_data[scale_features])
raw_data.hist(grid=False, figsize=(12, 12))
plt.show()

# Separate features and target.
raw_data = np.array(raw_data)
# BUG FIX: the original used raw_data[:, :2], which silently dropped the
# third scaled feature ("game time") even though all three columns were
# standardized above. All three feature columns are used now.
data = raw_data[:, :3]
target = raw_data[:, 3]

# KNN Model: tune neighbor count and Minkowski power via grid search.
params = {'n_neighbors': range(1, 10), 'p': range(1, 10)}
best_knn_params = grid_search(KNeighborsClassifier(), params)
n_neighbors = best_knn_params['n_neighbors']
p = best_knn_params['p']
knn = KNeighborsClassifier(n_neighbors=n_neighbors, p=p)
print("KNN Hyperparameter: n_neighbors: {}, p: {}".format(n_neighbors, p))

# Decision Tree Model: tune tree depth and split criterion via grid search.
params = {'max_depth': range(1, 21), 'criterion': np.array(['entropy', 'gini'])}
params_result = grid_search(DecisionTreeClassifier(), params)
max_depth, criterion = params_result['max_depth'], params_result['criterion']
decision = DecisionTreeClassifier(max_depth=max_depth, criterion=criterion)
# BUG FIX: the original passed the estimator object `decision` to .format(),
# printing the full classifier repr where the chosen criterion string belongs.
print("Decision Tree Hyperparameter: max_depth: {}, criterion: {}".format(max_depth, criterion))

# SVC Model: tune regularization strength C and RBF kernel width gamma.
params = {'C': np.array([0.1, 1, 10]), 'gamma': np.array([1, 0.1, 0.01])}
best_svc_params = grid_search(SVC(), params)
C = best_svc_params['C']
gamma = best_svc_params['gamma']
svc = SVC(C=C, gamma=gamma)
print("SVC Hyperparameter: C: {}, gamma: {}".format(C, gamma))

# Train and score: evaluate each tuned model with 10-fold cross-validation
# and report per-class precision/recall/F1.
target_names = ['class 1', 'class 2', 'class 3']

for model_name, model in (("KNN", knn), ("DecisionTree", decision), ("SVC", svc)):
    fold_scores = cross_val_score(model, data, target, cv=10)
    predicted_labels = cross_val_predict(model, data, target, cv=10)
    # The original script dumped the raw predicted labels for KNN only.
    if model_name == "KNN":
        print(predicted_labels)
    print("The score of " + model_name + " model: " + str(fold_scores))
    print(classification_report(target, predicted_labels, target_names=target_names))
