#!/usr/bin/env python

import numpy as np
from sklearn.base import ClassifierMixin
from sklearn.cluster import KMeans
from semi_kmeans import *

from sklearn.metrics.pairwise import linear_kernel, polynomial_kernel, rbf_kernel, sigmoid_kernel, laplacian_kernel, cosine_similarity

# Kernel function shared by every estimator below; swap for rbf_kernel,
# linear_kernel, etc. to change the induced feature space.
kernel = polynomial_kernel

class KernelKMeansClassifier(KMeans, ClassifierMixin):
    """Kernel nearest-class-centroid classifier.

    A sample x is assigned to the class ``c`` maximising
        mean_j k(x, x_j^c) - mean_{ij} k(x_i^c, x_j^c),
    i.e. similarity to the class in kernel space minus a per-class offset.
    Training is pure memorisation of the labelled data.
    """

    def _init(self, X, y):
        # Store the training set and the distinct class labels.
        # (The unused ``n_samples`` local of the original was dropped.)
        self.X_ = X
        self.y_ = y
        self.labels_ = np.unique(y)

    def get_class(self, c):
        """Return the training samples belonging to class ``c``."""
        return self.X_[self.y_ == c]

    def fit(self, X, y):
        """Memorise the labelled training set ``(X, y)`` and return self.

        Bug fix: the original ``fit(X)`` called ``_init(X)``, which requires
        two arguments, so it always raised ``TypeError``. A classifier's
        ``fit`` takes ``(X, y)`` per scikit-learn convention.
        """
        self._init(X, y)
        return self

    def get_Kc(self, c):
        """Gram matrix of the samples of class ``c``."""
        return kernel(self.get_class(c))

    def predict(self, X):
        """Return the predicted class label for each row of ``X``."""
        def decision(X, c):
            Kc = self.get_Kc(c)
            return np.mean(kernel(X, self.get_class(c)), axis=1) - np.mean(Kc)
        # Compute the per-class decision values once (the original built the
        # same list twice: once into a dead variable, once for the return).
        scores = [decision(X, c) for c in self.labels_]
        # Map argmax indices back to the actual label values, so predictions
        # are class labels even when labels are not 0..n_classes-1.
        return self.labels_[np.argmax(scores, axis=0)]


class KernelKMeans(KernelKMeansClassifier):
    """Unsupervised kernel k-means.

    Starts from a uniformly random assignment of the samples to
    ``n_clusters`` groups, then repeatedly reassigns every sample to its
    nearest kernel-space centroid via the parent classifier's ``predict``.
    (A commented-out mini-batch variant of ``fit`` was removed as dead code.)
    """

    def _init(self, X):
        # Random initial labelling; cluster ids are 0..n_clusters-1.
        # NOTE(review): uses the global NumPy RNG — no random_state, so runs
        # are not reproducible unless the caller seeds np.random.
        n_samples = X.shape[0]
        y = np.random.randint(0, self.n_clusters, n_samples)
        super()._init(X, y)

    def fit(self, X):
        """Run Lloyd-style reassignment for ``max_iter`` passes; return self.

        NOTE(review): there is no convergence test — it always performs
        exactly ``max_iter`` full reassignment sweeps.
        """
        self._init(X)
        for _ in range(self.max_iter):
            self.y_ = self.predict(X)
        return self



if __name__ == '__main__':
    from sklearn.model_selection import train_test_split
    from sklearn import datasets
    from sklearn.manifold import TSNE
    import matplotlib.pyplot as plt

    # Demo: cluster the digits dataset and visualise the clusters with t-SNE.
    digits = datasets.load_digits()  # fixed 'digists' typo
    X_train, X_test, y_train, y_test = train_test_split(
        digits.data, digits.target, test_size=0.75)
    # np.float_ was removed in NumPy 2.0; np.float64 is the portable spelling.
    X_train = X_train.astype(np.float64)
    y_train = y_train.astype(np.float64)

    # Plain k-means baseline (fitted for comparison only; result unused).
    km = KMeans(n_clusters=10)
    km.fit(X_train)

    kkm = KernelKMeans(n_clusters=10, max_iter=20)
    kkm.fit(X_train)
    y = kkm.predict(X_train)

    # Embed into 2-D for plotting.
    tsne = TSNE(n_components=2, random_state=42)
    X_embedded = tsne.fit_transform(X_train)

    # One colour per cluster. The original hard-coded only 6 colours, so
    # zip() silently dropped 4 of the 10 clusters from the plot.
    colors = plt.cm.tab10(np.linspace(0, 1, len(kkm.labels_)))
    for c, color in zip(kkm.labels_, colors):
        plt.scatter(X_embedded[y == c, 0], X_embedded[y == c, 1], color=color)
    plt.xlabel('t-SNE Dimension 1')
    plt.ylabel('t-SNE Dimension 2')
    plt.title('t-SNE Embedding')
    plt.show()

