import numpy as np
import matplotlib.pyplot as plt
from utils2 import JS_D
import random

def euclidean_distances(a, b):
    """Return the Euclidean (L2) distance between two vectors `a` and `b`."""
    diff = a - b
    return np.sqrt((diff * diff).sum())


class MyKMeans():
    """Minimal K-means (Lloyd's algorithm) with a pluggable distance metric.

    Parameters
    ----------
    n_clusters : int
        Number of clusters (k).
    distance_metric : callable
        ``distance_metric(a, b) -> float`` giving the distance between two
        sample vectors (e.g. ``euclidean_distances``).
    max_iter : int, optional
        Maximum number of Lloyd iterations (default 300; previously a
        hard-coded constant inside ``fit``).

    Attributes set by ``fit``
    -------------------------
    labels_ : ndarray of shape (n_samples,), dtype int
        Cluster index assigned to each sample.
    centre : ndarray of shape (n_clusters, n_features)
        Cluster centroids after the final iteration.
    """

    def __init__(self, n_clusters, distance_metric, max_iter=300):
        self.n_clusters = n_clusters
        self.distance_metric = distance_metric
        self.max_iter = max_iter

    def fit(self, X):
        """Cluster X, iterating until labels stop changing or max_iter.

        Returns ``self`` to allow sklearn-style chaining (backward
        compatible: previous version implicitly returned None).
        """
        X = np.asarray(X)
        self.data_num = np.shape(X)[0]
        # Keep the original misspelled attribute for backward compatibility
        # and expose a correctly spelled alias.
        self.data_dim = self.date_dim = np.shape(X)[1]
        self.labels_ = np.zeros(self.data_num, dtype=int)
        self.X = X
        self.centre = self.init_centre()
        for n in range(1, self.max_iter + 1):
            self.last_labels = self.labels_.copy()
            self.centre = self.update_centre(self.centre)
            # Converged when no sample changed cluster during this iteration.
            if np.array_equal(self.last_labels, self.labels_):
                print('K-means convergence after iteration ', n)
                break
        return self

    def init_centre(self):
        """Seed centroids with n_clusters distinct samples chosen uniformly."""
        idx = random.sample(range(self.data_num), self.n_clusters)
        # astype(float): integer input data must not truncate the means
        # computed in update_centre.
        return self.X[idx].astype(float)

    def update_centre(self, centre):
        """One Lloyd iteration: reassign every sample, then recompute means.

        Fix: a cluster that ends up empty keeps its previous centroid
        instead of becoming NaN (``np.average`` of an empty slice raised a
        RuntimeWarning and poisoned the centroid with NaNs).
        """
        for i, x in enumerate(self.X):
            dis_min = np.inf
            for j, one_centre in enumerate(centre):
                cur_dis = self.distance_metric(x, one_centre)
                if cur_dis < dis_min:
                    dis_min = cur_dis
                    self.labels_[i] = j
        for i in range(self.n_clusters):
            members = self.X[self.labels_ == i]
            if len(members):  # guard against empty clusters
                centre[i] = np.average(members, axis=0)
        return centre

if __name__ == '__main__':

    # Three Gaussian blobs (sigma = 5) centred at (0,0), (10,0) and (5,8.6).
    blob_a = np.random.normal(0, 5, (100, 2))
    blob_b = np.random.normal(0, 5, (100, 2)) + [10, 0]
    blob_c = np.random.normal(0, 5, (100, 2)) + [5, 8.6]

    X = np.concatenate((blob_a, blob_b, blob_c), axis=0)
    np.random.shuffle(X)

    # Build the clusterer, fit it, and read back the predicted labels.
    estimator = MyKMeans(n_clusters=3, distance_metric=euclidean_distances)
    estimator.fit(X)
    label_pred = estimator.labels_

    # Visualise the result, colouring each point by its cluster label.
    plt.figure()
    plt.scatter(X[:, 0], X[:, 1], c=label_pred)
    plt.show()
    plt.close()