import numpy as np
import matplotlib.pyplot as plt
import matplotlib.transforms as transforms
from matplotlib.patches import Ellipse


class Util(object):
    @staticmethod
    def draw_dynamic_clusters(data, i=-1, max_iter=-1, num_cluster=None, label=None, mean=None, interval=0.1):
        """
        Draw the clustering state for one iteration in interactive mode.

        @deprecated
        :param data: data points, array of shape (n, 2)
        :param i: current iteration index (-1 means "no labels yet")
        :param max_iter: total number of iterations
        :param num_cluster: number of clusters
        :param label: predicted cluster label per data point
        :param mean: cluster centers, array of shape (num_cluster, 2)
        :param interval: refresh interval in seconds
        :return: None
        """

        plt.ion()
        ax = plt.gca()

        if i == -1:
            # No labels yet: just show the raw data points.
            ax.set_title('No labels')
            plt.scatter(data[:, 0], data[:, 1])
            # enlarging interval for display
            plt.pause(1.0)
        else:
            ax.set_title('Iter: {%d / %d} Evaluation Result' % (i, max_iter))
            colors = ['blue', 'brown', 'gold', 'green', 'chocolate', 'indigo', 'fuchsia', 'orange', 'sienna']
            # Cycle through the palette so num_cluster > len(colors)
            # no longer raises IndexError.
            for k in range(num_cluster):
                plt.scatter(data[label == k, 0], data[label == k, 1], c=colors[k % len(colors)])
            center_colors = [colors[k % len(colors)] for k in range(num_cluster)]
            plt.scatter(mean[:, 0], mean[:, 1], s=300, edgecolors='black', c=center_colors, marker='P')

            plt.pause(interval)

        if i > -1 and i == max_iter:
            # Final frame: leave interactive mode and block on the figure.
            plt.ioff()
            plt.show()
        else:
            # Otherwise clear the axes so the next frame starts fresh.
            plt.cla()
        return


def draw_ani(loss, end=True, interval=0.1):
    """
    Animate the loss curve in interactive mode.

    :param loss: sequence of loss values
    :param end: if True, leave interactive mode and block on the final figure
    :param interval: animation refreshing interval in seconds (default = 0.1)
    :return: None
    """
    plt.ion()
    plt.plot(loss)
    plt.pause(interval)
    if end:
        # Final frame: turn interactive mode off and keep the window open.
        plt.ioff()
        plt.show()
    else:
        # Clear the axes so the next frame redraws from scratch.
        plt.cla()

    # NOTE: the previous unconditional plt.show() here was redundant —
    # the end branch already shows the figure, and after cla() there is
    # nothing meaningful left to show.
    return


# Shared module-level figure with two axes used by result_fig:
# axs[0] for the data points, axs[1] for the loss / likelihood curve.
fig, axs = plt.subplots(1, 2)


def result_fig(data, likelihood=None, i=-1, max_iter=-1,
               num_cluster=None, label=None, mean=None, interval=0.1,
               kmeans=False, end=False):
    """
    Draw the result figure:
        axs[0] shows the data points,
        axs[1] shows the loss / log-likelihood curve.

    :param data: data points, array of shape (n, 2)
    :param likelihood: loss (k-means) or log-likelihood history; None draws raw data only
    :param i: current iteration index
    :param max_iter: total number of iterations
    :param num_cluster: number of clusters
    :param label: predicted cluster label per data point
    :param mean: cluster centers, array of shape (num_cluster, 2)
    :param interval: refreshing interval in seconds
    :param kmeans: True when plotting k-means (WCSS loss, no Gaussian ellipses)
    :param end: True on the final k-means frame, to stop the animation
    :return: None
    """

    # Enable interactive (non-blocking) drawing.
    plt.ion()

    # Titles and axis labels.
    axs[1].set_title('WCSS loss' if kmeans else 'Log Likelihood')
    axs[1].set_xlabel('Iteration')
    axs[0].set_title('Data Points')

    if likelihood is None:
        # Plot the raw, unlabeled data.
        plt.suptitle('Original Data Points')
        axs[0].scatter(data[:, 0], data[:, 1])
        # Longer pause so the initial view stays visible.
        plt.pause(1.0)
    else:
        if not kmeans:
            plt.suptitle('Iter: {%d / %d}--%d Clusters' % (
                i, max_iter, num_cluster
            ))
        else:
            plt.suptitle('Iter: {%d} -- %d Clusters' % (
                i, num_cluster))

        # Loss / likelihood history.
        axs[1].plot(likelihood)
        colors = ['blue', 'brown', 'gold', 'green', 'chocolate', 'indigo', 'fuchsia', 'orange', 'sienna']
        # Cycle through the palette so num_cluster > len(colors) no longer
        # raises IndexError.
        for k in range(num_cluster):
            axs[0].scatter(data[label == k, 0], data[label == k, 1], c=colors[k % len(colors)])
        # Cluster centers drawn on top of the points.
        center_colors = [colors[k % len(colors)] for k in range(num_cluster)]
        axs[0].scatter(mean[:, 0], mean[:, 1], s=30, edgecolors='black', c=center_colors, marker='o')
        # Gaussian confidence ellipses (EM mode only).
        if not kmeans:
            _ellipse(data, label, num_cluster, axs[0])
        plt.pause(interval)

    # On the last frame, turn interactive mode off and call show to keep
    # the final figure on screen.
    if (i > -1 and i == max_iter) or (kmeans and end):
        plt.ioff()
        plt.show()
    else:
        # Otherwise wipe both axes for the next frame.
        axs[0].cla()
        axs[1].cla()

    return


def _ellipse(data, labels, num_cluster, ax):
    """
    Draw a confidence ellipse for every cluster on the given axes.

    :param data: data points, array of shape (n, 2)
    :param labels: predicted cluster label per data point
    :param num_cluster: number of clusters
    :param ax: matplotlib.axes.Axes to draw on
    :return: None
    """
    # The per-cluster color list and the colored-edge variant were dead
    # code (edgecolor is fixed to black), so they have been removed.
    for k in range(num_cluster):
        mask = labels == k
        _confidence_ellipse(data[mask, 0], data[mask, 1],
                            ax, edgecolor='black', linewidth=2)
    return


def _confidence_ellipse(x, y, ax, n_std=1.5,
                        facecolor='none', **kwargs):
    """
    Draw the covariance confidence ellipse of *x* and *y* on *ax*.

    Reference
     https://matplotlib.org/3.3.2/gallery/statistics/confidence_ellipse.html#sphx-glr-gallery-statistics-confidence-ellipse-py

    :param x: array-like, shape (n,)
    :param y: array-like, shape (n,) input data
    :param ax:  matplotlib.axes.Axes
    :param n_std: float — number of standard deviations determining the
        ellipse's radii
    :param facecolor: fill color of the ellipse patch
    :param kwargs: forwarded to matplotlib.patches.Ellipse
    :return:  matplotlib.patches.Ellipse
    """

    covariance = np.cov(x, y)
    # Pearson correlation coefficient of the two coordinates.
    pearson = covariance[0, 1] / np.sqrt(covariance[0, 0] * covariance[1, 1])

    # Special case: the eigenvalues of a 2x2 correlation matrix are
    # 1 +/- pearson, giving the radii of the unit ellipse.
    radius_x = np.sqrt(1 + pearson)
    radius_y = np.sqrt(1 - pearson)
    patch = Ellipse((0, 0), width=2 * radius_x, height=2 * radius_y,
                    facecolor=facecolor, **kwargs)

    # Scale each axis by n_std sample standard deviations, then shift the
    # ellipse from the origin to the sample mean.
    scale_x = n_std * np.sqrt(covariance[0, 0])
    scale_y = n_std * np.sqrt(covariance[1, 1])
    center_x = np.mean(x)
    center_y = np.mean(y)

    trans = (transforms.Affine2D()
             .rotate_deg(45)
             .scale(scale_x, scale_y)
             .translate(center_x, center_y))
    patch.set_transform(trans + ax.transData)
    return ax.add_patch(patch)
