import heapq
import os

import alphashape
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from descartes import PolygonPatch
from sklearn.cluster import KMeans


def get_colors(A):
    """
    Map each node's color code to a matplotlib color spec.

    :param A: matrix of nodes with properties num, x, y, color
    :return: a list of matplotlib color specs, one per node whose code is
             known (9999 marks the U-set, 99999 the center point).
             Nodes with an unknown code are silently skipped, exactly as in
             the original if-chain — so the list may be shorter than A.
    """
    # One lookup table replaces the original 11-branch if-chain, which
    # compared the first code as a truncated int and the rest as raw floats.
    palette = {0: 'r', 1: 'g', 2: 'b', 3: 'y', 4: 'c', 5: 'm',
               6: 'gold', 7: 'grey', 8: '#6600CC',
               9999: '#663300', 99999: 'k'}
    return [palette[int(row[3])] for row in A if int(row[3]) in palette]


def get_distance_matrix(M):
    """
    :param M: matrix of nodes with properties num, x, y (coordinates are
              read from columns 1 and 2)
    :return: the m x m matrix of pairwise Euclidean distances between nodes
    """
    M = np.asarray(M, dtype=float)
    # Broadcasting replaces the original O(m^2) Python double loop; the
    # per-entry arithmetic (sqrt of squared differences) is unchanged.
    dx = M[:, 1][:, None] - M[:, 1][None, :]
    dy = M[:, 2][:, None] - M[:, 2][None, :]
    return np.sqrt(dx ** 2 + dy ** 2)


def distance(p1, p2):
    """
    Return the 2D Euclidean distance between two nodes.

    :param p1: first node as an (x, y) pair
    :param p2: second node as an (x, y) pair
    :return: the Euclidean distance between p1 and p2
    """
    dx = p1[0] - p2[0]
    dy = p1[1] - p2[1]
    return np.sqrt(dx ** 2 + dy ** 2)


def get_center(D):
    """
    :param D: the distance matrix
    :return: the index of the center point
             the center point definition: the node with the smallest total
             (equivalently, average) distance to the other nodes
    """
    # Row sums replace the original double Python loop over D.
    return np.argmin(np.asarray(D).sum(axis=1))


def get_near(D, node, num):
    """
    :param D: the distance matrix
    :param node: index of a node in the matrix
    :param num: the number of nearest nodes to search for
    :return: indices (argwhere column vector) of all nodes whose distance to
             `node` does not exceed the (num+1)-th smallest entry of its row;
             the node itself qualifies (distance 0) and distance ties may
             yield more than num+1 entries
    """
    m = D.shape[0]
    row = [D[node][j] for j in range(m)]
    # The (num+1)-th smallest distance is the cutoff; +1 accounts for the
    # node's zero distance to itself.
    cutoff = heapq.nsmallest(num + 1, row)[-1]
    return np.argwhere(row <= cutoff)


def mark_U(A, n):
    """
    :param A: matrix of nodes with properties num, x, y
    :param n: the number of nodes in the U-set
    :return: matrix extended with a color column where the U-set is marked
             9999 and the center point is marked 99999
    """
    D = get_distance_matrix(A)
    center = get_center(D)
    near = get_near(D, center, n)

    # Append a zero-initialised color column to the node matrix.
    color_col = np.zeros((len(A), 1), dtype=int)
    A = np.concatenate((A, color_col), axis=1)

    # Mark the U-set: every near index except the first entry (the original
    # skips element 0 of the argwhere result).
    for idx in near[1:]:
        A[idx[0]][3] = 9999

    A[center][3] = 99999  # mark the center point itself
    return A


def k_means_clustering(k, A):
    """
    Partition the unmarked (color == 0) nodes into k spatial clusters.

    :param k: the number of partitions
    :param A: matrix of nodes with properties num, x, y, color
              (U-set/center rows marked 9999/99999 are left untouched)
    :return: A with the color column of unmarked nodes set to 0..k-1
    """
    free = A[:, 3].astype(int) == 0
    V = A[free]
    # Cluster on the x/y coordinates only. The original fed the whole row
    # (including the node number and the zero color column) to KMeans,
    # which skews the spatial partition.
    km = KMeans(n_clusters=k).fit(V[:, 1:3])
    # Mask assignment writes each label back to its node in row order,
    # matching the original's sequential counter.
    A[free, 3] = km.labels_
    return A


# generate CTSP data out of a raw TSP data
def generate_data(path, file_name, extension, m, u_part):
    """
    :param path: TSP data file path
    :param file_name: TSP file name
    :param extension: TSP file extension
    :param m: the number of colors to allocate
    :param u_part: the partition ratio of the U-set
    :return: Null
    """
    color_num = m
    file_path = path + '\\' + file_name + extension
    A = np.loadtxt(file_path, skiprows=6)

    n = np.ceil(len(A) * u_part).astype(int)
    A = mark_U(A, n)
    nodes_num = A.shape[0]
    A = k_means_clustering(color_num, A)

    df = pd.DataFrame(A)
    df.columns = ['num', 'x', 'y', 'color']
    df['num'] = df['num'].astype(int)
    df['color'] = df['color'].astype(int)
    color_count = df['color'].value_counts()
    color_count = np.array(color_count).tolist()
    color_count = color_count[:-2]

    description = ['NAME: ' + file_name,
                   'TYPE: CTSP ' + 'U=' + str(u_part) + ' (' + str(n) + ')',
                   'NUM_NODES: ' + str(nodes_num),
                   'NUM_COLOR: ' + str(color_num) + ' colors counts: ' + ', '.join(str(i) for i in color_count),
                   'EDGE_WEIGHT_TYPE: EUC_2D',
                   'COMMENTS: 9999 marks the U set, 99999 marks the center point.']
    description = np.array(description)

    save_path = 'output\\' + str(u_part) + '\\' + file_name + '_' + str(color_num) + 'colors.txt'
    np.savetxt(save_path, description, fmt='%s', encoding='utf-8')
    df.to_csv(save_path, index=False, sep='\t', na_rep='NA', mode='a')

    # fig, ax = plt.subplots(1, 1)
    # plt.title(file_name + ' divided in ' + str(color_num) + ' colors')
    #
    # ax.scatter(A[:, 1], A[:, 2], color=get_colors(A), s=10)
    # center = A[np.argmax(A[:, 3])]
    # ax.scatter(center[1], center[2], color='k', s=20)
    # draw_a_shape(A, ax)
    # fig.savefig('output\\' + str(u_part) + '\\' + file_name + '_' + str(color_num) + 'colors.png')
    # plt.show()
    plt.cla()
    plt.close('all')


# draw the alpha-shape of each cluster
def draw_a_shape(A, ax):
    """
    Draw the alpha-shape hull of each cluster onto the given axes.

    :param A: matrix of nodes with properties num, x, y, color
    :param ax: matplotlib axes to draw the patches on
    :return: None
    """
    labels = A[:, 3].astype(int)
    # (selection mask, face color) per group; >= 9999 groups the U-set and
    # the center point together, as in the original first stanza. This loop
    # replaces seven copy-pasted blocks.
    groups = [(labels >= 9999, '#663300'),
              (labels == 0, 'r'),
              (labels == 1, 'g'),
              (labels == 2, 'b'),
              (labels == 3, 'y'),
              (labels == 4, 'c'),
              (labels == 5, 'm')]
    for mask, c in groups:
        points = A[np.argwhere(mask).flatten(), 1:-1]
        # A polygonal hull needs more than 3 points.
        if len(points) > 3:
            shape = alphashape.alphashape(points)
            ax.add_patch(PolygonPatch(shape, fc=c, ec=c, alpha=0.2))


if __name__ == '__main__':

    # raw TSP instances, small scale
    small_dimension_tsp = ['att48', 'berlin52', 'kroB100', 'kroA150',
                           'kroA200', 'ch150', 'gr202']

    # raw TSP instances, large scale (switch the path/extension below to
    # process these instead)
    large_dimension_tsp = ['dsj1000', 'pr1002', 'rl1889', 'pcb3038',
                           'fnl4461', 'brd14051']

    u_set = 0.2  # the partition ratio of the U-set

    # path = 'data\\large-dimension-tsp'
    # extension = '.tsp'
    path = 'data\\small-dimension-tsp'  # TSP data path
    extension = '.tsp.txt'  # TSP data extension

    # Generate one CTSP instance per file for 2..5 colors.
    for file in small_dimension_tsp:
        for i in range(2, 6):
            generate_data(path, file, extension, i, u_set)
            print(file, ' with ' + str(i), 'colors done.')

    print('All done.')
