# -*- coding: utf-8 -*-
"""
Created on Fri Aug 23 16:54:29 2019

@author: MS
"""
import numpy as np
import matplotlib.pyplot as plt
from functools import reduce

#---------------------------------
#      定义几个函数
#---------------------------------      
def train(X, outshape, ite=500):
    """Train a SOM weight matrix on the samples in X.

    X        : (m, n) data matrix, one sample per row.
    outshape : desired output-layer grid shape; size-1 dimensions are dropped.
    ite      : number of training iterations.
    Returns the (L, n) weight matrix, one row per output-layer unit.
    """
    num_samples = X.shape[0]
    grid_dims = [d for d in outshape if d > 1]        # drop singleton dimensions
    num_units = int(np.prod(grid_dims))               # total number of output units
    grid_pos = coordinate(grid_dims)                  # coordinates of every output unit
    # initialise weights from randomly chosen distinct samples
    W = X[np.random.choice(num_samples, num_units, replace=False), :]
    for step in range(ite):
        sample = X[np.random.randint(num_samples), :]           # one random input sample
        win = compete(sample, W, rule='euler')                  # index of winning unit
        eta = LearningRate(grid_pos, win, step, rule='euler')   # per-unit learning rates
        W = W + eta.reshape(-1, 1) * (sample - W)               # pull weights toward sample
    return W

def LearningRate(position, winner, t, rule='euler'):
    """Per-unit learning rates for one SOM update step.

    position : (L, k) coordinates of all output-layer units.
    winner   : index of the winning unit.
    t        : current iteration number (controls the time decay).
    rule     : neighbourhood distance rule: 'abs' (Manhattan),
               'euler' (squared Euclidean), or 'max' (Chebyshev).
    Returns an (L,) array of learning rates; units farther than the
    shrinking cutoff Nmax get rate 0.
    Raises ValueError for an unknown rule (previously printed a message
    and then crashed with NameError on the undefined `dis`).
    """
    p0 = position[winner]
    if rule == 'abs':
        dis = np.sum(np.abs(position - p0), axis=1)          # Manhattan distance
        maxdis = np.sum(np.abs(position[0] - position[-1]))
    elif rule == 'euler':
        dis = np.sum((position - p0) ** 2, axis=1)           # squared Euclidean distance
        maxdis = np.sum((position[0] - position[-1]) ** 2)
    elif rule == 'max':
        dis = np.max(np.abs(position - p0), axis=1)          # Chebyshev (max) distance
        maxdis = np.max(np.abs(position[0] - position[-1]))
    else:
        raise ValueError('unknown neighbourhood rule: %r' % (rule,))
    eta_t = np.exp(-t / 200)     # rate decays over time
    eta_N = np.exp(-dis / maxdis)  # rate decays with distance from the winner
    Nmax = maxdis * np.exp(-t / 200)  # cutoff distance shrinks over time
    return eta_t * eta_N * (dis <= Nmax)
    
def compete(x, W, rule='euler'):
    """Output-layer competition: return the index of the winning unit.

    x    : (n,) input sample.
    W    : (L, n) weight matrix, one row per unit.
    rule : 'angle' (cosine similarity), 'abs' (Manhattan),
           'euler' (squared Euclidean), 'max' (Chebyshev distance,
           i.e. the largest absolute component of x1 - x2).
    Raises ValueError for an unknown rule (previously printed a message
    and silently returned None).
    """
    if rule == 'angle':
        # normalise each weight row; a zero row would produce NaN here
        norms = np.sqrt(np.sum(W ** 2, axis=1)).reshape(-1, 1)
        return np.argmax((W / norms).dot(x))     # largest inner product wins
    elif rule == 'abs':
        return np.argmin(np.sum(np.abs(W - x), axis=1))   # smallest Manhattan distance
    elif rule == 'euler':
        return np.argmin(np.sum((W - x) ** 2, axis=1))    # smallest squared Euclidean
    elif rule == 'max':
        return np.argmin(np.max(np.abs(W - x), axis=1))   # smallest Chebyshev distance
    raise ValueError('unknown competition rule: %r' % (rule,))
   
def coordinate(dimension):
    """Enumerate the grid coordinates of every output unit, row-major.

    e.g. dimension=[2, 3] yields
    np.array([[0,0],[0,1],[0,2],[1,0],[1,1],[1,2]])
    """
    total = reduce(lambda a, b: a * b, dimension)
    flat = np.arange(total)
    # unravel each flat index into its multi-dimensional grid coordinate;
    # stacking the per-axis arrays column-wise gives one (L, ndim) table
    return np.stack(np.unravel_index(flat, tuple(dimension)), axis=1)
        
def show(X, outshape, W, rule='euler'):
    """Plot the clustering result (left) and the activated output units (right).

    X        : (m, 2) sample matrix (only the first two features are drawn).
    outshape : output-layer shape used during training.
    W        : trained weight matrix returned by train().
    rule     : competition rule passed through to compete().
    """
    #---------------------
    #  enable Chinese glyphs in matplotlib
    #---------------------
    plt.rcParams['font.sans-serif'] = ['SimHei']
    plt.rcParams['axes.unicode_minus'] = False
    #---------------------
    #  winning unit of every sample, and the set of active units
    #---------------------
    # BUG FIX: winner must be an ndarray. With the original Python list,
    # `winner == wi` evaluated to the scalar False, so X[winner==wi, 0]
    # silently plotted only X[0, 0] for every cluster.
    winner = np.array([compete(x, W, rule=rule) for x in X])
    winindexs = list(np.unique(winner))   # sorted for a deterministic colour order
    fig = plt.figure()
    ax1 = fig.add_subplot(121)
    ax1.set_title('原数据')
    c = ['r', 'y', 'g', 'b', 'c', 'k', 'm', 'w', 'skyblue', 'gold', 'lime', 'darkorange']
    for i, wi in enumerate(winindexs):
        mask = winner == wi
        ax1.scatter(X[mask, 0], X[mask, 1], c=c[i], marker='x', label=i)
        ax1.scatter(W[wi, 0], W[wi, 1], c=c[i], s=60, marker='o', label='W%d' % i)
    ax1.axis('equal')
    #---------------------
    #  activated units on the output-layer grid
    #---------------------
    outshape = [os for os in outshape if os > 1]   # drop singleton dimensions
    position = coordinate(outshape)                # coordinates of each output unit
    ax2 = fig.add_subplot(122)
    ax2.set_title('SOM输出层激活单元')
    if len(outshape) > 2:
        print('只能显示1D或者2D分布')
    elif len(outshape) == 2:
        # 2D grid: mark active units, then draw the grid lines
        for i, wi in enumerate(winindexs):
            ax2.scatter(position[wi, 0], position[wi, 1], c=c[i], s=80, marker='o', label=i)
        for i in range(outshape[0]):
            ax2.plot([i, i], [0, outshape[1] - 1], c='k')
        for j in range(outshape[1]):
            ax2.plot([0, outshape[0] - 1], [j, j], c='k')
        plt.show()
    else:
        # 1D: active units along a line
        for i, wi in enumerate(winindexs):
            ax2.scatter(position[wi], 0, c=c[i], s=20, marker='o', label=i)
        plt.show()

#---------------------------------
#    Watermelon dataset 3.0α
#    17 samples, 2 features per row (density, sugar content)
#---------------------------------
X=np.array([[0.697,0.46],[0.774,0.376],[0.634,0.264],[0.608,0.318],[0.556,0.215],
   [0.403,0.237],[0.481,0.149],[0.437,0.211],[0.666,0.091],[0.243,0.267],
   [0.245,0.057],[0.343,0.099],[0.639,0.161],[0.657,0.198],[0.36,0.37],
   [0.593,0.042],[0.719,0.103]])
# NOTE: Y (class labels) is never used below — SOM training is unsupervised;
# it is kept here only for reference against the clustering result.
Y=np.array([1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0])

#---------------------------------
#    Main program: train a SOM with a 3x3 output grid, then plot
#---------------------------------
outshape=[3,3]
W=train(X,outshape)
show(X,outshape,W)