import numpy as np
import sklearn
import sklearn.datasets
import matplotlib.pyplot as plt
import data
import misc

class LVQ(object):
    """Learning Vector Quantization classifier.

    ``w1`` is the prototype-to-class assignment matrix of shape
    (n_prototypes, n_classes): ``w1[i, r] != 0`` means prototype ``i``
    represents class label ``cs[r]``.
    """

    def __init__(self, w1, cs=None, step=100):
        # w[0]: prototype vectors (initialised in train); w[1]: prototype->class matrix.
        self.w = [None, w1]
        # Class labels, one per column of w1.  Was `[range(len(w1))]` -- a
        # one-element list holding a range object, which broke index()/classify.
        self.cs = cs if cs is not None else list(range(len(w1)))
        self.step = step  # number of passes over the training set

    def train(self, xs, cs):
        """Fit the prototypes to samples ``xs`` (rows) with labels ``cs``."""
        assert len(xs) == len(cs)

        # Normalization is currently disabled; keep the identity transform so
        # classify() can apply the same (x - min) / range mapping.
        (_xs, self.ranges, self.min_values) = (xs, 1, 0)

        # Random prototype init sized to the data.  Was a hard-coded 2x2
        # debug matrix that crashed on any dataset with a different shape.
        self.w[0] = np.random.rand(len(self.w[1]), len(xs[0, :]))

        for s in range(self.step):
            self._train_circle(_xs, cs, s)
            print(self.w[0])

    def _train_circle(self, xs, cs, step):
        # One full pass over the training set.
        for i in range(len(xs)):
            self._train_x(xs[i], cs[i], step)

    def _train_x(self, x, c, step):
        """Move the nearest prototype toward ``x`` if its class matches ``c``,
        away from it otherwise."""
        # Linearly decaying learning rate, consistent with SOM._train_x.
        # (Was a stub returning 0.5 that ignored both arguments.)
        eta = lambda e0, t: e0 * (1 - t / self.step)

        # Index of the nearest prototype (argmin instead of argsort()[0]).
        i = np.linalg.norm(x - self.w[0], axis=1).argmin()
        t = np.zeros(len(self.w[1]))
        t[i] = 1

        r = self.cs.index(c)
        delta = eta(0.1, step) * (x - self.w[0][i, :])

        # Pull toward x when the winner belongs to class c, push away otherwise.
        self.w[0][i, :] += (1 if np.dot(t, self.w[1][:, r]) != 0 else -1) * delta

    def classify(self, xs):
        """Return the predicted class label for every row of ``xs``."""
        cs = []
        for x in (xs - self.min_values) / self.ranges:
            i = np.linalg.norm(self.w[0] - x, axis=1).argmin()
            t = np.zeros(len(self.w[1]))
            t[i] = 1
            cs.append(np.array(self.cs)[np.dot(t, self.w[1]) > 0][0])

        return cs

        


def todo():
    """Train an LVQ classifier on the dating data set and print its
    training-set accuracy."""
    (xs, cs) = data.samples.get_dating_persons()
    xs = np.array(xs, dtype='float')

    # One prototype per class; column order matches the labels given to LVQ.
    w1 = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])

    # The construction was commented out, leaving `classifier` undefined
    # (NameError on the next line).
    classifier = LVQ(w1, ['didntLike', 'smallDoses', 'largeDoses'], 1000)
    classifier.train(xs, cs)
    _cs = classifier.classify(xs)

    # Fraction of training samples classified correctly.
    count = sum(1 for (a, b) in zip(cs, _cs) if a == b)
    print(count, len(cs), count / len(cs))

def som(xs, w):
    """Ten competitive-learning passes over ``xs``: for each sample the
    closest entry of ``w`` moves halfway toward it (``w`` mutated in place)."""
    for _ in range(10):
        for sample in xs:
            winner = ((sample - w) ** 2).argsort()[0]
            w[winner] += 0.5 * (sample - w[winner])
            print('w', w)

class SOM(object):
    """Minimal self-organizing map: ``n`` prototype vectors fitted by
    winner-take-all online updates."""

    def __init__(self, n, step=100):
        self.number = n    # number of prototype vectors
        self.step = step   # training passes over the data

    def train(self, xs):
        """Fit the prototypes to samples ``xs`` (rows)."""
        # misc.normalize presumably maps columns to [0, 1]; ranges/min_values
        # are kept so classify() can apply the same transform -- TODO confirm
        # against misc.normalize's actual contract.
        (_xs, self.ranges, self.min_values) = misc.normalize(xs)

        self.w = np.random.rand(self.number, len(xs[0, :]))

        for s in range(self.step):
            self._train_circle(_xs, s)
            print(self.w)

    def _train_circle(self, xs, step):
        # One full pass over the training set.
        for i in range(len(xs)):
            self._train_x(xs[i], step)

    def _train_x(self, x, step):
        """Move the nearest prototype toward sample ``x``."""
        # Linearly decaying learning rate.
        eta = lambda e0, t: e0 * (1 - t / self.step)

        # Winner index (argmin instead of argsort()[0]); the dead `delta`
        # local that duplicated the update expression has been removed.
        i = np.linalg.norm(x - self.w, axis=1).argmin()
        self.w[i, :] += eta(0.1, step) * (x - self.w[i, :])

    def classify(self, xs):
        """Return the index of the nearest prototype for every row of ``xs``."""
        cs = []
        for x in (xs - self.min_values) / self.ranges:
            cs.append(np.linalg.norm(self.w - x, axis=1).argmin())

        return cs

class BP(object):
    """Fully-connected feed-forward network trained with backpropagation.

    ``levels`` lists the layer sizes, e.g. ``[2, 4, 1]``.  ``funcs`` holds one
    ``[activation, derivative]`` pair per weight layer; defaults to the
    logistic sigmoid everywhere.
    """

    def __init__(self, levels, funcs=None):
        f = lambda x: 1 / (1 + np.exp(-x))
        df = lambda x: f(x) * (1 - f(x))

        # ws[k] has shape (levels[k], levels[k+1]); bs[k] is a 1-row bias.
        self.ws = [np.random.rand(i, j) for (i, j) in zip(levels[:-1], levels[1:])]
        self.bs = [np.random.rand(1, i) for i in levels[1:]]

        self.funcs = funcs if funcs else [[f, df]] * len(self.bs)

    def classify(self, xs):
        """Forward-propagate every row of ``xs``; returns an array of outputs."""
        return np.array([self._classify_one(x) for x in xs])

    def _classify_one(self, x):
        # Delegate to the shared forward pass (was a copy-paste duplicate
        # of BP._forward that also built an unused pre-activation list).
        (ls, rs) = BP._forward(x, self.ws, self.bs, self.funcs)
        return rs[-1][0]

    def train(self, xs, ys, epochs=1000):
        """Run ``epochs`` passes of per-sample gradient descent over (xs, ys)."""
        for _ in range(epochs):
            for (x, y) in zip(xs, ys):
                self._train(np.array(x, ndmin=2), np.array(y, ndmin=2))

    def _train(self, x, y):
        '''http://ufldl.stanford.edu/wiki/index.php/Backpropagation_Algorithm'''
        (ls, rs) = BP._forward(x, self.ws, self.bs, self.funcs)

        # Output-layer error, then backpropagate through the hidden layers.
        # (A per-sample debug print of the cost was removed from this loop.)
        δs = [None] * len(self.ws)
        δs[-1] = (rs[-1] - y) * self.funcs[-1][1](ls[-1])

        for i in range(len(δs) - 1, 0, -1):
            δs[i - 1] = np.dot(δs[i], self.ws[i].T) * self.funcs[i - 1][1](ls[i])

        # Gradients: dW[k] = r[k]^T δ[k]; db[k] = δ[k].
        (dws, dbs) = ([], [])
        for (δ, r) in zip(δs, rs):
            dws.append(np.dot(r.T, δ))
            dbs.append(δ)

        # Fixed learning rate of 0.5.
        for i in range(len(dws)):
            self.ws[i] -= 0.5 * dws[i]
            self.bs[i] -= 0.5 * dbs[i]

    @staticmethod
    def _forward(x, ws, bs, funcs):
        """Return ``(ls, rs)``: pre-activations and activations per layer;
        ``ls[0]`` is None and ``rs[0]`` is the input ``x``."""
        (ls, rs) = ([None], [x])
        for (w, b, (f, df)) in zip(ws, bs, funcs):
            ls.append(np.dot(rs[-1], w) + b)
            rs.append(f(ls[-1]))
        return (ls, rs)

def _test_bp_1():
    '''https://mattmazur.com/2015/03/17/a-step-by-step-backpropagation-example/'''
    xs = [np.array([0.05, 0.10])]
    ys = [np.array([0.01, 0.99])]

    # Identity activations on both layers.
    funcs = [[lambda x: x, lambda x: 1], [lambda x: x, lambda x: 1]]

    # Activations belong to the constructor: `bp.train(xs, ys, funcs)` was a
    # TypeError because BP.train takes no funcs argument.
    bp = BP([2, 2, 2], funcs)
    bp.train(xs, ys)

def _test_bp_0():
    """Fit y = 2x on [0, 1) with a linear 1-2-1 network and print the fit."""
    xs = np.array([[i] for i in range(10)], dtype=float) / 10
    ys = xs * 2

    # Identity activations throughout.  (A sigmoid/quadratic funcs list was
    # previously assigned and immediately overwritten -- dead code, removed.)
    funcs = [[lambda x: x, lambda x: 1], [lambda x: x, lambda x: 1]]

    bp = BP([1, 2, 1], funcs)
    bp.train(xs, ys)

    _ys = bp.classify(xs)
    print(_ys, ys)
    
    
def _test_bp_xor():
    """Learn XOR with a sigmoid -> ReLU -> identity 2-4-4-1 network, then
    sanity-check the shared forward pass on a hand-built XOR net."""
    xs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
    ys = np.array([0, 1, 1, 0], dtype=float)

    f = lambda x: 1 / (1 + np.exp(-x))
    df = lambda x: f(x) * (1 - f(x))
    # sigmoid, ReLU, identity.  (A shorter funcs list was previously
    # assigned and immediately overwritten -- dead code, removed.)
    funcs = [[f, df], [lambda x: x * (x >= 0), lambda x: 1 * (x >= 0)], [lambda x: x, lambda x: 1]]

    bp = BP([2, 4, 4, 1], funcs)
    bp.train(xs, ys)
    _ys = bp.classify(xs)
    print(_ys, ys)

    # Hand-built 2-2-1 network with known weights, pushed through _forward.
    (ls, rs) = BP._forward(np.array([0, 1], ndmin=2), [np.array([[1, 1], [1, 1]], dtype=float), np.array([1, -2.0])], [np.array([0, -1.0]), np.array([0.0])], funcs)
    print(ls, rs)

def _test_som_0():
    """Cluster two obvious point groups with a 2-prototype SOM, then print
    the prototype indices assigned to seen and unseen points."""
    train_points = np.array([[1, 1.1], [1, 1], [0, 0], [0, 0.1]])
    clusterer = SOM(2, 100)
    clusterer.train(train_points)
    print(clusterer.classify(train_points))

    test_points = np.array([[1.4, 1.1], [1.2, 9.7], [0.3, 0.1], [0.4, 0.1]])
    print(clusterer.classify(test_points))

def _test_0():
    '''http://ccy.dd.ncu.edu.tw/~chen/course/neural/ch4/index.htm'''
    # Six 2-D samples from the linked worked example, two classes (+1/-1).
    samples = np.array([[1, 3], [3, 4], [6, 1], [8, 3], [9, 1], [1, 6]], dtype=float)
    labels = [1, 1, -1, -1, -1, 1]

    # Two prototypes, one per class, trained for a single pass.
    lvq = LVQ(np.array([[1, 0], [0, 1]]), [1, -1], 1)
    lvq.train(samples, labels)

def plot_decision_boundary(pred_func, X, y):
    """Shade the 2-D decision regions of ``pred_func`` and overlay the data.

    ``X`` is an (n, 2) array of points; ``y`` holds their labels (used only
    for coloring the scatter plot).
    """
    # Set min and max values and give it some padding
    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
    h = 0.01  # grid resolution
    # Generate a grid of points with distance h between them
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Predict the function value for the whole grid.  (Debug prints of the
    # grid, predictions, and shapes were removed.)
    Z = pred_func(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Plot the contour and training examples
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Spectral)

def _test_bp_moon():
    '''http://www.wildml.com/2015/09/implementing-a-neural-network-from-scratch/'''
    np.random.seed(0)
    # Concentric-circles data set (the moons variant is a drop-in swap).
    X, y = sklearn.datasets.make_circles(200, noise=0.020)
    plt.scatter(X[:, 0], X[:, 1], s=40, c=y, cmap=plt.cm.Spectral)

    # tanh hidden layers, sigmoid output.
    sigmoid = lambda x: 1 / (1 + np.exp(-x))
    dsigmoid = lambda x: sigmoid(x) * (1 - sigmoid(x))
    dtanh = lambda x: 1 - np.tanh(x) ** 2
    funcs = [[np.tanh, dtanh], [np.tanh, dtanh], [sigmoid, dsigmoid]]

    net = BP([2, 10, 10, 1], funcs)
    net.train(X, y)

    # Threshold the sigmoid output at 0.5 to get a binary decision.
    predict = lambda pts: net.classify(pts) > 0.5

    # Training-set accuracy.
    _y = [int(out > 0.5) for out in net.classify(X)]
    print(np.sum(_y == y) / len(y))

    plot_decision_boundary(predict, X, y)
    plt.show()
# Run the circles demo when executed as a script.
if __name__ == '__main__':
    _test_bp_moon()