from functools import *
from toolz.curried import *
#from languagetools import *
import numpy as np
from numpy.random import RandomState
import pandas as pd
import scipy as sc
from scipy import spatial
import networkx as nx
from scipy.stats import rankdata
from collections import namedtuple
import sklearn.metrics, sklearn.manifold

# Hyper-parameter container: each width/rate/lifetime field holds a Range
# that is interpolated over the run; t_max is the step budget.
Config = namedtuple(
        'OvingConfig',
        'input_neighborhood_width output_neighborhood_width '
        'learning_rate t_max lifetime'
        )

# A (start, end) pair describing a parameter schedule.
Range = namedtuple('Range', 'start end')

def factor_end(start, factor):
    """Range whose end is the start scaled by factor."""
    return Range(start=start, end=start * factor)

# Default schedules; t_max is overridden per run (see train()).
default_config = Config(
        input_neighborhood_width=factor_end(0.1, 1/60),
        output_neighborhood_width=factor_end(30, 1/10),
        learning_rate=factor_end(0.7, 1/10),
        lifetime=factor_end(0.1, 5),
        t_max=1
        )

@curry
def first_n(n, nparray):
    """First n elements of an array (curried)."""
    return nparray[0:n]

# curried so that mse(x) becomes a distance-from-x function
mse = curry(sklearn.metrics.mean_squared_error)

@curry
def apply(f, xs):
    """Map f over xs and collect the results into a numpy array (curried)."""
    return np.array([f(x) for x in xs])

def measure_distances(x, ys):
    """Squared-error distance from x to every element of ys."""
    return apply(mse(x), ys)

def get_rank(d):
    """Ordinal ranks (1 = smallest) of the values in d."""
    return rankdata(d, method='ordinal')

# https://stackoverflow.com/a/36406142/1714997
def transpose_an_array(nparr):
    """Reshape a flat array into a column vector."""
    return nparr.reshape(-1,1)

t1d = transpose_an_array

def adaptation_step(config,net,t,x):
    """One adaptation step of the network for a single input sample x.

    Three sub-steps, all visible below:
      1. move every neuron's weights toward x, weighted by the neuron's
         rank-based input-space neighborhood influence (SOM-style);
      2. connect the winner to the runner-up in the edge graph, age the
         winner's edges and drop those whose age reached the lifetime;
      3. move the output coordinates so that output-space distances to the
         winner better track the input-space distances.

    Parameters:
        config: Config namedtuple; Range-valued schedules plus t_max.
        net:    OVING namedtuple (ws=weights, zs=output coords, es=edge graph).
        t:      current step number (0..config.t_max).
        x:      one input sample; assumed same length as a row of net.ws
                (mse requires it) -- confirm against callers.

    Returns:
        (new_net, meta): net with ws/zs/es replaced, and a dict of the
        step's decayed parameter values for logging/plotting.
    """
    # squared-error distance from x to each neuron's weight vector
    input_deviations = measure_distances(x, net.ws)
    # ordinal ranks: 1 = nearest neuron
    input_ranks = pipe( input_deviations, get_rank )
    # Exponential interpolation from z (at t=0) to f (at t=config.t_max).
    # z can't be 0
    t_relative = lambda z,f: z * ( (f / z) ** (t / config.t_max) )
    input_neighborhood_width_unscaled = t_relative(
            config.input_neighborhood_width.start,
            config.input_neighborhood_width.end
            )
    neuron_count = len(net.ws)
    # width is expressed as a fraction of the network size
    input_neighborhood_width = input_neighborhood_width_unscaled * neuron_count
    # influence decays exponentially with a neuron's distance rank
    input_neighborhood_influence = \
                    np.exp(-1 * input_ranks / input_neighborhood_width)
    learning_rate = t_relative(
            config.learning_rate.start,
            config.learning_rate.end
            )

    # weight update: pull each neuron toward x, scaled per neuron
    delta_w = \
            learning_rate \
            * t1d(input_neighborhood_influence) \
            * (x - net.ws)
    new_weights = net.ws + delta_w

    # independent copy of the graph structure; NOTE(review): nx copy() shares
    # attribute *values*, but ages are ints so the += below cannot leak back
    new_edges = net.es.copy() # deep copy
    # the two lowest-ranked (nearest) neurons
    winner_ix, second_nearest_ix = \
            pipe(
                    input_ranks.argsort(),
                    first_n(2),
                    tuple
                    )
    # (re)connect winner and runner-up with a fresh edge of age 0
    new_edges.add_edge(winner_ix, second_nearest_ix, age=0)

    lifetime_unscaled = t_relative(
            config.lifetime.start,
            config.lifetime.end
            )
    # lifetime is expressed as a fraction of the total step budget
    lifetime = lifetime_unscaled * config.t_max
    edges_connected_to_winner = new_edges.edges(nbunch=winner_ix)
    is_lifetime_unreached = lambda lifetime, age: lifetime > age
    # list(...) so that edges can be removed while iterating over them
    for (k, j) in list(edges_connected_to_winner):
        age = new_edges[k][j]['age']
        if is_lifetime_unreached(lifetime, age):
            # edge survives this step but grows older
            new_edges[k][j]['age'] += 1
        else:
            new_edges.remove_edge(k,j)

    # same rank-based neighborhood machinery, now in output space,
    # measured from the winner's output coordinates
    zs_winner = net.zs[winner_ix]
    output_deviations = measure_distances(zs_winner, net.zs)
    output_ranks = pipe( output_deviations, get_rank )
    output_neighborhood_width_unscaled = t_relative(
            config.output_neighborhood_width.start,
            config.output_neighborhood_width.end
            )
    output_neighborhood_width = \
            output_neighborhood_width_unscaled * neuron_count
    output_neighborhood_influence = \
            np.exp(-1 * output_ranks / output_neighborhood_width)

    # guard against division by zero below
    replace_zero = lambda arr: \
            pipe(
                    arr,
                    map(lambda x: x if x != 0 else 1e-10),
                    list
                    )
    
    # relative mismatch between output-space and input-space distances;
    # positive => output points are too far apart, negative => too close
    _devs = (output_deviations - input_deviations) / replace_zero(output_deviations)
    delta_z = \
            learning_rate \
            * t1d(output_neighborhood_influence) \
            * t1d(_devs) \
            * ( zs_winner - net.zs )
    new_zs = net.zs + delta_z

    output = net \
            ._replace(ws = new_weights) \
            ._replace(zs = new_zs) \
            ._replace(es = new_edges)

    # decayed parameter values of this step, for logging/figure titles
    meta = dict(
            t=t,
            learning_rate=learning_rate,
            input_neighborhood_width_unscaled=\
                    input_neighborhood_width_unscaled,
            output_neighborhood_width_unscaled=\
                    output_neighborhood_width_unscaled,
            lifetime_unscaled=lifetime_unscaled
            )

    return (output, meta)

def fire_for_winner(net, x):
    """Output-space coordinates of the neuron whose weights best match x."""
    winner_ix = np.argmin(measure_distances(x, net.ws))
    return net.zs[winner_ix]

# The network state: ws = input-space weights, zs = output-space
# coordinates, es = edge graph (neuron connections).
OVING = namedtuple('OVING', 'ws zs es')

def sample(nparr, n, random_state=None):
    """Draw n rows from nparr uniformly with replacement.

    Parameters:
        nparr: 2-D array-like (rows are the population to sample from).
        n: number of rows to draw.
        random_state: optional numpy RandomState for reproducibility;
            a fresh one is created when omitted.

    Returns:
        numpy array of the n sampled rows.
    """
    # The original signature used random_state=RandomState() as the default,
    # which is evaluated once at def time and then shared (and advanced) by
    # every call -- the classic mutable-default pitfall. Use a None sentinel.
    rs = RandomState() if random_state is None else random_state
    return \
            pd.DataFrame(nparr)\
            .sample(n=n, replace=True, random_state=rs)\
            .values

def initialise_oving(data, neuron_count):
    """Fresh OVING net: weights sampled from the data, random 2-D output
    coordinates, and no connections yet."""
    weights = sample(data, neuron_count)
    # output space is fixed at two dimensions
    outputs = np.random.rand(len(weights), 2)
    connections = nx.Graph()
    return OVING(ws=weights, zs=outputs, es=connections)

@curry
def is_multiple_of(n, x):
    """True when x is an exact multiple of n (curried)."""
    return x % n == 0

def rand_ix(col):
    """Uniformly random integer index into col."""
    return np.random.choice(len(col))

def rand_df_el(df):
    """A uniformly random row of the dataframe df."""
    return df.iloc[rand_ix(df)]

def train(data_df, t_max=100, neuron_count=60):
    """Run the OVING adaptation loop over randomly drawn rows of data_df.

    Returns (net, recorded): the trained network and a list of periodic
    snapshots, each a dict with the output graph and step metadata.
    """
    config = default_config._replace(t_max=t_max)
    net = initialise_oving(data_df, neuron_count)
    t_max = config.t_max
    # With t_max // 7 (when t_max % 7 == 0) the gap between recorded[-1]
    # and recorded[-2] comes out even and you get 8 slides.
    record_every = t_max // 8 or 1
    recorded = []
    for t in range(t_max + 1):
        x = rand_df_el(data_df).values
        net, meta = adaptation_step(config, net, t, x)
        at_interval = t != 0 and is_multiple_of(record_every, t)
        if at_interval or t == t_max:
            print(f'step {t}/{t_max}')
            recorded.append(dict(graph=construct_graph(net), meta=meta))
    return (net, recorded)

def construct_graph(oving):
    """Build a networkx graph of the net: edges from es, node 'pos'
    attributes from zs."""
    G = nx.Graph()
    # Edge 'age' attributes are deliberately dropped: bokeh chokes on them.
    # tolist() converts numpy node ids to plain Python ints.
    G.add_edges_from(np.array(list(oving.es.edges)).tolist())
    # Node ids must also be plain Python ints, otherwise bokeh's
    # from_networkx blows up.
    nodes = [
            (node_id, dict(pos=coords))
            for node_id, coords in enumerate(oving.zs)
            ]
    G.add_nodes_from(nodes)
    return G

# turi grazinti { node: [x,y], node2: ... }
# naudoti su bokeh from_networkx metoda
# Must return { node: [x,y], node2: ... } -- for use with bokeh's
# from_networkx layout hook.
def pos_attribute_layout(G):
    """Map each node to its [x, y] taken from the 'pos' node attribute."""
    return {node: data['pos'] for node, data in G.nodes.data()}

from bokeh.io import output_file, show, save
from bokeh.layouts import widgetbox, column, gridplot
from bokeh.plotting import figure
from bokeh.models.graphs import from_networkx
from bokeh.models.markers import Diamond

@curry
def to_figure(record):
    """Render one recorded snapshot as a small bokeh figure, with the
    step's decayed parameters summarized in the title."""
    meta = record['meta']
    t = meta['t']
    learning_rate = meta['learning_rate']
    input_neighborhood_width_unscaled = \
            meta['input_neighborhood_width_unscaled']
    output_neighborhood_width_unscaled = \
            meta['output_neighborhood_width_unscaled']
    lifetime_unscaled = meta['lifetime_unscaled']
    title = (
            f"t={t}"
            f",lr={learning_rate:.2f}"
            f",inw={input_neighborhood_width_unscaled:.4f}"
            f",onw={output_neighborhood_width_unscaled:.2f}"
            f",lt={lifetime_unscaled:.2f}"
            )
    fig = figure(title=title, width=300, height=300)
    drawing = from_networkx(record['graph'], pos_attribute_layout)
    drawing.node_renderer.glyph = Diamond(size=6)
    fig.renderers.append(drawing)
    return fig

def get_ranges(recorded):
    """Shared bokeh x/y ranges covering every node position in every
    recorded snapshot, padded by 5% on each side."""
    from bokeh.models.ranges import DataRange1d
    coords = [
            data['pos']
            for record in recorded
            for _, data in record['graph'].nodes.data()
            ]
    xs = [c[0] for c in coords]
    ys = [c[1] for c in coords]
    min_x, max_x = min(xs), max(xs)
    min_y, max_y = min(ys), max(ys)
    padding_factor = 0.05
    x_padding = (max_x - min_x) * padding_factor
    y_padding = (max_y - min_y) * padding_factor
    x_range = DataRange1d(start=min_x - x_padding, end=max_x + x_padding)
    y_range = DataRange1d(start=min_y - y_padding, end=max_y + y_padding)
    return (x_range, y_range)

@curry
def apply_ranges(x_range, y_range, f):
    """Assign shared axis ranges to figure f and return it (curried)."""
    f.x_range = x_range
    f.y_range = y_range
    return f

def visualise(recorded):
    """Save all recorded snapshots to viz.html as a two-row grid of
    figures sharing the same axis ranges."""
    import math
    (x_range, y_range) = get_ranges(recorded)
    figs = [
            apply_ranges(x_range, y_range, to_figure(r))
            for r in recorded
            ]
    # split into two roughly equal rows
    mid_ix = math.ceil(len(figs) / 2)
    output_file("viz.html")
    save(gridplot([
        figs[:mid_ix],
        figs[mid_ix:]
        ]))

def normalize(df):
    """Mean-centre df and scale by its value range (column-wise for a
    DataFrame)."""
    return (df - df.mean()) / (df.max() - df.min())

def make_3d_point_circle(n, radius):
    """Sample n random points on a circle of the given radius lying in
    the z=0 plane; returns an (n, 3) array."""
    angles = np.random.rand(n) * np.pi * 2
    xs = np.cos(angles) * radius
    ys = np.sin(angles) * radius
    zeros = np.zeros(n)
    return np.array(list(zip(xs, ys, zeros)))

# The z axis could instead be scaled with 1 and -2; that would give a
# diagonal movement in two dimensions.
def make_3d_skersas(n, radius):
    """n random points on the diagonal segment from (0, radius, radius)
    toward (0, -radius, -radius)."""
    base = np.array([0, radius, radius])
    direction = np.array([0, radius * -2, radius * -2])
    return base + direction * np.random.rand(n).reshape(-1, 1)

def make_3d_complex(n, radius):
    """Combined test shape: one third diagonal segment, two thirds circle."""
    third = n // 3
    parts = [
            make_3d_skersas(third, radius),
            make_3d_point_circle(third * 2, radius),
            ]
    return np.concatenate(parts)

def ovingzs2mds(oving):
    """Return a copy of the net whose output coordinates are an MDS
    embedding of its weights.

    Parameters:
        oving: OVING namedtuple.

    Returns:
        OVING with zs replaced by the 2-D MDS embedding of ws.
    """
    mds = sklearn.manifold.MDS(n_jobs=-1)
    # Bug fix: fit() returns the fitted estimator object, not the embedded
    # coordinates, so zs used to end up holding the MDS instance itself.
    # fit_transform() returns the embedding (cf. viz_oving_with_mds below).
    zs = mds.fit_transform(oving.ws)
    return oving._replace(zs=zs)

def viz_oving_with_mds(oving):
    """Embed the net's weights into 2-D with MDS and save the resulting
    graph to viz2.html.

    Parameters:
        oving: OVING namedtuple.
    """
    # Bug fix: DataRange1d was never imported at this scope (only inside
    # get_ranges), so the range assignments below raised NameError.
    from bokeh.models.ranges import DataRange1d
    mds = sklearn.manifold.MDS(n_jobs=-1)
    zs = mds.fit_transform(oving.ws)
    print(f"mds stress {mds.stress_}")
    oving_mds = oving._replace(zs=zs)
    # Bug fix: this used to call ov.construct_graph, but no name `ov` is
    # defined in this module; construct_graph lives right here.
    graph = construct_graph(oving_mds)
    fig = figure(width=300, height=300)
    graph_drawing = from_networkx(graph, pos_attribute_layout)
    graph_drawing.node_renderer.glyph = Diamond(size=6)
    fig.renderers.append(graph_drawing)
    fig.x_range = DataRange1d(start=-2, end=2)
    fig.y_range = DataRange1d(start=-2, end=2)
    output_file("viz2.html")
    save(fig)
