from functools import *
from toolz.curried import *
#from languagetools import *
import numpy as np
from numpy.random import RandomState
import pandas as pd
#import scipy as sc
#import networkx as nx
from scipy.stats import rankdata
from scipy.spatial.distance import pdist
from sklearn.neighbors.kde import KernelDensity
from collections import namedtuple
import sklearn.metrics, sklearn.manifold

from antra_versija.oving import t1d, get_rank, sample
from timeit import default_timer as current_time

# Training configuration; currently only the number of adaptation steps.
Config = namedtuple('BngConfig', ['t_max'])

#Range = namedtuple('Range', ['start','end'])
#factor_end = lambda start, factor: Range(start=start, end=start*factor)

# Defaults used by train() unless overridden via _replace().
default_config = Config(t_max=5)

def squared_euclidean(xs, ys):
    """Pairwise squared Euclidean distances between the rows of xs and ys."""
    return sklearn.metrics.pairwise.euclidean_distances(xs, ys, squared=True)

# The distance measure used throughout training; an alias so it can be swapped.
measure_distances = squared_euclidean

# NOTE(review): the decay argument is a neuron's *rank*, not the time 't'
# (translated from the original Lithuanian comment).
# Exponentially decaying neighborhood kernel: influence = exp(-rank / width).
neighborhood_f = curry(
        lambda neighborhood_width, rank: np.exp(-rank / neighborhood_width)
        )

def nparr_map(f, xs):
    """Apply f to every element of xs and collect the results into a numpy array."""
    return np.array(list(map(f, xs)))

from nolitsa import d2

def calc_correlation_dimension(data_arr):
    """Estimate the correlation dimension of data_arr, averaged over scales,
    using nolitsa's correlation-sum (d2.c2) and local-slope (d2.d2) routines."""
    c2 = d2.c2(data_arr)
    return np.mean(d2.d2(c2[0], c2[1]))

def adaptation_step(config, ws, t, xs, mc):
    """Perform one batch adaptation step of the neural gas.

    Parameters:
        config: Config -- provides t_max, the total number of steps.
        ws: array (neuron_count, dim) -- current neuron weights.
        t: int -- current step, 0-based.
        xs: array (sample_count, dim) -- input data.
        mc: array (sample_count,) -- per-sample magnification-control weights.

    Returns the updated weight array with the same shape as ws.

    FIX vs original: removed the dead local aliases (nw, ds, rs, nbi,
    magnification_control) that were assigned but never used.
    """
    neuron_count = len(ws)
    nw0 = neuron_count / 2
    t_max = config.t_max
    # Neighborhood width shrinks exponentially from nw0 towards 0.01 over the run.
    neighborhood_width = \
            nw0 * np.power(0.01 / nw0, t / t_max)
    #print(f'nw {neighborhood_width}')
    distances = measure_distances(ws, xs)
    # Rank the neurons per input sample (transpose so ranking runs over neurons).
    # Assumes get_rank returns 1-based ranks -- hence the shift below.
    ranks = nparr_map( get_rank, distances.T ).T
    ranks = ranks - 1 # so that ranks start at 0
    neighborhood_influence = neighborhood_f(neighborhood_width, ranks)

    # Weight each neuron/sample pair by neighborhood influence and the
    # per-sample magnification control, then move every neuron to the
    # weighted mean of the inputs (t1d presumably reshapes the row sums so
    # the division broadcasts -- confirm against its definition).
    first_part = neighborhood_influence * mc
    dividend = first_part.dot(xs)
    divisor = t1d( first_part.sum(axis=1) )
    new_ws = dividend / divisor
    return new_ws

def initialise_bng(data, neuron_count, random_state=None):
    """Initialise neuron weights by sampling neuron_count points from data.

    A small uniform noise in [-0.05, 0.05) is added so no two neurons start
    identical.

    FIX vs original: the default was `random_state=RandomState()`, which is
    evaluated once at import time and therefore shared (and mutated) across
    calls; a None sentinel creates a fresh RandomState per call instead.
    """
    if random_state is None:
        random_state = RandomState()
    ws = sample(data, neuron_count, random_state)
    #ws = np.random.rand(neuron_count, data_df.shape[1])
    # to prevent identical neurons
    noise = (random_state.random_sample(ws.shape) - 0.5) / 10
    return ws + noise

def shuffle_df(df):
    """Return df with its rows in a random order (each row keeps its index)."""
    return df.sample(n=len(df))

def get_local_density(xs, bandwidth):
    """Estimate each sample's local density with a Gaussian KDE fitted on xs.

    score_samples returns log-densities, so exponentiate to get densities.
    """
    log_densities = KernelDensity(bandwidth=bandwidth).fit(xs).score_samples(xs)
    return np.exp(log_densities)

def train(
        data,
        t_max=3,
        neuron_count=50,
        information_transfer_rate=1,
        random_state=RandomState()
        ):
    """Train a batch neural gas with magnification control on `data`.

    Parameters:
        data: array-like (n_samples, n_features) input points.
        t_max: number of adaptation epochs; the loop below runs t_max + 1
            steps (t = 0 .. t_max inclusive) -- presumably so the final step
            uses the minimum neighborhood width; confirm intent.
        neuron_count: number of neurons to fit.
        information_transfer_rate: drives the magnification exponent below.
        random_state: NOTE(review) -- the default RandomState() is created
            once at import time and shared between calls.

    Returns the trained weight array of shape (neuron_count, n_features).
    """
    start_time = current_time()
    config = default_config._replace(t_max=t_max)
    ws = initialise_bng(data, neuron_count, random_state=random_state)
    t = 0
    t_max = config.t_max
    xs = pipe( data, np.asanyarray )
    # KDE bandwidth heuristic: a third of the mean pairwise input distance.
    input_distance_average = pipe( xs, pdist, np.mean)
    local_density = get_local_density(xs, bandwidth=input_distance_average/3)
    ld = local_density
    correlation_dimension = calc_correlation_dimension(xs)
    corr_d = correlation_dimension
    # Magnification exponent: m = itr + 2*itr/corr_d - 1.
    calc_m = lambda itr,corr_d: itr + ((2*itr)/corr_d) - 1
    magnification = calc_m(information_transfer_rate, correlation_dimension)
    m = magnification
    # Per-sample control weights: density raised to the magnification
    # exponent, normalised so the maximum is 1.
    # (ld, corr_d and m above are unused debugging aliases.)
    magnification_control = np.power(local_density, magnification)
    mc = magnification_control
    mc = mc / mc.max()
    #mc = 1
    #import pdb; pdb.set_trace() 
    while t <= t_max:
        print(f'step {t}')
        ws = adaptation_step(config, ws, t, xs, mc)
        t = t + 1
    print_duration(start_time)
    return ws

from bokeh.io import output_file, show, save
from bokeh.layouts import widgetbox, column, gridplot, row
from bokeh.plotting import figure
from bokeh.palettes import *
from bokeh.models.sources import ColumnDataSource
from bokeh.models.graphs import from_networkx
from bokeh.models.markers import Diamond

def print_duration(start_time):
    """Print the wall-clock seconds elapsed since start_time, to one decimal."""
    elapsed = current_time() - start_time
    print(f'elapsed {elapsed:.1f}s')

def non_empty(x):
    """True when the sized container x holds at least one element."""
    return len(x) > 0

def contains(what, x):
    """Membership test via x.__contains__ (note the order: needle first)."""
    return x.__contains__(what)

# Predicate inverse of `contains`: True when `what` is absent from x.
doesnt_contain = complement(contains)

def coords_to_xs_and_ys(zs):
    """Split an (n, 2)-shaped sequence of points into separate x and y arrays."""
    transposed = np.asanyarray(zs).T
    return (transposed[0], transposed[1])

def make_big_bounding_square(zs, factor=10):
    """Return the four corners of a square that extends `factor` times the
    larger bounding-box dimension beyond the points `zs` on every side
    (used to pad Voronoi input so real cells stay finite)."""
    points = np.asanyarray(zs).T
    xs, ys = points[0], points[1]
    x_width = np.abs(xs.max() - xs.min())
    y_width = np.abs(ys.max() - ys.min())
    pad = max(x_width, y_width) * factor
    x_hi, x_lo = xs.max() + pad, xs.min() - pad
    y_hi, y_lo = ys.max() + pad, ys.min() - pad
    # Corners in the same (clockwise-ish) order as the original.
    return [
            [x_hi, y_hi],
            [x_hi, y_lo],
            [x_lo, y_lo],
            [x_lo, y_hi]
            ]

def get_box(zs):
    """Axis-aligned bounding box of the points: [[x_min, x_max], [y_min, y_max]]."""
    points = np.asanyarray(zs).T
    xs, ys = points[0], points[1]
    return np.array([[xs.min(), xs.max()], [ys.min(), ys.max()]])

def is_vertex_not_too_far(box, vertex, tolerance=1.3):
    """True when `vertex` (x, y) lies inside `box` scaled by `tolerance`.

    `box` is an array [[x_min, x_max], [y_min, y_max]] as produced by get_box.
    NOTE(review): the scaling multiplies *all four* bounds by `tolerance`,
    which only moves a bound outward when it lies on the far side of zero --
    confirm this is the intended notion of tolerance.
    """
    scaled = box * tolerance
    (x_min, x_max), (y_min, y_max) = scaled
    x, y = vertex[0], vertex[1]
    return x_min <= x <= x_max and y_min <= y <= y_max
# Curried so the box can be partially applied first.
is_vertex_not_too_far = curry(is_vertex_not_too_far)

# FIX: the original lambda closed over a module-level name `box` that is
# never defined, so every call raised NameError. The box is now an explicit
# (curried) first argument, matching the local variant in get_voronoi_regions.
are_vertices_not_too_far = curry(
        lambda box, region:
        pipe( region, map(is_vertex_not_too_far(box)), all )
        )

def get_voronoi_regions(fig, zs):
    """Compute the Voronoi tessellation of the points `zs` and return its
    closed regions as a list of [xs, ys] coordinate arrays (one per region).

    The input is padded with a huge bounding square so the cells around the
    real points are all finite. `fig` is currently unused.
    """
    start_time = current_time()
    from scipy.spatial import Voronoi
    padded_points = np.concatenate([ zs, make_big_bounding_square(zs) ])
    #padded_points = zs
    vor = Voronoi(padded_points)
    box = get_box(zs)
    # Kept for the optional distance filter below (currently disabled).
    are_vertices_not_too_far = lambda region: (
            pipe( region, map(is_vertex_not_too_far(box)), all ) )
    regions = []
    for region in vor.regions:
        # Skip the empty region and any region touching infinity (index -1).
        if not region or -1 in region:
            continue
        #if not are_vertices_not_too_far(vor.vertices[region]): continue
        regions.append(np.transpose(vor.vertices[region]))
    print_duration(start_time)
    return regions

from scipy.spatial import ConvexHull
# Area enclosed by the convex hull of `points`.
# FIX: for 2-D input, scipy's ConvexHull.area is the *perimeter*; the
# enclosed area is ConvexHull.volume (scipy documents this explicitly).
area_of_polygon = lambda points: ConvexHull(points).volume

def get_ranges_with_margin(zs, margin=0.05):
    """Return ((x_lo, x_hi), (y_lo, y_hi)) plot ranges for the points `zs`:
    the bounding box grown to a square (the shorter axis is centred) plus a
    relative `margin` added on every side."""
    def width_of(bounds):
        return np.abs(bounds[0] - bounds[1])

    def widen(bounds, extra):
        # Symmetrically extend both ends by `extra`.
        return (bounds[0] - extra, bounds[1] + extra)

    points = np.asanyarray(zs).T
    left_right = (points[0].min(), points[0].max())
    top_bottom = (points[1].min(), points[1].max())
    x_width = width_of(left_right)
    y_width = width_of(top_bottom)
    # Grow the narrower axis so both spans match (square aspect ratio).
    if x_width > y_width:
        top_bottom = widen(top_bottom, (x_width - y_width) / 2)
    else:
        left_right = widen(left_right, (y_width - x_width) / 2)
    # Finally add the relative margin on each side.
    left_right = widen(left_right, width_of(left_right) * margin)
    top_bottom = widen(top_bottom, width_of(top_bottom) * margin)
    return (left_right, top_bottom)

def run_net(ws, test_data):
    """Assign each test sample's ngram to the pile of its closest neuron.

    test_data is a dict with 'data' (sample vectors) and 'ngrams' (labels in
    the same order). Returns one list of ngrams per neuron, in neuron order.
    """
    data = test_data['data']
    ngrams_out_of_order = test_data['ngrams']
    distances = measure_distances(data, ws)
    closest_w_ixs = np.argmin(distances, axis=1)
    piled_ngrams = [[] for _ in range(len(ws))]
    for x_ix, closest_w_ix in enumerate(closest_w_ixs):
        piled_ngrams[closest_w_ix].append(ngrams_out_of_order[x_ix])
    return piled_ngrams

# Locations of the preprocessed google-books 1-gram CSVs.
CSV_DIR = '/home/domas/data/google-books-1gram/'
CURVES_CSV = CSV_DIR \
        + 'yearly_fractions_nonull_1808<=' \
        + '_top25k_processed_every3rd_nonull' \
        + '.csv'
# FIX: the original was missing the line-continuation backslash before
# "+ '.csv'", which made that line a separate statement (unary plus on a
# string -> TypeError at import) and left ANGLES_CSV without its extension.
ANGLES_CSV = CSV_DIR \
        + 'yearly_fractions_nonull_1808<=' \
        + '_top25k_processed_every3rd_nonull' \
        + '_angles' \
        + '.csv'

def read_max_fractions():
    """Read the per-ngram maximum-fractions table used by read_test_data.

    FIX: the original line was the bare, body-less (and syntactically
    invalid) `def read_max_fractions()`, which broke the whole module at
    import. The intended data source is not visible in this file, so this
    stub fails loudly instead.
    TODO(review): restore the real CSV read -- read_test_data expects a
    DataFrame with an 'ngram' column.
    """
    raise NotImplementedError(
            'read_max_fractions: original implementation missing from source'
            )

def read_curves():
    """Load the yearly-fraction curves CSV, renaming 'year_fraction' to 'curve'."""
    return pd.read_csv(CURVES_CSV) \
            .rename(index=str, columns={'year_fraction': 'curve'})

def read_angles():
    """Load the angles CSV, renaming 'year_fraction' to 'angle'."""
    return pd.read_csv(ANGLES_CSV) \
            .rename(index=str, columns={'year_fraction': 'angle'})

def current_csv_state():
    """Fingerprint of the source CSVs, used as the cache-validity key.

    FIX: the original was `lambda:` followed by a bare newline, which is a
    SyntaxError. NOTE(review): `angles_file` and `curves_file` are not
    defined anywhere in this file -- presumably they come from an importing
    module; confirm before relying on this function.
    """
    return (angles_file.last_modified, curves_file.last_modified)

# Module-level cache for read_test_data. It is a plain dict, so entries must
# be read with subscription.
# FIX: the original `correct` lambda used attribute access
# (TEST_DATA_CACHE.uid), which raises AttributeError on a dict.
TEST_DATA_CACHE = dict(
        uid=None,      # CSV-state fingerprint the payload was built from
        correct=lambda: TEST_DATA_CACHE['uid'] == current_csv_state(),
        payload=None   # cached result of read_test_data
        )

def update_test_data_cache(payload):
    """Store `payload` in TEST_DATA_CACHE, stamped with the current CSV state.

    Returns the payload unchanged so the call can be chained.

    FIX: TEST_DATA_CACHE is a plain dict, so entries must be accessed by
    subscription; the original used attribute access (TEST_DATA_CACHE.uid),
    which raises AttributeError. The `global` statement was also dropped --
    the dict is mutated in place, never rebound.
    """
    TEST_DATA_CACHE['uid'] = current_csv_state()
    TEST_DATA_CACHE['payload'] = payload
    assert TEST_DATA_CACHE['correct']()
    return payload

def read_test_data(n=1000*10):
    """Build (and cache) the test-data payload: the last `n` ngrams with their
    per-ngram curve and angle arrays, plus the covered year range.

    FIXES vs original: cache entries are read with subscription (the cache is
    a plain dict, so attribute access raised AttributeError), and the payload
    is built with keyword arguments -- `dict(data, year_range)` passed two
    positionals, a TypeError.
    NOTE(review): run_net expects keys 'data' and 'ngrams' in its test_data
    argument; this payload provides 'data' and 'year_range' -- confirm the
    intended schema.
    """
    if TEST_DATA_CACHE['correct']():
        return TEST_DATA_CACHE['payload']
    data = read_max_fractions().tail(n).set_index('ngram')
    topn = data.index
    curves = read_curves()
    year_range = np.arange(curves.year.min(), curves.year.max() + 1)
    data['curves'] = curves\
            .query('ngram in @topn')\
            .groupby('ngram')['curve']\
            .apply(np.array)
    data['angles'] = read_angles()\
            .query('ngram in @topn')\
            .groupby('ngram')['angle']\
            .apply(np.array)
    output = dict(data=data, year_range=year_range)
    update_test_data_cache(output)
    return output

def visualise(ws, test_data, file_name="bng_viz", hex_bin=True, random_state=RandomState(), circle_radius=0.5):
    """Render the trained neurons `ws` to "<file_name>.html" with bokeh.

    The neurons are projected to 2-D with metric MDS, drawn as circles and
    diamond markers over their Voronoi tessellation, and linked via a JS
    callback to a second figure plotting the selected neuron's weight vector.
    Returns None; writing the HTML file is the side effect.

    NOTE(review): `hex_bin` is accepted but never used, and the
    `random_state=RandomState()` default is evaluated once at import time,
    so it is shared between calls.
    """
    start_time = current_time()
    # Project the high-dimensional neuron weights to 2-D for plotting.
    mds = sklearn.manifold.MDS(
            n_jobs=-1,
            max_iter=500,
            n_init=10,
            metric=True,
            random_state=random_state
            )
    zs = mds.fit_transform(ws)
    print(f"mds stress {mds.stress_:.5f}")

    df = pd.DataFrame( zs, columns=['x','y'] )
    # Pile the test ngrams onto their closest neurons for the hover tooltip.
    piled_ngrams = run_net(ws, test_data)
    df['ngrams'] = piled_ngrams
    tooltips = [ ('ngrams', '@ngrams') ]

    (left_right, top_bottom) = get_ranges_with_margin(zs)
    fig = figure(width=600,height=600,
            tools='wheel_zoom,pan,reset,save',
            #tooltips=tooltips,
            #logo='grey',
            match_aspect=True,
            x_range=left_right,y_range=top_bottom)
    #import pdb; pdb.set_trace() 
    cds = ColumnDataSource(df)

    # Reverse the PuBu palette so the lightest shade comes first.
    palette = PuBu[9]
    palette = pipe( palette, reversed, list )
    fig.background_fill_color = palette[0]
    fig.background_fill_alpha = 0.8
    fig.grid.grid_line_alpha = 0.1
    bright_green = '#66ff00'  # NOTE(review): unused
    dark_grey = '#3c3c3c'
    # Soft translucent circles marking each neuron's surroundings.
    fig.circle( x='x', y='y',
            source=cds,
            radius=circle_radius,
            color=dark_grey,
            nonselection_color=dark_grey,
            fill_alpha=0.2,
            nonselection_fill_alpha=0.2,
            line_alpha=0
            ) 
    # Overlay the Voronoi cell borders as thin white outlines.
    voronoi_regions = get_voronoi_regions(fig, zs)
    for region in voronoi_regions:
        xs = region[0]
        ys = region[1]
        fig.patch(xs,ys,
                line_color='white',
                line_alpha=1,
                fill_alpha=0,
                line_width=0.5
                )
    # Diamond markers are the hover/tap targets for neuron selection.
    diamond_renderer = \
            fig.diamond( x='x', y='y',
                    source=cds,
                    size=8,
                    color='black',
                    nonselection_color='black',
                    selection_color='red',
                    alpha=0.7,
                    nonselection_alpha=0.7
                    )
    from bokeh.models import CustomJS, HoverTool, TapTool
    fig.add_tools(HoverTool(renderers=[diamond_renderer], tooltips=tooltips))
    fig.add_tools(TapTool(renderers=[diamond_renderer], behavior='select'))

    # Ship the raw weight vectors to the browser so the JS callback can swap
    # the plotted curve without a server round-trip.
    cds_ws = ColumnDataSource(data=dict(ws=ws.tolist()))

    # NOTE(review): hard-coded year axis (1808..2008, step 3); presumably it
    # must match the training data's year coverage -- confirm.
    year_range = np.arange(1808, 1808+201, step=3).tolist()

    cds_plotted_w = ColumnDataSource(
            data=dict(
                year=year_range,
                year_fraction=pipe( year_range, map(lambda _: 0.5), list)
                )
            )

    # Python function compiled to a JS callback: on selection, plot the
    # last-selected neuron's weight vector in fig2.
    # NOTE(review): CustomJS.from_py_func is removed in newer bokeh releases.
    def selection_callback(ws=cds_ws, plotted_w=cds_plotted_w, window=None):
        last_selected_ix = cb_obj.selected.indices[0]
        w = ws.data['ws'][last_selected_ix]
        plotted_w.data['year_fraction'] = w
        plotted_w.change.emit()

    selection_callback = CustomJS.from_py_func( selection_callback )
    cds.callback = selection_callback

    # Second figure: the selected neuron's curve (y axis deliberately inverted).
    fig2 = figure(width=600,height=600,
            tools='',
            x_range=(min(year_range),max(year_range)),
            y_range=(0.2,-0.2)
            )
    fig2.line(x='year', y='year_fraction', source=cds_plotted_w)

    output_file(f"{file_name}.html")
    layout = row(fig, fig2)  # NOTE(review): unused; the gridplot below is saved instead
    pipe(
            gridplot([[fig, fig2]], toolbar_location='left'),
            save
            )
    print_duration(start_time)
