from __future__ import division

import sys, os.path, argparse, logging, itertools, time

import numpy, scipy.constants, scipy.optimize, scipy.stats
from numpy import array

import matplotlib
# Avoid the need for X11.
matplotlib.use("PS")
import pylab
import matplotlib.backends.backend_ps
import matplotlib.ticker

import common, wlc, kinetics_opt
from retraction_unfolding_filter import RetractionPeak, RetractionCurve, CurveStretch
# Import kinetics_opt's widest available float type under its own numpy
# name (e.g. "float128") into this module's namespace -- presumably so
# that eval() of cached array reprs (which mention the dtype by name)
# resolves; TODO confirm against the repr-cache format in process().
exec("from numpy import %s" % kinetics_opt.bestfloat.__name__)

# Alias whichever extended-precision name this numpy build lacks to
# bestfloat, so reprs written on a platform with the other width can
# still be eval()ed here.
if not hasattr(numpy, "float96"):
    numpy.float96 = kinetics_opt.bestfloat
    from numpy import float96
if not hasattr(numpy, "float128"):
    numpy.float128 = kinetics_opt.bestfloat
    from numpy import float128


# Module logger: INFO and above to stderr via a plain StreamHandler.
logger = logging.getLogger("kinetics_opt_plots")
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)


class Chunk(object):
    """Per-playlist bundle of file paths and fitted-parameter chunks.

    Groups one curve-stretch playlist path, its optional repr cache
    paths, and the (dx, k0) parameter chunks for the exact, normal and
    truncated-normal models, as consumed by process().
    """

    # Attribute names in positional-constructor order; shared by
    # __init__ and __repr__ so the two cannot drift apart.
    _FIELDS = (
        "curve_stretch_playlist", "in_repr_path", "out_repr_path",
        "dx_chunk", "k0_chunk", "normal_dx_chunk", "normal_k0_chunk",
        "truncated_normal_dx_chunk", "truncated_normal_k0_chunk")

    def __init__(
        self, curve_stretch_playlist, in_repr_path, out_repr_path,
        dx_chunk, k0_chunk, normal_dx_chunk, normal_k0_chunk,
        truncated_normal_dx_chunk, truncated_normal_k0_chunk):
        values = (
            curve_stretch_playlist, in_repr_path, out_repr_path,
            dx_chunk, k0_chunk, normal_dx_chunk, normal_k0_chunk,
            truncated_normal_dx_chunk, truncated_normal_k0_chunk)
        for field, value in zip(self._FIELDS, values):
            setattr(self, field, value)

    def __repr__(self):
        # Mirrors the constructor call, one repr() per field in order.
        arguments = ", ".join(
            repr(getattr(self, field)) for field in self._FIELDS)
        return "Chunk(%s)" % arguments


def protocols_of_curve_stretch(curve_stretch, newtons_per_unit):
    """Build one kinetics_opt.Protocol per retraction peak.

    Loading rate and effective spring constant are rescaled by each
    peak's contour length; the peak's start force is quantized to
    integer force units.  The fourth Protocol argument is the 1-based
    peak index.
    """
    velocity = curve_stretch.retraction_velocity_m_per_s
    spring = curve_stretch.spring_constant_n_per_m
    protocols = []
    for index, peak in enumerate(curve_stretch.reversed_stretch_at_end):
        length = peak.contour_length_m
        start_units = int(numpy.around(peak.start_force_n / newtons_per_unit))
        protocols.append(kinetics_opt.Protocol(
            velocity / length, spring * length, start_units, index + 1))
    return protocols

def diffuse_protocols_of_curve_stretch(curve_stretch, newtons_per_unit, hsu):
    """Build one noise-broadened DiffuseProtocol per retraction peak.

    Each basic Protocol (same construction as protocols_of_curve_stretch)
    is wrapped together with the curve's noise standard deviation
    expressed in force units and the half-support width hsu.
    """
    velocity = curve_stretch.retraction_velocity_m_per_s
    spring = curve_stretch.spring_constant_n_per_m
    noise_units = curve_stretch.noise_stddev_n / newtons_per_unit
    diffuse_protocols = []
    for index, peak in enumerate(curve_stretch.reversed_stretch_at_end):
        length = peak.contour_length_m
        start_units = int(numpy.around(peak.start_force_n / newtons_per_unit))
        basic = kinetics_opt.Protocol(
            velocity / length, spring * length, start_units, index + 1)
        diffuse_protocols.append(
            kinetics_opt.DiffuseProtocol(basic, noise_units, hsu))
    return diffuse_protocols

def full_probabilities_sum(
    t, p, upper_bound_units, newtons_per_unit, protocols, dx, k0, batch_size):
    """Accumulate exact per-force-unit probabilities over all protocols.

    Protocols are handed to kinetics_opt.full_probabilities batch_size at
    a time to bound the working set; per-protocol rows of every batch
    are folded into one length-upper_bound_units vector.
    """
    accumulator = numpy.zeros(upper_bound_units, dtype=kinetics_opt.bestfloat)
    stream = iter(protocols)
    # Grouper idiom (Python 2): batch_size aliases of one iterator yield
    # consecutive batches; izip_longest right-pads the last with None.
    for padded in itertools.izip_longest(*[stream] * batch_size):
        batch = [protocol for protocol in padded if protocol]
        partial = kinetics_opt.full_probabilities(
            t, p, upper_bound_units, newtons_per_unit, batch, dx, k0)
        accumulator += partial.sum(axis=0)
    return accumulator

def full_probabilities_diffuse_sum(
    t, p, upper_bound_units, newtons_per_unit, diffuse_protocols, dx, k0, factory,
    batch_size):
    """Accumulate diffuse (noise-broadened) probabilities over protocols.

    Same batching scheme as full_probabilities_sum; factory selects how
    kinetics_opt builds each diffuse peak (normal vs truncated normal).
    """
    accumulator = numpy.zeros(upper_bound_units, dtype=kinetics_opt.bestfloat)
    stream = iter(diffuse_protocols)
    # Grouper idiom (Python 2): the final batch is right-padded with None.
    for padded in itertools.izip_longest(*[stream] * batch_size):
        batch = [protocol for protocol in padded if protocol]
        partial = kinetics_opt.full_probabilities_diffuse(
            t, p, upper_bound_units, newtons_per_unit, batch, dx, k0,
            factory)
        accumulator += partial.sum(axis=0)
    return accumulator

def process(
    t, p, newtons_per_unit, upper_bound_units, hsu,
    chunk, batch_size):
    """Compute, or reload from cache, force distributions for one chunk.

    Returns (force_counts, exact_ps, normal_ps, truncated_normal_ps):
    force_counts is the integer histogram of observed unfolding forces
    (one bin per force unit); each *_ps list holds, per (dx, k0) pair in
    the chunk, either an array of predicted probabilities per force unit
    or an empty array when that model slot was disabled (falsy dx).
    """
    if chunk.in_repr_path and os.path.isfile(chunk.in_repr_path):
        # Cached results: the repr file holds one eval()-able line per
        # return value, written by the block at the bottom of this
        # function.  SECURITY: eval() on file contents -- only safe for
        # trusted, locally generated cache files.
        logger.info("Found: %s", chunk.in_repr_path)
        with open(chunk.in_repr_path, "rb") as in_file:
            force_counts, exact_ps, normal_ps, truncated_normal_ps = map(
                eval, in_file)
    else:
        if chunk.in_repr_path:
            logger.info("Not found: %s", chunk.in_repr_path)
        # The playlist file holds one CurveStretch repr per line.
        # SECURITY: same eval() caveat as above.
        with open(chunk.curve_stretch_playlist, "rb") as in_file:
            curve_stretches = [eval(line) for line in in_file]
        # Quantize every observed unfolding force to integer force units,
        # then histogram them with one bin per unit.
        force_units = numpy.concatenate([
                numpy.asarray(
                    numpy.around(
                        numpy.array(
                            curve_stretch.reversed_forces_at_end(),
                            dtype=kinetics_opt.bestfloat
                            ) / newtons_per_unit), dtype=int)
                for curve_stretch in curve_stretches])
        force_counts = numpy.bincount(force_units, minlength=upper_bound_units)
        exact_ps = []
        for dx, k0 in zip(chunk.dx_chunk, chunk.k0_chunk):
            if dx:
                logger.info("Computing exact probabilities.")
                protocols = sum([
                        protocols_of_curve_stretch(curve_stretch, newtons_per_unit)
                        for curve_stretch in curve_stretches], [])
                exact_p = full_probabilities_sum(
                    t, p, upper_bound_units, newtons_per_unit, protocols, dx, k0,
                    batch_size)
                # Sanity check: each protocol's probabilities sum to one.
                numpy.testing.assert_allclose(exact_p.sum(), len(protocols))
            else:
                # Empty array marks "model disabled" for this slot.
                exact_p = numpy.empty(0, dtype=kinetics_opt.bestfloat)
            exact_ps.append(exact_p)

        # Shared by both the normal and truncated-normal passes below.
        # (Previously this list was built twice, identically; once is
        # enough.)
        diffuse_protocols = sum([
                diffuse_protocols_of_curve_stretch(
                    curve_stretch, newtons_per_unit, hsu)
                for curve_stretch in curve_stretches], [])
        normal_ps = []
        for normal_dx, normal_k0 in zip(
            chunk.normal_dx_chunk, chunk.normal_k0_chunk):
            if normal_dx:
                logger.info("Computing normal probabilities.")
                normal_p = full_probabilities_diffuse_sum(
                    t, p, upper_bound_units, newtons_per_unit, diffuse_protocols,
                    normal_dx, normal_k0,
                    kinetics_opt.DiffusePeak.normal_from_basic_peak, batch_size)
                numpy.testing.assert_allclose(normal_p.sum(), len(diffuse_protocols))
            else:
                normal_p = numpy.empty(0, dtype=kinetics_opt.bestfloat)
            normal_ps.append(normal_p)

        truncated_normal_ps = []
        for truncated_normal_dx, truncated_normal_k0 in zip(
            chunk.truncated_normal_dx_chunk, chunk.truncated_normal_k0_chunk):
            if truncated_normal_dx:
                logger.info("Computing truncated normal probabilities.")
                # NOTE(review): unlike the normal pass, this one is not
                # batched; for very long protocol lists consider routing
                # it through full_probabilities_diffuse_sum as well.
                truncated_normal_fp = kinetics_opt.full_probabilities_diffuse(
                    t, p, upper_bound_units, newtons_per_unit, diffuse_protocols,
                    truncated_normal_dx, truncated_normal_k0,
                    kinetics_opt.DiffusePeak.truncated_normal_from_basic_peak)
                truncated_normal_p = truncated_normal_fp.sum(axis=0)
                numpy.testing.assert_allclose(
                    truncated_normal_p.sum(), len(diffuse_protocols))
            else:
                truncated_normal_p = numpy.empty(0, dtype=kinetics_opt.bestfloat)
            truncated_normal_ps.append(truncated_normal_p)

        if chunk.out_repr_path:
            # Cache the four return values, one eval()-able repr per line,
            # matching the reader at the top of this function.
            logger.info("Writing: %s", chunk.out_repr_path)
            with open(chunk.out_repr_path, "wb") as out_file:
                f = lambda a: numpy.array_repr(
                    a, max_line_width=numpy.inf, precision=36)
                out_file.write(
                    numpy.array_repr(force_counts, max_line_width=numpy.inf) + "\n")
                out_file.write("[%s]\n" % ", ".join(map(f, exact_ps)))
                out_file.write("[%s]\n" % ", ".join(map(f, normal_ps)))
                out_file.write("[%s]\n" % ", ".join(map(f, truncated_normal_ps)))
    return force_counts, exact_ps, normal_ps, truncated_normal_ps

def create_histograms(
    rows, bin_width_units, newtons_per_unit,
    figure_upper_bound_pn, figure_y_lower_bound, figure_y_upper_bound, y_scale):
    """Draw one histogram subplot per row onto a single PS figure.

    Each row is (label_chunk, (force_counts, exact_ps, normal_ps,
    truncated_normal_ps)) as produced by process(); returns a
    FigureCanvasPS ready for print_figure().
    """
    # All rows must share the force-axis length of the first row.
    upper_bound_units = len(rows[0][1][0])
    if bin_width_units:
        # a[i]:b[i] is the bin (of width bin_width_units units) that
        # contains index i, clipped at the upper bound; used below to
        # replace each count with its bin average.
        # NOTE(review): if bin_width_units is not an integer, a and b
        # hold floats and force_counts[a[i]:b[i]] below relies on float
        # slice indices -- confirm the numpy version in use accepts that.
        i = numpy.arange(upper_bound_units)
        a = (i // bin_width_units) * bin_width_units
        b = numpy.minimum(
            (i // bin_width_units + 1) * bin_width_units, upper_bound_units)
    # TODO: Height should depend on the number of subfigures!
    figure = matplotlib.pyplot.figure(1, figsize=(3.375, 1.0 * len(rows)))
    figure.subplots_adjust(
        hspace=0.0, bottom=0.37 / len(rows), top=1 - 0.1 / len(rows),
        left=0.12, right=0.95)
    subplots = []
    for index, (label_chunk, row) in enumerate(rows):
        force_counts, exact_ps, normal_ps, truncated_normal_ps = row
        common.check_equal(len(force_counts), upper_bound_units)
        ax = figure.add_subplot(len(rows), 1, 1 + index)
        # Only the bottom subplot shows the shared x-axis label/ticks.
        if index == len(rows) - 1:
            ax.set_xlabel("Force (pN)")
        else:
            matplotlib.pyplot.setp(ax.get_xticklabels(), visible=False)
        subplots.append(ax)
        xMajorLocator = matplotlib.ticker.MultipleLocator(100)
        xMinorLocator = matplotlib.ticker.MultipleLocator(25)
        ax.xaxis.set_major_locator(xMajorLocator)
        ax.xaxis.set_minor_locator(xMinorLocator)
        
        n = force_counts.sum()
        ax.set_ylabel("N=%d" % n)
        if bin_width_units:
            # Bin-averaged experimental histogram, one value per unit.
            f = numpy.empty(upper_bound_units, dtype=kinetics_opt.bestfloat)
            for i in range(upper_bound_units):
                f[i] = force_counts[a[i]:b[i]].mean()
        # f exists only when bin_width_units is truthy; the conditional
        # list below guards the reference.
        max_value = numpy.concatenate(
            ([f] if bin_width_units else []) + exact_ps + normal_ps + truncated_normal_ps).max()
        yMajorLocator = matplotlib.ticker.MaxNLocator(
            5, prune="lower" if index != len(rows) - 1 else None, integer=True)
        formatter = matplotlib.ticker.FormatStrFormatter("%3d")
        # def format_y_ticker(x, pos):
        #     s = "%d" % x
        #     # two whitespace characters for each missing digit
        #     return " " * ((3 - len(s)) * 1) + s
        # Formatter instances are callable as (x, pos), so wrapping the
        # FormatStrFormatter in a FuncFormatter is valid.
        formatter = matplotlib.ticker.FuncFormatter(formatter)
        ax.yaxis.set_major_formatter(formatter)
        ax.yaxis.set_major_locator(yMajorLocator)
        ax.set_yscale(y_scale)
        #ax.yaxis.set_minor_locator(yMinorLocator)
        # Nudge the y-label inward by the expected tick-label digit count.
        m = -1 if max_value < 9 else 2 if max_value < 90 else 3
        ax.yaxis.labelpad = 3 - m
        pn_per_unit = newtons_per_unit * 1e12
        # data rows: (x positions in pN, per-unit weights, label, colour,
        # linestyle).  The experimental histogram, if shown, comes first.
        data = []
        if bin_width_units and len(f):
            data.append(
                (numpy.arange(upper_bound_units) * pn_per_unit, f, label_chunk[0],
                 "black", "solid"))
        colour_linestyle_list = [
            ("blue", "dashed"), ("red", "dotted"), ("green", "dashdot")]
        for i in range(len(exact_ps)):
            # label_chunk is offset by one when the experimental label
            # occupies slot 0 (bool coerced to 0/1).
            exact_p, normal_p, truncated_normal_p, label = exact_ps[i], normal_ps[i], truncated_normal_ps[i], label_chunk[(not not bin_width_units) + i]
            if len(exact_p):
                data.append((
                        numpy.arange(upper_bound_units) * pn_per_unit,
                        exact_p, label,
                        colour_linestyle_list[i][0], colour_linestyle_list[i][1]))
            if len(normal_p):
                data.append((
                        numpy.arange(upper_bound_units) * pn_per_unit,
                        normal_p, label,
                        colour_linestyle_list[i][0], colour_linestyle_list[i][1]))
            if len(truncated_normal_p):
                data.append((
                        numpy.arange(upper_bound_units) * pn_per_unit,
                        truncated_normal_p, label,
                        colour_linestyle_list[i][0], colour_linestyle_list[i][1]))
        x, weights, labels, colours, linestyles = zip(*data)
        _, _, patches = ax.hist(
            x,
            bins=numpy.arange(upper_bound_units) * pn_per_unit,
            weights=weights, histtype="step",
            label=labels, color=colours)
        common.check_equal(len(data), len(patches))
        # Apply each data row's linestyle to its patch.  `not
        # bin_width_units` is 0 or 1 used as a slice start: when there is
        # no experimental histogram the first model row is skipped and
        # keeps the default linestyle -- presumably intentional so one
        # curve stays solid; confirm.  NOTE: `row` shadows the loop
        # variable unpacked at the top of the enclosing for-loop.
        for row, patch in zip(
            data[not bin_width_units:], patches[not bin_width_units:]):
            common.check_equal(len(patch), 1)
            patch[0].set_linestyle(row[4])
        if any(labels):
            legend = ax.legend(loc="upper right")
            legend.draw_frame(False)
        ax.set_xlim(0, figure_upper_bound_pn)
        # A falsy (0) upper bound means "autoscale the top".
        if figure_y_upper_bound:
            ax.set_ylim(figure_y_lower_bound, figure_y_upper_bound)
        else:
            ax.set_ylim(figure_y_lower_bound, ax.get_ylim()[1])
        #print(matplotlib.pyplot.setp(ax.get_xticklabels()))
    canvas = matplotlib.backends.backend_ps.FigureCanvasPS(figure)
    return canvas

def main(params):
    """Parse command-line params, compute distributions, plot, write EPS.

    Returns (rows, canvas): the per-playlist process() results and the
    rendered matplotlib PS canvas.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--in_repr_paths", nargs="*")
    parser.add_argument("--out_repr_paths", nargs="*")
    parser.add_argument("--curve_stretch_playlists", nargs="*")
    parser.add_argument(
        "--bin_width_pn", type=float, default=5,
        help="0 means do not show experimental force histogram")
    parser.add_argument("--temperature_K", type=float, default=301.15)
    parser.add_argument("--p_nm", type=float, default=wlc.default_p * 1e9)
    parser.add_argument("--upper_bound_pn", type=int, default=1000)
    parser.add_argument("--pn_per_unit", type=float, default=1)
    parser.add_argument(
        "--dxs_nm", type=float, nargs="*",
        help="exact multiple of curve_stretch_playlists")
    parser.add_argument(
        "--k0s", type=float, nargs="*",
        help="exact multiple of curve_stretch_playlists")
    parser.add_argument(
        "--normal_dxs_nm", type=float, nargs="*",
        help="exact multiple of curve_stretch_playlists")
    parser.add_argument(
        "--normal_k0s", type=float, nargs="*",
        help="exact multiple of curve_stretch_playlists")
    parser.add_argument(
        "--truncated_normal_dxs_nm", type=float, nargs="*",
        help="exact multiple of curve_stretch_playlists")
    parser.add_argument(
        "--truncated_normal_k0s", type=float, nargs="*",
        help="exact multiple of curve_stretch_playlists")
    parser.add_argument("--batch_size", type=int, default=256)
    parser.add_argument("--labels", nargs="*")
    parser.add_argument("--diffuse_half_support_pn", type=int, default=60)
    parser.add_argument("--figure_y_lower_bound", type=float, default=0)
    parser.add_argument("--figure_y_upper_bound", type=float, default=0)
    parser.add_argument("--figure_upper_bound_pn", type=int, default=500)
    parser.add_argument(
        "--y_scale", choices=matplotlib.scale.get_scale_names(), default="linear")
    parser.add_argument("--eps_path")

    args = parser.parse_args(params)
    logger.info("%s", args)

    # Convert command-line units (pN, nm) into the internal SI /
    # force-unit quantities used throughout.
    t = args.temperature_K
    p = args.p_nm / 1e9
    newtons_per_unit = args.pn_per_unit / 1e12
    upper_bound_units = int(numpy.ceil(args.upper_bound_pn / args.pn_per_unit))
    hsu = int(numpy.ceil(args.diffuse_half_support_pn / args.pn_per_unit))
    # NOTE(review): true division -- this can be fractional when
    # bin_width_pn is not a multiple of pn_per_unit; create_histograms'
    # binning assumes the result is usable as a bin width.  Confirm.
    bin_width_units = args.bin_width_pn / args.pn_per_unit
    eps_path = args.eps_path

    curve_stretch_playlists = args.curve_stretch_playlists
    in_repr_paths = args.in_repr_paths
    out_repr_paths = args.out_repr_paths
    # nm -> m.  These nargs="*" flags are effectively required: when a
    # flag is absent the attribute is None and the comprehension raises.
    dxs = [dx_nm / 1e9 for dx_nm in args.dxs_nm]
    k0s = args.k0s
    normal_dxs = [normal_dx_nm / 1e9 for normal_dx_nm in args.normal_dxs_nm]
    normal_k0s = args.normal_k0s
    truncated_normal_dxs = [
        truncated_normal_dx_nm / 1e9
        for truncated_normal_dx_nm in args.truncated_normal_dxs_nm]
    truncated_normal_k0s = args.truncated_normal_k0s
    labels = args.labels

    # Every parameter list must hold exactly k = m // n entries per
    # playlist; labels carry one extra entry per playlist when the
    # experimental histogram is enabled (bin_width truthy).
    n = len(curve_stretch_playlists)
    common.check_equal(len(in_repr_paths), n)
    common.check_equal(len(out_repr_paths), n)
    m = len(dxs)
    k = m // n
    common.check_equal(k * n, m)
    common.check_equal(len(dxs), m)
    common.check_equal(len(k0s), m)
    common.check_equal(len(normal_dxs), m)
    common.check_equal(len(normal_k0s), m)
    common.check_equal(len(truncated_normal_dxs), m)
    common.check_equal(len(truncated_normal_k0s), m)
    common.check_equal(len(labels), m + n * (not not bin_width_units))

    rows = []
    # Grouper idiom (Python 2 izip): split each flat parameter list into
    # per-playlist chunks of length k (labels: k plus the optional
    # experimental-histogram label) and process each playlist in turn.
    for (
        curve_stretch_playlist, in_repr_path, out_repr_path,
        dx_chunk, k0_chunk, normal_dx_chunk, normal_k0_chunk,
        truncated_normal_dx_chunk, truncated_normal_k0_chunk, label_chunk) in zip(
        curve_stretch_playlists, in_repr_paths, out_repr_paths,
        itertools.izip(*[iter(dxs)] * k), itertools.izip(*[iter(k0s)] * k),
        itertools.izip(*[iter(normal_dxs)] * k),
        itertools.izip(*[iter(normal_k0s)] * k),
        itertools.izip(*[iter(truncated_normal_dxs)] * k),
        itertools.izip(*[iter(truncated_normal_k0s)] * k),
        itertools.izip(*[iter(labels)] * (k + (not not bin_width_units)))):
        row = process(
            t, p, newtons_per_unit, upper_bound_units, hsu,
            Chunk(
                curve_stretch_playlist, in_repr_path, out_repr_path,
                dx_chunk, k0_chunk, normal_dx_chunk, normal_k0_chunk,
                truncated_normal_dx_chunk, truncated_normal_k0_chunk),
            args.batch_size)
        rows.append((label_chunk, row))

    logger.info("Creating histograms.")
    # http://eloquentscience.com/2009/09/why-you-should-use-sans-serif-fonts-for-figures-posters-and-slides/
    pylab.rc("font", family="sans-serif")
    pylab.rc("font", size=10)
    pylab.rc("axes", linewidth=0.5)
    pylab.rc("legend", fontsize="small")
    pylab.rc("mathtext", default="regular")
    canvas = create_histograms(
        rows, bin_width_units, newtons_per_unit,
        args.figure_upper_bound_pn, args.figure_y_lower_bound,
        args.figure_y_upper_bound, args.y_scale)
    if eps_path:
        canvas.print_figure(eps_path)
    # Python 2: zip() returns a list, so [1] selects the tuple of
    # process() results (dropping the label chunks).
    return zip(*rows)[1], canvas

# Script entry point: forward the command-line arguments (without the
# program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
