package it.unibo.refolding.pico;

import it.unibo.refolding.alg.RbNode;
import it.unibo.refolding.alg.RbTree;
import it.unibo.refolding.common.CommonThings;
import it.unibo.refolding.common.DoubleIntPair;
import it.unibo.refolding.common.IntPair;
import it.unibo.refolding.common.LinearFunction;
import it.unibo.refolding.common.MovingWindows;
import it.unibo.refolding.common.MyRuntimeException;
import it.unibo.refolding.common.PointFrequencyFunctionInput;
import it.unimi.dsi.fastutil.doubles.DoubleList;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;
import it.unimi.dsi.fastutil.ints.IntLists;
import it.unimi.dsi.fastutil.ints.IntRBTreeSet;
import it.unimi.dsi.fastutil.ints.IntSortedSet;

import java.awt.geom.Point2D;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.PriorityQueue;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.apache.commons.math3.stat.descriptive.rank.Percentile;

import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.Range;

public class PicoDataAnalyser {

  // Default peak-detection thresholds. The suffix encodes the unit: N = newtons,
  // M = metres; "Fraction" is dimensionless. They parameterize
  // findPeaksInvariantUponTranslationOfDeflectionData.
  public static final double defaultMinimumPeakForceN = 80e-12;
  public static final double defaultMinimumPeakContourLengthM = 10e-9;
  public static final double defaultMinContourLengthDeltaM = 5e-9;
  public static final double defaultMinPeakForceRelativeToEmpiricalBaselineN = 100e-12;
  public static final double defaultMedianAbsoluteDeviationLowerBoundN = 17.5e-12;
  public static final double defaultLeftMostlyHigherWindowSizeM = 6.1e-9;
  public static final double defaultRightMuchHigherN = 40e-12;
  public static final double defaultRightMuchHigherThanAllN = 5e-12;
  public static final double defaultAtMostOnePeakWindowM = 5e-9;
  public static final double defaultLeftMostlyHigherFraction = 0.7;
  public static final double defaultLeftMostlyHigherN = 1e-12;

  private static final Logger logger = Logger.getLogger(PicoDataAnalyser.class.getSimpleName());

  /**
   * Finds the slope and the starting index of the line that best fits the tail
   * of the data. When {@code fixedSlope} is NaN a slope is estimated for every
   * candidate start; otherwise the given slope is used throughout.
   *
   * @return a pair of (slope, index); the index addresses the same zero-based
   *         positions even though the internal counts array has length n - 1.
   */
  static DoubleIntPair findSlopeAndIndexBestFittingEnd(short[] x, short[] y, double fixedSlope) {
    Preconditions.checkArgument(y.length >= 2, "At least 2 points are needed to fit 1 line.");
    if (y.length >= Short.MAX_VALUE) {
      logger.log(
        Level.WARNING, "{0} might be too large for the exact integer calculations", y.length);
    }
    if (!Double.isNaN(fixedSlope)) {
      // The caller imposed a slope; only the start index must be searched.
      return new DoubleIntPair(fixedSlope, findIndexBestFittingEnd(x, y, fixedSlope));
    }
    // Estimate, for each start index, the best-fit slope from there to the end,
    // then pick the start whose line fits the tail best.
    double[] tailSlopes = new double[y.length - 1];
    CommonThings.slopeOfBestFitLineFromEachPointOnwards(x, y, tailSlopes);
    int bestStart = findIndexBestFittingEnd(x, y, tailSlopes);
    return new DoubleIntPair(tailSlopes[bestStart], bestStart);
  }

  /**
   * This method is the performance bottleneck for contact point computations.
   * For each candidate start i, counts how often the residual of the remaining
   * points around the line with slope endSlopes[i] through (x[i], y[i]) changes
   * sign or touches zero, and returns the start with the largest count.
   */
  private static int findIndexBestFittingEnd(short[] x, short[] y, double[] endSlopes) {
    Preconditions.checkArgument(x.length == y.length);
    final int n = y.length;
    int[] zeroTouchCounts = new int[n - 1];
    // O(n^2): one pass over the tail for every candidate start.
    for (int start = 0; start < n - 1; ++start) {
      LinearFunction candidate = LinearFunction.lineWithSlopeThroughPoint(
        endSlopes[start], new Point2D.Double(x[start], y[start]));
      double previousResidual = y[start] - candidate.evaluate(x[start]);
      int touches = 0;
      for (int next = start + 1; next < n; ++next) {
        double residual = y[next] - candidate.evaluate(x[next]);
        if (previousResidual * residual <= 0) {
          ++touches;
        }
        previousResidual = residual;
      }
      zeroTouchCounts[start] = touches;
    }
    return CommonThings.argMax(zeroTouchCounts);
  }

  /**
   * This method is the performance bottleneck for contact point computations.
   * Returns the index with the most crossings of the fixed-slope line.
   * TODO: {@link http://en.wikipedia.org/wiki/Theil-Sen_estimator}
   */
  private static int findIndexBestFittingEnd(short[] x, short[] y, double slope) {
    return CommonThings.argMax(findCrossings(x, y, slope));
  }

  /**
   * For each point, counts crossings of the de-trended series (y minus the line
   * of the given slope). The series is reversed before and the counts reversed
   * after, because {@link #timeSeriesCrossings(double[], boolean)} with head ==
   * true only considers intervals up to each point.
   */
  static int[] findCrossings(short[] x, short[] y, double slope) {
    Preconditions.checkArgument(x.length == y.length);
    // intercepts[i] is the intercept of the line with the given slope through
    // point i, i.e. the series with the linear trend removed.
    double[] intercepts = new double[x.length];
    for (int i = 0; i < intercepts.length; ++i) {
      intercepts[i] = y[i] - slope * x[i];
    }
    CommonThings.reverse(intercepts, 0, intercepts.length);
    int[] crossings = timeSeriesCrossings(intercepts, true);
    CommonThings.reverse(crossings, 0, crossings.length);
    return crossings;
  }

  static boolean augmented = true;

  /**
   * It takes around 20 seconds for 1 million elements with head is false and 22
   * seconds with head is true.
   * 
   * @param a
   *          the time series; consecutive elements define the closed edges
   *          [a[i], a[i + 1]] (or [a[i + 1], a[i]]).
   * @param head
   *          when true, only the intervals up to and including the point in
   *          question are considered.
   * @return For each element, the number of times it is contained within the
   *         closed intervals [a[i], a[i + 1]] (or [a[i + 1], a[i]]). When a[i]
   *         == a[i + 1], the edge from i to i + 1 is considered to contain the
   *         point a[i] = a[i + 1] only once. When head is true, only the
   *         intervals up to and including the point in question are considered.
   *         Complexity: n * log(n). When head is false, the method is somewhat
   *         similar to
   *         {@link #findIntervalOfHighestFrequency(double[], int, int)}.
   */
  public static int[] timeSeriesCrossings(double[] a, boolean head) {
    if (a.length == 0) {
      return new int[0];
    }
    // Sweep-line event: a value d at which some edges start and some end.
    class X implements Comparable<X> {
      final double d;
      final IntList starting;
      final IntList ending;

      X(double d, IntList starting, IntList ending) {
        this.d = d;
        this.starting = starting;
        this.ending = ending;
      }

      @Override
      public int compareTo(X o) {
        return d < o.d ? -1 : (d == o.d ? 0 : +1);
      }

      @Override
      public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
      }
    }
    // Two events per edge: one at its lower endpoint, one at its upper.
    X[] xs = new X[2 * (a.length - 1)];
    for (int i = 0; i < a.length - 1; ++i) {
      xs[2 * i] = new X(Math.min(a[i], a[i + 1]), IntLists.singleton(i), IntLists.EMPTY_LIST);
      xs[2 * i + 1] = new X(Math.max(a[i], a[i + 1]), IntLists.EMPTY_LIST, IntLists.singleton(i));
    }
    Arrays.sort(xs);
    // Merge events that share the same value d into a single event.
    List<X> l = new ArrayList<>();
    for (int i = 0; i < xs.length;) {
      IntList starting = new IntArrayList(xs[i].starting);
      IntList ending = new IntArrayList(xs[i].ending);
      int j = i + 1;
      while (j < xs.length && xs[i].d == xs[j].d) {
        starting.addAll(xs[j].starting);
        ending.addAll(xs[j].ending);
        ++j;
      }
      /*
       * Note that when edge.x == edge.y, the same edge is both added and
       * removed. And it does count as any other added edge.
       */
      l.add(new X(xs[i].d, starting, ending));
      i = j;
    }
    int[] result = new int[a.length];
    /*
     * linkIndices is needed to restrict the search to the head when head is
     * true, without changing the time complexity. We use it nonetheless even
     * when head is false, though it could be replaced by a simple count
     * representing the size of linkIndices in the latter case.
     */
    RbTree<Integer> linkIndices = augmented ? new RbTree<Integer>() : null;
    IntSortedSet linkIndicesSortedSet = augmented ? null : new IntRBTreeSet();

    // Each link is processed at most twice.
    for (int i = 0; i < l.size(); ++i) {
      if (augmented) {
        addAll(linkIndices, l.get(i).starting);
      } else {
        addAll(linkIndicesSortedSet, l.get(i).starting);
      }
      // Collect the sample indices whose value equals this event's d: those are
      // the points whose counts can be finalized now.
      IntList points = new IntArrayList();
      for (IntList indices : new IntList[] {l.get(i).starting, l.get(i).ending}) {
        for (int linkIndex : indices) {
          if (a[linkIndex] == l.get(i).d) {
            points.add(linkIndex);
          }
          if (a[linkIndex + 1] == l.get(i).d) {
            points.add(linkIndex + 1);
          }
        }
      }
      int[] sortedPoints = points.toIntArray();
      /*
       * This sorting procedure is only needed for sanity checks, but it does
       * not affect the n*log(n) complexity: sum(p_i*log(p_i)) <=
       * sum(p_i*log(sum p_i)) = sum(p_i)*log(sum p_i) when p_i >= 1.
       */
      Arrays.sort(sortedPoints);
      for (int j = 0; j < sortedPoints.length;) {
        int k = j + 1;
        while (k < sortedPoints.length && sortedPoints[k] == sortedPoints[j]) {
          ++k;
        }
        /*
         * Each point can be added only twice from starting and twice for ending
         * (?), once as start of a link and once as end of a link.
         */
        Preconditions.checkState(k <= j + 4, "k=%s j=%s", k, j);
        Preconditions.checkState(result[sortedPoints[j]] == 0);
        // log(n), at most once for each point
        final int value;
        if (head) {
          if (augmented) {
            int rank = linkIndices.rank(sortedPoints[j]);
            assert rank >= 0;
            if (rank == 0) {
              int rank2 = linkIndices.rank(sortedPoints[j] - 1);
              Preconditions.checkState(rank2 >= 1);
              value = rank2;
            } else {
              value = rank - 1;
            }
          } else {
            value = linkIndicesSortedSet.headSet(sortedPoints[j]).size();
          }
        } else {
          value = augmented ? linkIndices.size() : linkIndicesSortedSet.size();
        }
        result[sortedPoints[j]] = value;
        j = k;
      }
      if (augmented) {
        removeAll(linkIndices, l.get(i).ending);
      } else {
        removeAll(linkIndicesSortedSet, l.get(i).ending);
      }
    }
    // Every edge that was started must have been ended.
    if (augmented) {
      Preconditions.checkState(linkIndices.isEmpty());
    } else {
      Preconditions.checkState(linkIndicesSortedSet.isEmpty());
    }
    return result;
  }

  /** Removes every index in {@code ending}, verifying that all were present. */
  private static void removeAll(IntSortedSet indices, IntList ending) {
    final int sizeBefore = indices.size();
    indices.removeAll(ending);
    Preconditions.checkState(indices.size() == sizeBefore - ending.size());
  }

  /** Removes every index in {@code ending}, verifying that all were present. */
  private static void removeAll(RbTree<Integer> indices, IntList ending) {
    for (int index : ending) {
      Preconditions.checkNotNull(indices.delete(index));
    }
  }

  /** Adds every index in {@code starting}, verifying that none was present. */
  private static void addAll(IntSortedSet indices, IntList starting) {
    final int sizeBefore = indices.size();
    indices.addAll(starting);
    Preconditions.checkState(indices.size() == sizeBefore + starting.size());
  }

  /** Adds every index in {@code starting}, verifying each insertion succeeded. */
  private static void addAll(RbTree<Integer> indices, IntList starting) {
    for (int index : starting) {
      Preconditions.checkNotNull(indices.insert(index));
    }
  }

  /**
   * Immutable pair of approach and retraction contact points sharing the same
   * zLsb/deflectionLsb slope.
   */
  public static class ContactPointsAndSlope {
    public final ZLsbDeflectionLsbPoint approachContactPoint, retractionContactPoint;
    public final double slopeZLsbDefLsb;

    public ContactPointsAndSlope(ZLsbDeflectionLsbPoint approachContactPoint,
      ZLsbDeflectionLsbPoint retractionContactPoint, double slopeZLsbDefLsb) {
      this.approachContactPoint = approachContactPoint;
      this.retractionContactPoint = retractionContactPoint;
      this.slopeZLsbDefLsb = slopeZLsbDefLsb;
    }

    public ContactPointAndSlope approachContactPointAndSlope() {
      return new ContactPointAndSlope(approachContactPoint, slopeZLsbDefLsb);
    }

    public ContactPointAndSlope retractionContactPointAndSlope() {
      return new ContactPointAndSlope(retractionContactPoint, slopeZLsbDefLsb);
    }

    @Override
    public boolean equals(Object obj) {
      if (!(obj instanceof ContactPointsAndSlope)) {
        return false;
      }
      ContactPointsAndSlope other = (ContactPointsAndSlope) obj;
      return Objects.equal(approachContactPoint, other.approachContactPoint)
        && Objects.equal(retractionContactPoint, other.retractionContactPoint)
        && Double.doubleToLongBits(slopeZLsbDefLsb) == Double.doubleToLongBits(other.slopeZLsbDefLsb);
    }

    /*
     * Fix: the original overrode equals without hashCode, breaking the
     * hashCode contract for hash-based collections. HashCodeBuilder hashes the
     * double via doubleToLongBits, consistent with equals above.
     */
    @Override
    public int hashCode() {
      return new HashCodeBuilder()
        .append(approachContactPoint)
        .append(retractionContactPoint)
        .append(slopeZLsbDefLsb)
        .toHashCode();
    }

    @Override
    public String toString() {
      return ReflectionToStringBuilder.toString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }

    /** Copy with the retraction contact point's deflection replaced. */
    public ContactPointsAndSlope withRetractionContactPointDefLsb(
      double empiricalRetractionContactPointDefLsb) {
      return new ContactPointsAndSlope(
        approachContactPoint,
        retractionContactPoint.withDeflectionLsb(empiricalRetractionContactPointDefLsb),
        slopeZLsbDefLsb);
    }

    /** Copy with the retraction contact point and the slope replaced. */
    public ContactPointsAndSlope withRetractionContactPointAndSlope(
      ContactPointAndSlope retractionCps) {
      return new ContactPointsAndSlope(
        approachContactPoint, retractionCps.contactPoint(), retractionCps.slopeZLsbDefLsb);
    }
  }

  /** A contact point (zLsb, defLsb) together with the baseline slope. */
  public static class ContactPointAndSlope {
    public final double zLsb;
    public final double defLsb;
    public final double slopeZLsbDefLsb;

    public ContactPointAndSlope(double zLsb, double defLsb, double slopeZLsbDefLsb) {
      this.zLsb = zLsb;
      this.defLsb = defLsb;
      this.slopeZLsbDefLsb = slopeZLsbDefLsb;
    }

    public ContactPointAndSlope(ZLsbDeflectionLsbPoint contactPoint, double slopeZLsbDefLsb) {
      // Delegate to the primary constructor.
      this(contactPoint.zLsb, contactPoint.deflectionLsb, slopeZLsbDefLsb);
    }

    /** The contact point alone, without the slope. */
    public ZLsbDeflectionLsbPoint contactPoint() {
      return new ZLsbDeflectionLsbPoint(zLsb, defLsb);
    }

    /** The baseline: the line of the stored slope through the contact point. */
    public LinearFunction baseline() {
      Point2D.Double through = new Point2D.Double(zLsb, defLsb);
      return LinearFunction.lineWithSlopeThroughPoint(slopeZLsbDefLsb, through);
    }

    /** A parseable, locale-independent representation. */
    public String repr() {
      return String.format(
        Locale.ROOT, "ContactPointAndSlope(%s, %s, %s)", Double.toString(zLsb),
        Double.toString(defLsb), Double.toString(slopeZLsbDefLsb));
    }

    @Override
    public String toString() {
      return ReflectionToStringBuilder.toString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }
  }

  /** An index into a curve, paired with the end-line slope found there. */
  public static class IndexAndZLsbDefLsbSlopes {
    public final int index;

    /**
     * Slope of the line where the cantilever is relaxed, where Z changes
     * linearly but deflection stays roughly constant.
     */
    public final double endLineSlopeZLsbDefLsb;

    public IndexAndZLsbDefLsbSlopes(int index, double endLineSlopeZLsbDefLsb) {
      this.index = index;
      this.endLineSlopeZLsbDefLsb = endLineSlopeZLsbDefLsb;
    }

    /** Resolves the index against the curve into a concrete contact point. */
    public ContactPointAndSlope contactPointAndSlope(DirectionData approach) {
      double zLsb = approach.z(index);
      double defLsb = approach.deflectionAtIndex(index);
      return new ContactPointAndSlope(zLsb, defLsb, endLineSlopeZLsbDefLsb);
    }

    @Override
    public String toString() {
      return ReflectionToStringBuilder.toString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }
  }

  /** Finds the approach contact point and slope; slope may be NaN to estimate it. */
  public static ContactPointAndSlope findApproachContactPointAndSlope(
    DirectionData approach, double slope) {
    IndexAndZLsbDefLsbSlopes indexAndSlopes = findApproachIndexAndZLsbDefLsbSlopes(approach, slope);
    return indexAndSlopes.contactPointAndSlope(approach);
  }

  /**
   * @param approach
   *          TODO: require only the deflection, not the z data
   * @param slope
   *          the fixed zLsb/deflectionLsb slope, or NaN to have it estimated
   *          by {@link #findSlopeAndIndexBestFittingEnd(short[], short[], double)}.
   */
  private static IndexAndZLsbDefLsbSlopes findApproachIndexAndZLsbDefLsbSlopes(
    DirectionData approach, double slope) {
    /*
     * Since different approach/retraction velocities seem to have the same
     * slope, it means the slope depends on Z, not on Time.
     */
    DoubleIntPair slopeAndIndex = findSlopeAndIndexBestFittingEnd(
      approach.sharedZArray(), approach.sharedDeflectionArray(), slope);
    return new IndexAndZLsbDefLsbSlopes(slopeAndIndex.y, slopeAndIndex.x);
  }

  /**
   * The slopes returned have X in zLsb and Y in deflectionLsb. Note: It only
   * works well if there is a rather long region at the end that is flat.
   * Otherwise it cuts all peaks in 2. For refolding experiments I should use
   * directly the approach contact point as the retraction contact point. Since
   * the slopes already use zLsb, they are fully compatible and no changes are
   * required.
   * 
   * @param retraction
   *          the retraction curve; only its last quarter is searched for the
   *          baseline intercept.
   * @param slope
   *          slope of the baseline, X in zLsb and Y in deflectionLsb.
   * @return the first intersection of the retraction curve with the baseline.
   */
  public static ZLsbDeflectionLsbPoint findRetractionContactPointGivenSlope(
    DirectionData retraction, double slope) {
    // Restrict the intercept search to the last quarter of the curve.
    int from = retraction.timeInstantCount() * 3 / 4;
    int to = retraction.timeInstantCount();
    DoubleIntPair bestInterceptAndIndex = findBestInterceptAndIndex(retraction, from, to, slope);
    ZLsbDeflectionLsbPoint contactPoint = firstIntersectionWithLine(retraction, new LinearFunction(
      slope, bestInterceptAndIndex.x));
    return contactPoint;
  }

  /**
   * Computes, for every sample, the intercept of the line with the given slope
   * through that sample; returns the centre of the most frequent intercept
   * interval within [from, to) and the index of the sample whose intercept is
   * closest to that centre.
   */
  private static DoubleIntPair findBestInterceptAndIndex(
    DirectionData retraction, int from, int to, double slope) {
    short[] deflection = retraction.sharedDeflectionArray();
    /*
     * 2/3 is not enough for misurate2/m20081120a.495.svg (I27 dimer) but 3/4
     * works wonderfully. In the future it might be worth finding the last peak
     * and taking that region, but for now it works fine.
     */
    double[] intercepts = new double[deflection.length];
    for (int i = 0; i < intercepts.length; ++i) {
      intercepts[i] = deflection[i] - slope * retraction.z(i);
    }
    double[] mostFrequentInterval = findIntervalOfHighestFrequency(intercepts, from, to);
    double bestIntercept = (mostFrequentInterval[0] + mostFrequentInterval[1]) / 2;
    int closestIndex = CommonThings.argMinAbsDifference(intercepts, bestIntercept);
    return new DoubleIntPair(bestIntercept, closestIndex);
  }

  /**
   * Builds one closed range per pair of consecutive intercepts in [from, to)
   * and delegates to CommonThings to find the interval covered most often.
   * Returns {min, max} of that interval.
   */
  private static double[] findIntervalOfHighestFrequency(double[] intercepts, int from, int to) {
    List<Range<Double>> ranges = new ArrayList<>(to - from - 1);
    for (int i = from; i + 1 < to; ++i) {
      double lo = Math.min(intercepts[i], intercepts[i + 1]);
      double hi = Math.max(intercepts[i], intercepts[i + 1]);
      ranges.add(Range.closed(lo, hi));
    }
    double[] minMax = new double[2];
    CommonThings.findIntervalOfHighestFrequency(ranges, minMax);
    return minMax;
  }

  /**
   * Returns the smallest horizontal deflection after the contact point among
   * entries that are peaks with a contour length, or positive infinity when
   * there is none.
   */
  static double minHorizontalDeflectionOfPeakWithContourLength(
    PeakOrNot[] peakOrNots, OffsetAndDeflections offsetAndDeflections) {
    double smallest = Double.POSITIVE_INFINITY;
    for (int index = 0; index < peakOrNots.length; ++index) {
      if (!peakOrNots[index].hasContorLength()) {
        continue;
      }
      assert peakOrNots[index].isPeak();
      smallest = Math.min(
        smallest, offsetAndDeflections.horizontalDeflectionAfterContactPoint[index]);
    }
    return smallest;
  }

  /**
   * Scans the retraction forward for the first sign change of the deflection
   * relative to the given line and returns the intersection of the crossing
   * segment with that line, clamped to the segment's bounding box. When no
   * crossing is found, or the computed intersection is NaN, returns the sample
   * closest to the line instead (see {@link #junctionNotFound}).
   *
   * @param retraction provides the z and deflection samples.
   * @param approachEndLinearFunction line with X in zLsb, Y in deflectionLsb.
   */
  public static ZLsbDeflectionLsbPoint firstIntersectionWithLine(
    DirectionData retraction, LinearFunction approachEndLinearFunction) {
    double[] values = evaluateDiffs(retraction, approachEndLinearFunction);
    // First index whose residual differs in sign from its successor's.
    int i;
    for (i = 0; i < values.length - 2; ++i) {
      if (Math.signum(values[i]) != Math.signum(values[i + 1])) {
        break;
      }
    }
    // Skip samples sharing the same z so the segment is not degenerate.
    int j = i + 1;
    while (j < values.length && retraction.z(j) == retraction.z(i)) {
      ++j;
    }
    if (j != values.length) {
      double xi = retraction.z(i);
      double xi1 = retraction.z(j);
      assert xi != xi1; // otherwise signums wouldn't be different
      short yi = retraction.deflectionAtIndex(i), yi1 = retraction.deflectionAtIndex(j);
      LinearFunction retractionSegment = LinearFunction.lineThroughPoints(
        new Point2D.Double(xi, yi), new Point2D.Double(xi1, yi1));
      /*
       * TODO: Since retractionSegment is almost vertical, a more numerically
       * robust approach is needed.
       */
      Point2D.Double p = approachEndLinearFunction.intersection(retractionSegment);
      // Fix: short-circuit || instead of the accidental non-short-circuit |
      // (same result on booleans, but the idiomatic operator).
      if (Double.isNaN(p.x) || Double.isNaN(p.y)) {
        return junctionNotFound(retraction, values);
      } else {
        // Clamp the intersection into the segment's bounding box, logging when
        // numerical error pushed it outside.
        // TODO: Why was it a warning when we were interpolating?
        if (p.x < xi && p.x < xi1) {
          logger.fine(warningMessageForLineIntersection(xi, xi1, yi, yi1, p));
          p.x = Math.min(xi, xi1);
        }
        if (p.x > xi && p.x > xi1) {
          logger.fine(warningMessageForLineIntersection(xi, xi1, yi, yi1, p));
          p.x = Math.max(xi, xi1);
        }
        if (p.y < yi && p.y < yi1) {
          logger.fine(warningMessageForLineIntersection(xi, xi1, yi, yi1, p));
          p.y = Math.min(yi, yi1);
        }
        if (p.y > yi && p.y > yi1) {
          logger.fine(warningMessageForLineIntersection(xi, xi1, yi, yi1, p));
          p.y = Math.max(yi, yi1);
        }
        assert Math.min(xi, xi1) <= p.x : String.format("xi=%g xi1=%g p=%s", xi, xi1, p);
        assert p.x <= Math.max(xi, xi1);
        assert Math.min(yi, yi1) <= p.y;
        assert p.y <= Math.max(yi, yi1);
        return new ZLsbDeflectionLsbPoint(p.x, p.y);
      }
    } else {
      return junctionNotFound(retraction, values);
    }
  }

  /**
   * Fallback when no crossing with the line was found: logs a warning and
   * returns the sample whose absolute difference from the line is smallest.
   * Note: overwrites {@code values} with absolute values.
   */
  private static ZLsbDeflectionLsbPoint junctionNotFound(DirectionData retraction, double[] values) {
    for (int i = 0; i < values.length; ++i) {
      values[i] = Math.abs(values[i]);
    }
    logger.warning("Didn't find any junction with line.");
    int closest = CommonThings.argMin(values);
    return new ZLsbDeflectionLsbPoint(retraction.z(closest), retraction.deflectionAtIndex(closest));
  }

  /** Formats the segment endpoints and the out-of-bounds intersection point. */
  private static String warningMessageForLineIntersection(
    double xi, double xi1, short yi, short yi1, Point2D.Double p) {
    StringBuilder message = new StringBuilder();
    message.append("p=").append(p);
    message.append(" xi=").append(xi).append(" yi=").append(yi);
    message.append(" xi1=").append(xi1).append(" yi1=").append(yi1);
    return message.toString();
  }

  /** Returns, per time instant, the deflection minus the line evaluated at z. */
  private static double[] evaluateDiffs(
    DirectionData retraction, LinearFunction approachEndLinearFunction) {
    final int n = retraction.timeInstantCount();
    double[] diffs = new double[n];
    for (int i = 0; i < n; ++i) {
      diffs[i] = retraction.deflectionAtIndex(i)
        - approachEndLinearFunction.evaluate(retraction.z(i));
    }
    return diffs;
  }

  /**
   * Offset into the retraction where the region after the contact point begins,
   * together with the de-angled (baseline-subtracted) deflections of that
   * region.
   */
  public static class OffsetAndDeflections {
    public final int offset;
    public final double[] horizontalDeflectionAfterContactPoint;

    public OffsetAndDeflections(int offset, double[] horizontalDeflectionAfterContactPoint) {
      this.offset = offset;
      this.horizontalDeflectionAfterContactPoint = horizontalDeflectionAfterContactPoint;
    }

    @Override
    public String toString() {
      return ReflectionToStringBuilder.toString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }

    @Override
    public int hashCode() {
      return new HashCodeBuilder().append(offset).append(horizontalDeflectionAfterContactPoint).toHashCode();
    }

    @Override
    public boolean equals(Object obj) {
      /*
       * Bug fix: the original tested "obj instanceof IntPair", so two equal
       * OffsetAndDeflections always compared unequal, and an actual IntPair
       * argument would have thrown ClassCastException at the cast below.
       */
      if (!(obj instanceof OffsetAndDeflections)) {
        return false;
      }
      OffsetAndDeflections other = (OffsetAndDeflections) obj;
      return offset == other.offset
        && Arrays.equals(
          horizontalDeflectionAfterContactPoint, other.horizontalDeflectionAfterContactPoint);
    }

    /** Returns a new array with every deflection multiplied by the factor. */
    public double[] copyOfDeflectionTimes(double deflectionLsbToNewtonsFactor) {
      double[] forcesN = new double[horizontalDeflectionAfterContactPoint.length];
      for (int i = forcesN.length; --i >= 0;) {
        forcesN[i] = horizontalDeflectionAfterContactPoint[i] * deflectionLsbToNewtonsFactor;
      }
      return forcesN;
    }

    /** Number of samples after the contact point. */
    public int length() {
      return horizontalDeflectionAfterContactPoint.length;
    }
  }

  /**
   * Cuts at the contact point, and de-angles the deflections.
   *
   * @param contactPointZLsb the contact point's z coordinate, in zLsb.
   * @param baseline the angled baseline to subtract.
   * @param retraction the curve to cut and de-angle.
   * @return Offset into retraction indicating where the contiguous region with
   *         all Z >= contact point begins, and a double[] of deflections of
   *         size n - offset, from which the angled baseline has been
   *         subtracted. This is not a rotation, since the other axis (Z) is
   *         left untouched; no scaling is performed on either axis.
   */
  public static OffsetAndDeflections cutAndDeangle(
    double contactPointZLsb, LinearFunction baseline, DirectionData retraction) {
    int cutOffset = offsetOfEndingContiguousRegionNotBeforeContactPoint(
      retraction.sharedZArray(), contactPointZLsb);
    return createOffsetAndDeflections(baseline, retraction, cutOffset);
  }

  /** Subtracts the baseline from every deflection at or after {@code offset}. */
  static OffsetAndDeflections createOffsetAndDeflections(
    LinearFunction baseline, DirectionData retraction, int offset) {
    final int n = retraction.timeInstantCount() - offset;
    double[] deangled = new double[n];
    for (int i = 0; i < n; ++i) {
      deangled[i] = retraction.deflectionAtIndex(offset + i)
        - baseline.evaluate(retraction.z(offset + i));
    }
    return new OffsetAndDeflections(offset, deangled);
  }

  /**
   * Returns the start index of the contiguous run at the end of {@code z}
   * whose values are all <= {@code contactPointZLsb}.
   *
   * @throws IllegalArgumentException when the last sample is above the contact
   *           point, i.e. the ending run would be empty.
   * @throws IllegalStateException when the sample just before the run is
   *           strictly below the contact point.
   */
  public static int offsetOfEndingContiguousRegionNotBeforeContactPoint(
    short[] z, double contactPointZLsb) {
    Preconditions.checkArgument(
      z[z.length - 1] <= contactPointZLsb,
      "Contact point %s must not be lower than the end of the retraction curve %s.",
      contactPointZLsb, z[z.length - 1]);
    int offset = z.length - 1;
    while (offset >= 0 && z[offset] <= contactPointZLsb) {
      --offset;
    }
    ++offset;
    // perhaps not in all cases?
    Preconditions.checkState(0 <= offset && offset <= z.length - 1, "offset=%s", offset);
    if (offset != 0) {
      short a = z[offset - 1];
      if (a < contactPointZLsb) {
        // Fix: the message used to claim "a <= contactPointZLsb" although the
        // triggering condition is the strict "a < contactPointZLsb".
        String msg = String.format(
          Locale.ROOT, "offset=%d a=%s < contactPointZLsb=%s", offset, a, contactPointZLsb);
        logger.warning(msg);
        throw new IllegalStateException(msg);
      }
    }
    assert z[offset] <= contactPointZLsb : String.format("%s > %s", z[offset], contactPointZLsb);
    return offset;
  }

  /**
   * Returns the start index of the contiguous run at the end of {@code z}
   * whose values are all <= {@code zLsb}, or {@code z.length} when that run is
   * empty.
   */
  public static int offsetOfEndingContiguousRegionNotBefore(short[] z, double zLsb) {
    if (z[z.length - 1] > zLsb) {
      // The ending region is empty.
      return z.length;
    }
    int offset = z.length;
    while (offset > 0 && z[offset - 1] <= zLsb) {
      --offset;
    }
    // perhaps not in all cases?
    Preconditions.checkState(0 <= offset && offset <= z.length - 1, "offset=%s", offset);
    assert z[offset] <= zLsb : String.format("%s > %s", z[offset], zLsb);
    return offset;
  }

  public static final double defaultEmpiricalBaselinePercentile = 10.0;

  /**
   * Runs the chain of peak filters on de-angled deflection data, clearing the
   * entries of {@code potentialPeaks} that fail any filter. Thresholds named
   * *N (newtons) are converted to LSB units via
   * {@code deflectionLsbToNewtonsFactor}; window sizes named *M (metres) are
   * converted to sample counts via {@code zStepM}.
   *
   * NOTE(review): the metre-valued windows are negated (or divided by -zStepM)
   * before conversion, which looks like it assumes zStepM < 0 for retraction
   * data — confirm against callers.
   *
   * @param deangledDeflection
   *          deflection with the angled baseline already subtracted.
   * @param leftMostlyHigherWindowSizeM
   *          8e-9 or greater for nanoscope5_01301721.242
   * @param potentialPeaks
   *          in/out: candidate flags, and-combined with each filter's verdict.
   * @return the {@code empiricalBaseline} argument, unchanged.
   * @TODO: Try out the algorithm of Algorithms Mol Biol. 2011 Jun 6;6(1):16.
   */
  public static double findPeaksInvariantUponTranslationOfDeflectionData(
    double[] deangledDeflection, double deflectionLsbToNewtonsFactor, double zStepM,
    double empiricalBaseline, double minPeakForceRelativeToEmpiricalBaselineN,
    double medianAbsoluteDeviationLowerBoundN, double leftMostlyHigherWindowSizeM,
    double leftMostlyHigherFraction, double leftMostlyHigherN,
    double rightMuchHigherThanAllN, double rightMuchHigherN, double atMostOnePeakWindowM, boolean[] potentialPeaks) {
    Preconditions.checkArgument(atMostOnePeakWindowM >= 0, atMostOnePeakWindowM);
    atMostOnePeakWindowM = -atMostOnePeakWindowM;
    // 11201833.016 wants leftMostlyHigherWindowSizeM <= 6.1
    double rightHigherThanAllWindowSizeM = -3.5e-9;
    double rightMuchHigherWindowSizeM = -3e-9;
    /*
     * sampleFileContent3 cannot take 45, but 40 is ok for rightMuchHigherN
     */
    int rightHigherThanAllWindowSize = (int) Math.ceil(rightHigherThanAllWindowSizeM / zStepM), rightMuchHigherWindowSize = (int) Math.ceil(rightMuchHigherWindowSizeM
      / zStepM), leftMostlyHigherWindowSize = (int) Math.ceil(leftMostlyHigherWindowSizeM / -zStepM);
    double rightMuchHigherThanAllLsb = rightMuchHigherThanAllN / deflectionLsbToNewtonsFactor;
    double rightMuchHigherLsb = rightMuchHigherN / deflectionLsbToNewtonsFactor;
    int atMostOnePeakWindowSize = (int) Math.ceil(atMostOnePeakWindowM / zStepM);
    /*
     * 60 is not enough, but 80 works well for good curves. For refolding, e.g.
     * 11271156.010, 80 is not enough, but 120 is be enough for far-from-surface
     * curves. That's not enough by itself for breakAway/fetchMolecule, but for
     * those the extra base level filter with a low threshold is just fine.
     */
    // 10% is not enough for TestResources.refolding1.
    /*
     * This stuff here is mainly for refolding. Should we only use it then? In
     * principle it can also help for normal curves, when the baseline cannot be
     * properly determined.
     */
    double minimumPeakForceToEmpiricalBaselineLsb = minPeakForceRelativeToEmpiricalBaselineN
      / deflectionLsbToNewtonsFactor;

    filterMinimumPeakForceToEmpiricalBaseline(
      deangledDeflection, minimumPeakForceToEmpiricalBaselineLsb, potentialPeaks, empiricalBaseline);
    filterRightMuchHigher(
      deangledDeflection, rightMuchHigherWindowSize, rightMuchHigherThanAllLsb, rightMuchHigherLsb,
      potentialPeaks);
    filterRightHigher(deangledDeflection, rightHigherThanAllWindowSize, potentialPeaks);
    filterLeftMostlyHigher(
      deangledDeflection, leftMostlyHigherWindowSize,
      (int) Math.ceil(leftMostlyHigherFraction * leftMostlyHigherWindowSize), leftMostlyHigherN
        / deflectionLsbToNewtonsFactor, potentialPeaks);

    double madWindowSizeM = -10e-9;
    int madWindowSize = (int) Math.ceil(madWindowSizeM / zStepM);
    double madLowerBoundLsb = medianAbsoluteDeviationLowerBoundN / deflectionLsbToNewtonsFactor;
    filterMedianAbsoluteDeviation(
      deangledDeflection, madWindowSize, madLowerBoundLsb, potentialPeaks);
    // Must be the last step, with perhaps the exception of
    // filterBasalLevel.
    filterUniquePeakWindow(deangledDeflection, atMostOnePeakWindowSize, potentialPeaks);
    return empiricalBaseline;
  }

  /**
   * Peaks increase locally the spread of the data: keeps only points whose
   * centred window has a median absolute deviation of at least
   * {@code madLowerBoundLsb}.
   */
  private static void filterMedianAbsoluteDeviation(
    double[] deflection, int windowSize, final double madLowerBoundLsb, boolean[] potentialPeaks) {
    MovingWindows.centerDeflectionWindow(
      deflection, windowSize, potentialPeaks, new Function<PointFrequencyFunctionInput, Boolean>() {
        @Override
        public Boolean apply(PointFrequencyFunctionInput input) {
          double deviation = CommonThings.medianAbsoluteDeviation(input.sortedMap);
          logger.log(Level.FINER, "mad={0} madLowerBoundLsb={1}", new Object[] {
          deviation, madLowerBoundLsb});
          return deviation >= madLowerBoundLsb;
        }
      });
    logPotentialPeaks(potentialPeaks);
  }

  /**
   * @param deflection
   *          Must be de-angled, but otherwise any translation does not matter.
   * @param empiricalBaselinePercentile
   *          percentage from the top; the (100 - this) percentile is returned.
   */
  public static double findEmpiricalBasalLevel(
    double[] deflection, double empiricalBaselinePercentile) {
    return new Percentile(100 - empiricalBaselinePercentile).evaluate(deflection);
  }

  /**
   * Keeps only points at least {@code minimumPeakForceToEmpiricalBaselineLsb}
   * below the empirical baseline (peaks point downwards in LSB units).
   */
  private static void filterMinimumPeakForceToEmpiricalBaseline(
    double[] deflection, double minimumPeakForceToEmpiricalBaselineLsb, boolean[] potentialPeaks,
    double empiricalBaseline) {
    for (int i = 0; i < deflection.length; ++i) {
      boolean farEnoughBelowBaseline =
        deflection[i] - empiricalBaseline <= -minimumPeakForceToEmpiricalBaselineLsb;
      potentialPeaks[i] &= farEnoughBelowBaseline;
    }
  }

  /**
   * Rules out candidates whose tip distance is positive; only non-positive
   * tip distances survive.
   */
  private static void filterForNonPositiveTipDistance(
    double[] tipDistanceInAnyUnitOfMeasurement, boolean[] potentialPeaks) {
    int n = tipDistanceInAnyUnitOfMeasurement.length;
    for (int i = 0; i < n; ++i) {
      if (!isTipDistanceSignOk(tipDistanceInAnyUnitOfMeasurement[i])) {
        potentialPeaks[i] = false;
      }
    }
  }

  /** A tip distance has an acceptable sign iff it is non-positive; NaN fails. */
  private static boolean isTipDistanceSignOk(double d) {
    // Double.compare orders -0.0 and NaN exactly like the plain d <= 0 check.
    return Double.compare(d, 0.0) <= 0;
  }

  /**
   * Delegates to {@link CommonThings#keepOnlyLowestInEachWindow} so that at
   * most one candidate — the one with the lowest deflection — survives within
   * each window of {@code windowSize}.
   */
  static void filterUniquePeakWindow(double[] deflection, int windowSize, boolean[] potentialPeaks) {
    CommonThings.keepOnlyLowestInEachWindow(deflection, windowSize, potentialPeaks);
  }

  /**
   * Clears candidates whose deflection does not lie at least
   * {@code minimumPeakForceLsb} below zero.
   */
  private static void filterBasalLevel(
    double[] deflection, double minimumPeakForceLsb, boolean[] potentialPeaks) {
    double threshold = -minimumPeakForceLsb;
    for (int i = 0; i < deflection.length; ++i) {
      // !(x <= t) rather than x > t so NaN clears the flag, as before.
      if (!(deflection[i] <= threshold)) {
        potentialPeaks[i] = false;
      }
    }
  }

  /**
   * Keeps a candidate only when, in the window to its left, at least
   * {@code leftMostlyHigherMinimum} points have a deflection value of at least
   * the candidate's value plus {@code leftMostlyHigherN}.
   */
  private static void filterLeftMostlyHigher(
    double[] deflection, int leftMostlyHigherWindowSize, final int leftMostlyHigherMinimum,
    final double leftMostlyHigherN, boolean[] potentialPeaks) {
    Function<PointFrequencyFunctionInput, Boolean> enoughHigherOnTheLeft =
      new Function<PointFrequencyFunctionInput, Boolean>() {
        @Override
        public Boolean apply(PointFrequencyFunctionInput input) {
          double cutoff = input.x + leftMostlyHigherN;
          int countAtOrAboveCutoff = CommonThings.totalSize(input.sortedMap.tailMap(cutoff));
          return countAtOrAboveCutoff >= leftMostlyHigherMinimum;
        }
      };
    MovingWindows.leftDeflectionWindow(
      deflection, leftMostlyHigherWindowSize, potentialPeaks, enoughHigherOnTheLeft);
  }

  /**
   * Keeps a candidate only when it is a minimum of its right-hand window:
   * every value in the window must be at least the candidate's own value.
   */
  static void filterRightHigher(double[] deflection, int windowSize, boolean[] potentialPeaks) {
    Function<PointFrequencyFunctionInput, Boolean> candidateIsWindowMinimum =
      new Function<PointFrequencyFunctionInput, Boolean>() {
        @Override
        public Boolean apply(PointFrequencyFunctionInput window) {
          return window.x <= window.sortedMap.firstDoubleKey();
        }
      };
    MovingWindows.rightDeflectionWindow(
      deflection, windowSize, potentialPeaks, true, candidateIsWindowMinimum);
    logPotentialPeaks(potentialPeaks);
  }

  /**
   * Logs the indices of the surviving candidates; the index list is only
   * materialised when FINER logging is actually enabled.
   */
  private static void logPotentialPeaks(boolean[] potentialPeaks) {
    if (logger.isLoggable(Level.FINER)) {
      logger.log(Level.FINER, "potentialPeaks={0}", CommonThings.indicesOfTrue(potentialPeaks));
    }
  }

  /**
   * Two-pass filter on the window to the right of each candidate: first the
   * window median must lie at least {@code minimumDelta} above the candidate,
   * then every window value must lie at least {@code minimumDeltaAllLsb} above
   * it.
   */
  static void filterRightMuchHigher(
    final double[] deflection, int windowSize, final double minimumDeltaAllLsb,
    final double minimumDelta, boolean[] potentialPeaks) {
    // NOTE(review): a precondition 0 <= minimumDeltaAllLsb <= minimumDelta
    // was considered but is deliberately not enforced.
    Function<PointFrequencyFunctionInput, Boolean> medianMuchHigher =
      new Function<PointFrequencyFunctionInput, Boolean>() {
        @Override
        public Boolean apply(PointFrequencyFunctionInput window) {
          return CommonThings.median(window.sortedMap) - window.x >= minimumDelta;
        }
      };
    MovingWindows.rightDeflectionWindow(
      deflection, windowSize, potentialPeaks, false, medianMuchHigher);
    Function<PointFrequencyFunctionInput, Boolean> allHigherEnough =
      new Function<PointFrequencyFunctionInput, Boolean>() {
        @Override
        public Boolean apply(PointFrequencyFunctionInput window) {
          return window.sortedMap.firstDoubleKey() - window.x >= minimumDeltaAllLsb;
        }
      };
    MovingWindows.rightDeflectionWindow(
      deflection, windowSize, potentialPeaks, true, allHigherEnough);
    logPotentialPeaks(potentialPeaks);
  }

  /**
   * Convenience method to do a full analysis pipeline: finds the approach
   * contact point and slope, then delegates to
   * {@link #analyseWithGivenApproachContactPointAndSlope}. Performance: 23
   * curves/second with and without contour length calculations.
   * 
   * @param slope
   *          NaN means that the slope should be computed.
   * 
   * @return highly redundant object with all information we could gather
   */
  public static RedundantPicoCurveInfo analyse(
    PlainPicoData plainPicoData, boolean useApproachEndLineForRetraction, double minimumPeakForceN,
    double minimumPeakContourLengthM, double minContourLengthDeltaM,
    double minPeakForceRelativeToEmpiricalBaselineN, double medianAbsoluteDeviationLowerBoundN,
    double leftMostlyHigherWindowSizeM, double leftMostlyHigherFraction, double leftMostlyHigherN, double rightMuchHigherThanAllN, double rightMuchHigherN, double atMostOnePeakWindowM,
    double slope, double kbtOverP) {
    ContactPointAndSlope approachContactPointAndSlope = PicoDataAnalyser.findApproachContactPointAndSlope(
      plainPicoData.sharedApproach(), slope);
    return analyseWithGivenApproachContactPointAndSlope(
      plainPicoData, useApproachEndLineForRetraction, approachContactPointAndSlope,
      minimumPeakForceN, minimumPeakContourLengthM, minContourLengthDeltaM,
      minPeakForceRelativeToEmpiricalBaselineN, medianAbsoluteDeviationLowerBoundN,
      leftMostlyHigherWindowSizeM, leftMostlyHigherFraction, leftMostlyHigherN, rightMuchHigherThanAllN, rightMuchHigherN, atMostOnePeakWindowM, kbtOverP);
  }

  /**
   * Immutable bundle of every parameter needed by the peak-analysis entry
   * points. Input to
   * {@link PicoDataAnalyser#analyseWithGivenRetractionContactPointAndBaselineSlope}
   * .
   */
  public static class PeakAnalysisInput {

    /** Stores every argument as-is; no validation or defensive copying. */
    public PeakAnalysisInput(PlainPicoData plainPicoData,
      ContactPointsAndSlope contactPointsAndSlope, double minimumPeakForceN,
      double minimumPeakContourLengthM, double minContourLengthDeltaM,
      double minPeakForceRelativeToEmpiricalBaselineN, double medianAbsoluteDeviationLowerBoundN,
      double leftMostlyHigherWindowSizeM, double leftMostlyHigherFraction, double leftMostlyHigherN,
      double rightMuchHigherThanAllN, double rightMuchHigherN, double atMostOnePeakWindowM,
      double kbtOverP) {
      this.plainPicoData = plainPicoData;
      this.contactPointsAndSlope = contactPointsAndSlope;
      this.minimumPeakForceN = minimumPeakForceN;
      this.minimumPeakContourLengthM = minimumPeakContourLengthM;
      this.minContourLengthDeltaM = minContourLengthDeltaM;
      this.minPeakForceRelativeToEmpiricalBaselineN = minPeakForceRelativeToEmpiricalBaselineN;
      this.medianAbsoluteDeviationLowerBoundN = medianAbsoluteDeviationLowerBoundN;
      this.leftMostlyHigherWindowSizeM = leftMostlyHigherWindowSizeM;
      this.leftMostlyHigherFraction = leftMostlyHigherFraction;
      this.leftMostlyHigherN = leftMostlyHigherN;
      this.rightMuchHigherThanAllN = rightMuchHigherThanAllN;
      this.rightMuchHigherN = rightMuchHigherN;
      this.atMostOnePeakWindowM = atMostOnePeakWindowM;
      this.kbtOverP = kbtOverP;
    }

    // Every field simply mirrors the constructor argument of the same name.
    public final PlainPicoData plainPicoData;
    public final ContactPointsAndSlope contactPointsAndSlope;
    public final double minimumPeakForceN;
    public final double minimumPeakContourLengthM;
    public final double minContourLengthDeltaM;
    public final double minPeakForceRelativeToEmpiricalBaselineN;
    public final double medianAbsoluteDeviationLowerBoundN;
    public final double leftMostlyHigherWindowSizeM;
    public final double leftMostlyHigherFraction;
    public final double leftMostlyHigherN;
    public final double rightMuchHigherThanAllN;
    public final double rightMuchHigherN;
    public final double atMostOnePeakWindowM;
    public final double kbtOverP;

    /** Shortcut for {@code contactPointsAndSlope.slopeZLsbDefLsb}. */
    public double slopeZLsbDefLsb() {
      return contactPointsAndSlope.slopeZLsbDefLsb;
    }

    /** Shortcut for {@code contactPointsAndSlope.approachContactPointAndSlope()}. */
    public ContactPointAndSlope approachContactPointAndSlope() {
      return contactPointsAndSlope.approachContactPointAndSlope();
    }

    /** Shortcut for {@code contactPointsAndSlope.approachContactPoint}. */
    public ZLsbDeflectionLsbPoint approachContactPoint() {
      return contactPointsAndSlope.approachContactPoint;
    }

    /** Shortcut for {@code contactPointsAndSlope.retractionContactPoint}. */
    public ZLsbDeflectionLsbPoint retractionContactPoint() {
      return contactPointsAndSlope.retractionContactPoint;
    }
  }

  /**
   * Derives the retraction contact point from the given approach contact
   * point and slope, then runs the full analysis via
   * {@link #analyseWithGivenContactPointsAndSlope}.
   */
  public static RedundantPicoCurveInfo analyseWithGivenApproachContactPointAndSlope(
    PlainPicoData plainPicoData, boolean useApproachEndLineForRetraction,
    ContactPointAndSlope contactPointAndSlope, double minimumPeakForceN,
    double minimumPeakContourLengthM, double minContourLengthDeltaM,
    double minPeakForceRelativeToEmpiricalBaselineN, double medianAbsoluteDeviationLowerBoundN,
    double leftMostlyHigherWindowSizeM, double leftMostlyHigherFraction, double leftMostlyHigherN, double rightMuchHigherThanAllN, double rightMuchHigherN, double atMostOnePeakWindowM,
    double kbtOverP) {
    ContactPointsAndSlope contactPointsAndSlope = findContactPointsAndSlope(
      plainPicoData, contactPointAndSlope, useApproachEndLineForRetraction);
    return analyseWithGivenContactPointsAndSlope(
      plainPicoData, contactPointsAndSlope, minimumPeakForceN, minimumPeakContourLengthM,
      minContourLengthDeltaM, minPeakForceRelativeToEmpiricalBaselineN,
      medianAbsoluteDeviationLowerBoundN, leftMostlyHigherWindowSizeM,
      leftMostlyHigherFraction, leftMostlyHigherN,
      rightMuchHigherThanAllN, rightMuchHigherN,
      atMostOnePeakWindowM, kbtOverP);
  }

  /**
   * Bundles the given approach contact point and slope together with the
   * retraction contact point derived from them.
   */
  public static ContactPointsAndSlope findContactPointsAndSlope(
    PlainPicoData plainPicoData, ContactPointAndSlope approachContactPointAndSlope,
    boolean useApproachEndLineForRetraction) {
    return new ContactPointsAndSlope(
      approachContactPointAndSlope.contactPoint(),
      findRetractionContactPoint(
        plainPicoData, approachContactPointAndSlope, useApproachEndLineForRetraction),
      approachContactPointAndSlope.slopeZLsbDefLsb);
  }

  /**
   * Locates the retraction contact point: either the first intersection of
   * the retraction data with the approach end line, or the point derived from
   * the given slope alone.
   */
  private static ZLsbDeflectionLsbPoint findRetractionContactPoint(
    PlainPicoData plainPicoData, ContactPointAndSlope approachContactPointAndSlope,
    boolean useApproachEndLineForRetraction) {
    if (useApproachEndLineForRetraction) {
      return firstIntersectionWithLine(
        plainPicoData.sharedRetraction(), approachContactPointAndSlope.baseline());
    }
    return findRetractionContactPointGivenSlope(
      plainPicoData.retraction, approachContactPointAndSlope.slopeZLsbDefLsb);
  }

  /**
   * Convenience overload: bundles all arguments into a
   * {@link PeakAnalysisInput} and delegates to
   * {@link #analyseWithGivenContactPointsAndSlope(PeakAnalysisInput)}.
   */
  public static RedundantPicoCurveInfo analyseWithGivenContactPointsAndSlope(
    PlainPicoData plainPicoData, ContactPointsAndSlope contactPointsAndSlope,
    double minimumPeakForceN, double minimumPeakContourLengthM, double minContourLengthDeltaM,
    double minPeakForceRelativeToEmpiricalBaselineN, double medianAbsoluteDeviationLowerBoundN,
    double leftMostlyHigherWindowSizeM, double leftMostlyHigherFraction, double leftMostlyHigherN,
    double rightMuchHigherThanAllN, double rightMuchHigherN, double atMostOnePeakWindowM,
    double kbtOverP) {
    PeakAnalysisInput peakAnalysisInput = new PeakAnalysisInput(
      plainPicoData, contactPointsAndSlope, minimumPeakForceN, minimumPeakContourLengthM,
      minContourLengthDeltaM, minPeakForceRelativeToEmpiricalBaselineN,
      medianAbsoluteDeviationLowerBoundN, leftMostlyHigherWindowSizeM,
      leftMostlyHigherFraction, leftMostlyHigherN,
      rightMuchHigherThanAllN, rightMuchHigherN,
      atMostOnePeakWindowM, kbtOverP);
    return analyseWithGivenContactPointsAndSlope(peakAnalysisInput);
  }

  /**
   * Full analysis for known contact points and slope: cuts and de-angles the
   * retraction at the retraction contact point, classifies each point as peak
   * or non-peak, and bundles everything into a {@link RedundantPicoCurveInfo}.
   */
  public static RedundantPicoCurveInfo analyseWithGivenContactPointsAndSlope(PeakAnalysisInput input) {
    OffsetAndDeflections offsetAndDeflections = PicoDataAnalyser.cutAndDeangle(
      input.retractionContactPoint().zLsb,
      input.retractionContactPoint().linearFunctionWithSlope(input.slopeZLsbDefLsb()),
      input.plainPicoData.retraction);
    EmpiricalBaselineAndPeakOrNots ebapon = computePeakOrNots(
      input.plainPicoData, input.retractionContactPoint().zLsb, offsetAndDeflections,
      input.minimumPeakForceN, input.minimumPeakContourLengthM, input.minContourLengthDeltaM,
      input.minPeakForceRelativeToEmpiricalBaselineN, input.medianAbsoluteDeviationLowerBoundN,
      input.leftMostlyHigherWindowSizeM, input.leftMostlyHigherFraction, input.leftMostlyHigherN,
      input.rightMuchHigherThanAllN, input.rightMuchHigherN, input.atMostOnePeakWindowM,
      input.kbtOverP);
    /*
     * We could re-compute start-index of the peaks immediately after the
     * removed ones, fit their contour length again, and filter out the
     * non-increasing contour lengths and so on until convergence. But even this
     * is not perfect. We should re-iterate the whole procedure from point where
     * one peak can affect the status of another one, such as double peak
     * counting etc, until convergence, or a cycle is reached. But for now this
     * is fine as well, especially since the fake peaks just removed would
     * probably negatively influence the WLC fit of the good peaks.
     */
    return new RedundantPicoCurveInfo(
      input.plainPicoData, input.approachContactPointAndSlope(), input.retractionContactPoint(),
      offsetAndDeflections, ebapon.empiricalBaseline, ebapon.peakOrNots, input.kbtOverP);
  }

  /**
   * Immutable pair of the empirical baseline value and the per-point peak
   * classification.
   */
  public static class EmpiricalBaselineAndPeakOrNots {
    // Baseline level as computed by findEmpiricalBasalLevel().
    public final double empiricalBaseline;
    // One entry per data point after the contact point.
    public final PeakOrNot[] peakOrNots;

    public EmpiricalBaselineAndPeakOrNots(double empiricalBaseline, PeakOrNot[] peakOrNots) {
      this.empiricalBaseline = empiricalBaseline;
      this.peakOrNots = peakOrNots;
    }
  }

  /**
   * Computes the per-point peak classification together with the empirical
   * baseline: estimates the basal level and the tip distances, detects the
   * peak slopes, and converts them to {@link PeakOrNot}s.
   * 
   * <p>
   * Historical calibration note: DMSO 0% at 50.1 nm/s gives mean
   * 119.28806547123655 and stdev of the average 4.6490143606499972 for
   * minimumPeakForceN.
   * 
   * @return the empirical baseline and one {@link PeakOrNot} per point after
   *         the contact point
   */
  public static EmpiricalBaselineAndPeakOrNots computePeakOrNots(
    PlainPicoData plainPicoData, double retractionContactPointZLsb,
    OffsetAndDeflections offsetAndDeflections, double minimumPeakForceN,
    double minimumPeakContourLengthM, double minContourLengthDeltaM,
    double minPeakForceRelativeToEmpiricalBaselineN, double medianAbsoluteDeviationLowerBoundN,
    double leftMostlyHigherWindowSizeM, double leftMostlyHigherFraction, double leftMostlyHigherPn,
    double rightMuchHigherThanAllN, double rightMuchHigherN, double atMostOnePeakWindowM,
    double kbtOverP) {
    double empiricalBaseline = findEmpiricalBasalLevel(
      offsetAndDeflections.horizontalDeflectionAfterContactPoint,
      defaultEmpiricalBaselinePercentile);
    double[] tipDistanceM = tipDistanceMetres(
      plainPicoData, retractionContactPointZLsb, offsetAndDeflections);
    PeakSlopes peakSlopes = findStartSlopeIndices(
      plainPicoData, tipDistanceM, offsetAndDeflections, minimumPeakForceN, empiricalBaseline,
      minPeakForceRelativeToEmpiricalBaselineN, medianAbsoluteDeviationLowerBoundN,
      leftMostlyHigherWindowSizeM, leftMostlyHigherFraction, leftMostlyHigherPn,
      rightMuchHigherThanAllN, rightMuchHigherN, atMostOnePeakWindowM);
    return peakSlopesToPeakOrNots(
      plainPicoData, offsetAndDeflections, minimumPeakContourLengthM, minContourLengthDeltaM,
      empiricalBaseline, tipDistanceM, peakSlopes, kbtOverP);
  }

  /**
   * Converts peak slopes into {@link PeakOrNot}s: fits a contour length to
   * every peak, rejects peaks with contour lengths below
   * {@code minimumPeakContourLengthM}, and finally filters for increasing
   * contour lengths.
   */
  public static EmpiricalBaselineAndPeakOrNots peakSlopesToPeakOrNots(
    PlainPicoData plainPicoData, OffsetAndDeflections offsetAndDeflections,
    double minimumPeakContourLengthM, double minContourLengthDeltaM, double empiricalBaseline,
    double[] tipDistanceM, PeakSlopes peakSlopes, double kbtOverP) {
    int[] startSlopeIndices = peakSlopes.toStartSlopeIndices(offsetAndDeflections.length());
    double[] forcesN = offsetAndDeflections.copyOfDeflectionTimes(plainPicoData.deflectionLsbToNewtonsFactor());
    checkSelectedForcesAreNonPositive(startSlopeIndices, forcesN);
    double[] contourLengths = estimateContourLengths(
      forcesN, tipDistanceM, startSlopeIndices, kbtOverP);
    PeakOrNot[] peakOrNots = new PeakOrNot[offsetAndDeflections.horizontalDeflectionAfterContactPoint.length];
    for (int i = peakOrNots.length; --i >= 0;) {
      // Contour lengths are 0 for non-peaks, so short/unfittable peaks and
      // non-peaks alike end up as NON_PEAK here.
      peakOrNots[i] = contourLengths[i] >= minimumPeakContourLengthM ? PeakOrNot.of(
        startSlopeIndices[i], contourLengths[i]) : PeakOrNot.NON_PEAK;
    }
    // One final peak filtering step. TODO: integrate it somehow with
    // findPeaks().
    loggingFilterPeaksForIncreasingContourLength(
      offsetAndDeflections.horizontalDeflectionAfterContactPoint, minContourLengthDeltaM,
      peakOrNots);
    return new EmpiricalBaselineAndPeakOrNots(empiricalBaseline, peakOrNots);
  }

  /**
   * Sanity check: every point selected as a peak (start-slope index other than
   * -1) must carry a non-positive force; otherwise throws
   * {@link IllegalArgumentException} listing the selected forces.
   */
  private static void checkSelectedForcesAreNonPositive(int[] startSlopeIndices, double[] forcesN) {
    for (int i = 0; i < startSlopeIndices.length; ++i) {
      boolean selected = startSlopeIndices[i] != -1;
      // !(x <= 0) rather than x > 0 so that NaN also fails the check.
      if (selected && !(forcesN[i] <= 0)) {
        throw new IllegalArgumentException("i=" + i + " selected="
          + CommonThings.selected(forcesN, CommonThings.nonEqual(startSlopeIndices, -1)));
      }
    }
  }

  /**
   * Runs the full peak-filter cascade and returns the start-slope index of
   * every surviving peak.
   * 
   * <p>
   * Steps: translation-invariant deflection filters, tip-distance sign filter,
   * basal-level filter, then {@link #startPeakSlopes}; finally peaks whose
   * rising slope is flat (start value equal to the peak value) are dropped.
   */
  public static PeakSlopes findStartSlopeIndices(
    PlainPicoData plainPicoData, double[] tipDistanceM, OffsetAndDeflections offsetAndDeflections,
    double minimumPeakForceN, double empiricalBaseline,
    double minPeakForceRelativeToEmpiricalBaselineN, double medianAbsoluteDeviationLowerBoundN,
    double leftMostlyHigherWindowSizeM, double leftMostlyHigherFraction, double leftMostlyHigherN,
    double rightMuchHigherThanAllN, double rightMuchHigherN, double atMostOnePeakWindowM) {
    int n = offsetAndDeflections.length();
    // Start with every point as a candidate; each filter can only clear flags.
    boolean[] peaksIndicator = new boolean[n];
    Arrays.fill(peaksIndicator, true);
    PicoDataAnalyser.findPeaksInvariantUponTranslationOfDeflectionData(
      offsetAndDeflections.horizontalDeflectionAfterContactPoint,
      plainPicoData.deflectionLsbToNewtonsFactor(), plainPicoData.actualRetractionStepSizeMetres(),
      empiricalBaseline, minPeakForceRelativeToEmpiricalBaselineN,
      medianAbsoluteDeviationLowerBoundN, leftMostlyHigherWindowSizeM,
      leftMostlyHigherFraction, leftMostlyHigherN,
      rightMuchHigherThanAllN, rightMuchHigherN,
      atMostOnePeakWindowM, peaksIndicator);
    filterForNonPositiveTipDistance(tipDistanceM, peaksIndicator);

    double minimumPeakForceLsb = minimumPeakForceN / plainPicoData.deflectionLsbToNewtonsFactor();
    filterBasalLevel(
      offsetAndDeflections.horizontalDeflectionAfterContactPoint, minimumPeakForceLsb,
      peaksIndicator);
    assert CommonThings.allSelectedAreNonPositive(
      offsetAndDeflections.horizontalDeflectionAfterContactPoint, peaksIndicator) : CommonThings.selected(
      offsetAndDeflections.horizontalDeflectionAfterContactPoint, peaksIndicator);
    PeakSlopes ps = startPeakSlopes(
      offsetAndDeflections.horizontalDeflectionAfterContactPoint, tipDistanceM, peaksIndicator);
    int[] startSlopeIndices = ps.toStartSlopeIndices(n);
    // Drop peaks whose rising slope has zero height.
    for (int i = 0; i < startSlopeIndices.length; ++i) {
      if (startSlopeIndices[i] != -1) {
        assert peaksIndicator[i];
        assert offsetAndDeflections.horizontalDeflectionAfterContactPoint[startSlopeIndices[i]] >= offsetAndDeflections.horizontalDeflectionAfterContactPoint[i];
        if (offsetAndDeflections.horizontalDeflectionAfterContactPoint[startSlopeIndices[i]] == offsetAndDeflections.horizontalDeflectionAfterContactPoint[i]) {
          startSlopeIndices[i] = -1;
        }
      }
    }
    return PeakSlopes.of(startSlopeIndices);
  }

  /**
   * Logging wrapper around {@link #filterPeaksForIncreasingContourLength}.
   * Gives preference to the lower horizontalDeflectionAfterContactPoint value,
   * and then to the higher index, when peaks at different positions have the
   * same contour length or the ordering of the contour lengths is not the same
   * as that of the peak index.
   * 
   * @param horizontalDeflectionAfterContactPoint
   * @param peakOrNots
   *          input-output
   */
  static void loggingFilterPeaksForIncreasingContourLength(
    double[] horizontalDeflectionAfterContactPoint, double minDeltaM, PeakOrNot[] peakOrNots) {
    Level logLevel = Level.FINEST;
    // The before/after counts are only computed when FINEST is loggable.
    int beforeCount = logger.isLoggable(logLevel) ? PeakOrNot.countPeaks(peakOrNots) : 0;
    filterPeaksForIncreasingContourLength(
      horizontalDeflectionAfterContactPoint, minDeltaM, peakOrNots);
    int afterCount = logger.isLoggable(logLevel) ? PeakOrNot.countPeaks(peakOrNots) : 0;
    if (beforeCount != afterCount) {
      logger.log(
        logLevel, "Before increasing contour length filtering: {0} After: {1}.", new Object[] {
        beforeCount, afterCount});
    }
  }

  /**
   * Removes peaks that violate the increasing-contour-length ordering.
   * Surviving peaks are processed from the lowest deflection value (deepest
   * peak) upwards; each removes every remaining candidate that disagrees with
   * it, within a tolerance of {@code minDeltaM}.
   */
  public static void filterPeaksForIncreasingContourLength(
    double[] horizontalDeflectionAfterContactPoint, double minDeltaM, PeakOrNot[] peakOrNots) {
    Preconditions.checkArgument(minDeltaM >= 0);
    /*
     * head is the least element, which is good since
     * horizontalDeflectionAfterContactPoint is negated
     */
    PriorityQueue<DoubleIntPair> pq = new PriorityQueue<DoubleIntPair>();
    for (int i = 0; i < peakOrNots.length; ++i) {
      if (peakOrNots[i].isPeak() && peakOrNots[i].hasContorLength()) {
        // The index is stored negated; presumably DoubleIntPair breaks
        // deflection ties toward the higher index -- its ordering is defined
        // elsewhere.
        pq.add(new DoubleIntPair(horizontalDeflectionAfterContactPoint[i], -i));
      }
    }
    DoubleIntPair head;
    while ((head = pq.poll()) != null) {
      PeakOrNot headPeakOrNotAnyMore = peakOrNots[-head.y];
      if (headPeakOrNotAnyMore.isPeak()) {
        // Still a peak.
        for (DoubleIntPair other : pq) {
          PeakOrNot peakOrNot = peakOrNots[-other.y];
          // Because -i was stored, diffIndex = headIndex - otherIndex: it is
          // non-negative when other lies at or before head in the array.
          int diffIndex = other.y - head.y;
          assert diffIndex != 0 : "just removed this one from the queue";
          double diffContourLength = peakOrNot.nonNegativeContourLengthMetres
            - headPeakOrNotAnyMore.nonNegativeContourLengthMetres;
          /*
           * Remove other when its contour length is not smaller than head's
           * by more than minDeltaM for earlier indices, or not larger than
           * head's by more than minDeltaM for later indices.
           */
          if ((diffIndex >= 0 ? +1 : -1) * diffContourLength >= -minDeltaM) {
            // Whether it was a peak or not, make it non-peak.
            peakOrNots[-other.y] = PeakOrNot.NON_PEAK;
          }
        }
      }
    }
  }

  /** Strategy for estimating a contour length from one peak's rising slope. */
  interface ContourLengthEstimator {
    /**
     * @param nonNegativeTipDistance
     *          negated (hence non-negative) tip distances over the rising
     *          region
     * @param nonNegativeForce
     *          negated forces over the same region, same length
     * @return the estimated contour length
     */
    double estimate(double[] nonNegativeTipDistance, double[] nonNegativeForce);
  }

  /**
   * Fits a contour length to every detected peak's rising slope.
   *
   * @param forcesN
   *          mostly negative
   * @param tipDistanceM
   *          mostly negative
   * @param startSlopeIndices
   *          inclusive start of each peak's rising slope; -1 for non-peaks
   * @return 0 for non-peaks and when contour length cannot be computed
   */
  private static double[] estimateContourLengths(
    double[] forcesN, double[] tipDistanceM, int[] startSlopeIndices, double kbtOverP) {
    final int n = forcesN.length;
    Preconditions.checkArgument(n == tipDistanceM.length && n == startSlopeIndices.length);
    ContourLengthEstimator estimator =
      new LevenbergMarquardtOptimizerContourLengthEstimator(kbtOverP);
    double[] contourLengths = new double[n];
    for (int i = 0; i < n; ++i) {
      int start = startSlopeIndices[i];
      if (start == -1) {
        continue; // non-peak: leave the default 0
      }
      // Negate the rising region so both inputs are non-negative, as the
      // estimator's parameter names require.
      double[] nonNegativeTipDistance = CommonThings.negated(tipDistanceM, start, i + 1);
      double[] nonNegativeForce = CommonThings.negated(forcesN, start, i + 1);
      assert nonNegativeTipDistance.length == nonNegativeForce.length;
      contourLengths[i] = estimator.estimate(nonNegativeTipDistance, nonNegativeForce);
    }
    return contourLengths;
  }

  /**
   * Computes the tip distance in metres for every point after the contact
   * point: surface-relative z minus the deflection contribution, both
   * converted to metres. The result is mostly negative.
   */
  public static double[] tipDistanceMetres(
    PlainPicoData plainPicoData, double retractionContactPointZLsb,
    OffsetAndDeflections offsetAndDeflections) {
    // tipDistanceM is mostly negative!
    double[] tipDistanceM = new double[offsetAndDeflections.horizontalDeflectionAfterContactPoint.length];
    assert plainPicoData.deflectionLsbToMetresFactor() > 0 : plainPicoData.deflectionLsbToMetresFactor();
    for (int i = tipDistanceM.length; --i >= 0;) {
      double surfaceRelativeZ = plainPicoData.retraction.z(offsetAndDeflections.offset + i)
        - retractionContactPointZLsb;
      /*
       * Force is defLsb * deflectionLsbToMetresFactor * k, so Force/k = defLsb
       * * deflectionLsbToMetresFactor.
       * horizontalDeflectionAfterContactPoint[i] are mostly negative, while
       * deflectionLsbToMetresFactor is always positive.
       */
      tipDistanceM[i] = surfaceRelativeZ * plainPicoData.zLsbToMetresFactor
        - offsetAndDeflections.horizontalDeflectionAfterContactPoint[i]
        * plainPicoData.deflectionLsbToMetresFactor();
    }
    return tipDistanceM;
  }

  /** Deflection after the contact point has an acceptable sign iff non-positive; NaN fails. */
  private static boolean isHorizontalDeflectionAfterContactPointSignOk(double d) {
    // Double.compare orders -0.0 and NaN exactly like the plain d <= 0 check.
    return Double.compare(d, 0.0) <= 0;
  }

  /**
   * For every detected peak, walks to the left to find the start of its rising
   * slope: the scan stops at the previous peak or at the first point whose
   * deflection or tip distance has a disallowed (positive) sign, and the slope
   * start is the arg-max of the deflection in between.
   * 
   * @param peaks
   *          indicator of the detected peaks; read only
   * @return slopes whose start index is -1 exactly at the non-peak positions
   */
  public static PeakSlopes startPeakSlopes(
    double[] horizontalDeflectionAfterContactPoint, double[] tipDistanceInAnyUnitOfMeasurement,
    boolean[] peaks) {
    int n = horizontalDeflectionAfterContactPoint.length;
    Preconditions.checkArgument(n == peaks.length);
    int[] risingStart = new int[n];
    Arrays.fill(risingStart, -1);
    for (int i = n; --i >= 0;) {
      if (peaks[i]) {
        Preconditions.checkState(
          isTipDistanceSignOk(tipDistanceInAnyUnitOfMeasurement[i]),
          "filterForNonPositiveTipDistance() should have ruled out indenture peaks.");
        int j;
        for (j = i; --j >= 0;) {
          // Short-circuit || (was non-short-circuit |): the operands are
          // side-effect-free, so behavior is unchanged and the later checks
          // are skipped once one condition holds.
          if (peaks[j]
            || !isHorizontalDeflectionAfterContactPointSignOk(horizontalDeflectionAfterContactPoint[j])
            || !isTipDistanceSignOk(tipDistanceInAnyUnitOfMeasurement[j])) {
            if (logger.isLoggable(Level.FINER)) {
              logger.finer(String.format(
                "i=%d peaks[%d]=%s horizontalDeflectionAfterContactPoint[%d]=%s", i, j, peaks[j],
                j, horizontalDeflectionAfterContactPoint[j]));
            }
            break;
          }
        }
        // j is the first index that is not allowed
        /*
         * Theoretically it's possible that j = i - 1, when the rise goes
         * immediately above zero. So remember that the resulting rising part
         * might have only one point, the peak itself.
         */
        int argMax = CommonThings.argMax(horizontalDeflectionAfterContactPoint, j + 1, i + 1);
        risingStart[i] = argMax;
      }
    }
    for (int i = n; --i >= 0;) {
      assert peaks[i] == (risingStart[i] != -1);
    }
    return PeakSlopes.of(risingStart);
  }

  /**
   * Counts how many trailing elements of {@code a} are "well spaced": walking
   * backwards, each consecutive difference {@code a[i+1] - a[i]} must be at
   * least {@code low}; a difference larger than {@code high} may be explained
   * by imputed skipped elements, each with a spacing in [low, high], as long
   * as the total imputed count stays within {@code allowSkipped}.
   * 
   * @return the number of trailing well-spaced elements including the imputed
   *         skipped ones; 0 for an empty list, 1 for a singleton
   */
  public static int wellSpacedAtEnd(DoubleList a, double low, double high, int allowSkipped) {
    Preconditions.checkArgument(low <= high);
    if (a.isEmpty()) {
      return 0;
    }
    if (a.size() == 1) {
      return 1;
    }
    int skipped = 0;
    int i;
    for (i = a.size() - 2; i >= 0; i--) {
      double diff = a.get(i + 1) - a.get(i);
      if (diff < low) {
        break;
      }
      int skippedNow;
      if (diff > high) {
        // c estimates how many steps of the average size (high + low) / 2 fit
        // into diff.
        int c = (int) Math.floor(diff / ((high + low) / 2));
        if (c < 0) {
          // huge overflow
          break;
        }
        assert c >= 1;
        // Bump c up if it is still too small for every step to fit under high.
        if (c < diff / high) {
          ++c;
          if (c < 0) {
            // huge overflow
            break;
          }
        }
        assert c >= diff / high : String.format(
          Locale.ROOT, "diff=%s low=%s high=%s c=%s diff/high=%s", diff, low, high, c, diff / high);
        // Accept only if each of the c steps can also be at least low.
        if (c <= diff / low) {
          skippedNow = c - 1;
        } else {
          break;
        }
      } else {
        skippedNow = 0;
      }
      if (skipped + skippedNow > allowSkipped) {
        break;
      }
      skipped += skippedNow;
    }
    return a.size() - (1 + i) + skipped;
  }

  /**
   * Heuristic: the protein might still be attached when at least
   * {@code minPeaks} well-spaced peaks (spacing in [clRangeStart, clRangeEnd],
   * allowing {@code allowSkipped} imputed missing ones) survive at the end of
   * the indicator.
   */
  public static boolean proteinMightStillBeAttached(
    boolean[] peaksIndicator, int clRangeStart, int clRangeEnd, int minPeaks, int allowSkipped) {
    Preconditions.checkArgument(clRangeStart > 0 && clRangeEnd > 0);
    IntList peakIndices = CommonThings.indicesOfTrue(peaksIndicator);
    if (peakIndices.isEmpty()) {
      return minPeaks == 0;
    }
    int wellSpacedAtEnd = wellSpacedAtEnd(
      CommonThings.toDoubleList(peakIndices), clRangeStart, clRangeEnd, allowSkipped);
    logger.log(Level.FINER, "wellSpacedAtEnd={0}", wellSpacedAtEnd);

    boolean enoughWellSpacedPeaks = wellSpacedAtEnd >= minPeaks;
    /*
     * NOTE(review): last - length is always negative, so with clRangeEnd > 0
     * this condition is always true; the distance from the end (length - last
     * < clRangeEnd) was possibly intended. The other overload shares the same
     * expression, so confirm the convention before changing either.
     */
    boolean lastPeakCloseToEnd =
      (CommonThings.last(peakIndices) - peaksIndicator.length) < clRangeEnd;

    // && for consistency with the RedundantPicoCurveInfo overload; the
    // operands are side-effect-free, so behavior is unchanged.
    return enoughWellSpacedPeaks && lastPeakCloseToEnd;
  }

  /**
   * Like {@link #proteinMightStillBeAttached(boolean[], int, int, int, int)}
   * but operating on fitted contour lengths (metres) instead of indices.
   */
  public static boolean proteinMightStillBeAttached(
    RedundantPicoCurveInfo info, double clRangeStart, double clRangeEnd, int minPeaks,
    int allowSkipped) {
    int wellSpacedAtEnd = wellSpacedAtEnd(
      info.peakContourLengthsFromContactPointOnwards(), clRangeStart, clRangeEnd, allowSkipped);
    boolean a = wellSpacedAtEnd >= minPeaks;
    // NOTE(review): lastPeakIndex - length is always negative, making b always
    // true for positive clRangeEnd; possibly length - lastPeakIndex was
    // intended. The boolean[] overload shares the same expression -- confirm
    // the convention before changing either.
    boolean b = (info.lastPeakIndex() - info.peakOrNots.length)
      * info.plainPicoData.actualRetractionStepSizeMetres() < clRangeEnd;
    return a && b;
  }

  /**
   * Computes both contact points from the approach baseline: the retraction
   * contact point is the first intersection of the retraction data with that
   * baseline.
   *
   * @param slope
   *          NaN means that the slope should be computed.
   */
  public static ContactPointsAndSlope computeContactPointsAndSlopeUsingApproachBaseline(
    PlainPicoData fetch, double slope) {
    ContactPointAndSlope approach = findApproachContactPointAndSlope(fetch.sharedApproach(), slope);
    ZLsbDeflectionLsbPoint retractionContactPoint = firstIntersectionWithLine(
      fetch.sharedRetraction(), approach.baseline());
    return new ContactPointsAndSlope(
      approach.contactPoint(), retractionContactPoint, approach.slopeZLsbDefLsb);
  }

  /**
   * Same as
   * {@link #coreFindEmpiricalRetractionContactPointDefLsbGivenZLsbAndSlope}
   * with an arbitrary deflection of 0.
   */
  public static double findEmpiricalRetractionContactPointDefLsbGivenZLsbAndSlope(
    AbstractDirectionData retraction, double zLsb, double slopeZLsbDefLsb, double baselineFraction) {
    return coreFindEmpiricalRetractionContactPointDefLsbGivenZLsbAndSlope(
      retraction, zLsb, slopeZLsbDefLsb, baselineFraction, 0);
  }

  /**
   * De-angles the retraction with a baseline of the given slope anchored at
   * ({@code zLsb}, {@code arbitraryDeflection}), then returns the empirical
   * basal level translated back by the anchor deflection.
   */
  static double coreFindEmpiricalRetractionContactPointDefLsbGivenZLsbAndSlope(
    AbstractDirectionData retraction, double zLsb, double slopeZLsbDefLsb, double baselineFraction,
    double arbitraryDeflection) {
    Point2D.Double anchor = new Point2D.Double(zLsb, arbitraryDeflection);
    LinearFunction baseline = LinearFunction.lineWithSlopeThroughPoint(slopeZLsbDefLsb, anchor);
    OffsetAndDeflections deangled = cutAndDeangle(zLsb, baseline, retraction);
    double empiricalBasalLevel = findEmpiricalBasalLevel(
      deangled.horizontalDeflectionAfterContactPoint, baselineFraction * 100);
    return empiricalBasalLevel + arbitraryDeflection;
  }

  /**
   * Like {@link #analyseWithGivenContactPointsAndSlope(PeakAnalysisInput)} but
   * allows re-using previously computed peak slopes.
   * 
   * @param peakSlopes
   *          may be null, in which case the slopes are computed from scratch
   */
  public static RedundantPicoCurveInfo analyseWithGivenContactPointsAndSlope(
    PeakAnalysisInput input, PeakSlopes peakSlopes) {
    OffsetAndDeflections offsetAndDeflections = cutAndDeangle(
      input.retractionContactPoint().zLsb,
      input.retractionContactPoint().linearFunctionWithSlope(input.slopeZLsbDefLsb()),
      input.plainPicoData.retraction);
    double empiricalBaseline = findEmpiricalBasalLevel(
      offsetAndDeflections.horizontalDeflectionAfterContactPoint,
      defaultEmpiricalBaselinePercentile);
    double[] tipDistanceM = tipDistanceMetres(
      input.plainPicoData, input.retractionContactPoint().zLsb, offsetAndDeflections);
    if (peakSlopes == null) {
      peakSlopes = PicoDataAnalyser.findStartSlopeIndices(
        input.plainPicoData, tipDistanceM, offsetAndDeflections, input.minimumPeakForceN,
        empiricalBaseline, input.minPeakForceRelativeToEmpiricalBaselineN,
        input.medianAbsoluteDeviationLowerBoundN, input.leftMostlyHigherWindowSizeM,
        input.leftMostlyHigherFraction, input.leftMostlyHigherN,
        input.rightMuchHigherThanAllN, input.rightMuchHigherN, input.atMostOnePeakWindowM);
    }
    EmpiricalBaselineAndPeakOrNots ebapon = peakSlopesToPeakOrNots(
      input.plainPicoData, offsetAndDeflections, input.minimumPeakContourLengthM,
      input.minContourLengthDeltaM, empiricalBaseline, tipDistanceM, peakSlopes, input.kbtOverP);
    return new RedundantPicoCurveInfo(
      input.plainPicoData, input.approachContactPointAndSlope(), input.retractionContactPoint(),
      offsetAndDeflections, ebapon.empiricalBaseline, ebapon.peakOrNots, input.kbtOverP);
  }

  /**
   * Like {@link #analyseWithGivenContactPointsAndSlope(PeakAnalysisInput)} but
   * reads the peak slopes from {@code peakSlopesDir} when available, computing
   * and writing them otherwise; falls back to {@link PeakSlopes#EMPTY} (with a
   * warning) when the slopes cannot be obtained.
   * 
   * @param peakSlopesDir
   *          optional
   * @throws MyRuntimeException
   *           wrapping any IOException from the peak-slope cache
   */
  public static RedundantPicoCurveInfo analyseWithGivenContactPointsAndSlope(
    PeakAnalysisInput input, String filename, Path peakSlopesDir) {
    OffsetAndDeflections offsetAndDeflections = cutAndDeangle(
      input.retractionContactPoint().zLsb,
      input.retractionContactPoint().linearFunctionWithSlope(input.slopeZLsbDefLsb()),
      input.plainPicoData.retraction);
    double empiricalBaseline = findEmpiricalBasalLevel(
      offsetAndDeflections.horizontalDeflectionAfterContactPoint,
      defaultEmpiricalBaselinePercentile);
    double[] tipDistanceM = tipDistanceMetres(
      input.plainPicoData, input.retractionContactPoint().zLsb, offsetAndDeflections);
    PeakSlopes peakSlopes;
    try {
      peakSlopes = PeakSlopesHelper.readOrComputeAndWritePeakSlopesOrNull(
        input.plainPicoData, tipDistanceM, offsetAndDeflections, empiricalBaseline,
        input.minimumPeakForceN, input.minPeakForceRelativeToEmpiricalBaselineN,
        input.medianAbsoluteDeviationLowerBoundN, input.leftMostlyHigherWindowSizeM,
        input.leftMostlyHigherFraction, input.leftMostlyHigherN,
        input.rightMuchHigherThanAllN, input.rightMuchHigherN, input.atMostOnePeakWindowM,
        peakSlopesDir, filename);
    } catch (IOException e) {
      throw new MyRuntimeException(e.getMessage(), e);
    }
    if (peakSlopes == null) {
      // Message typo fixed: "extact" -> "extract".
      logger.log(Level.WARNING, "Could not extract the peaks from: {0}", filename);
      peakSlopes = PeakSlopes.EMPTY;
    }
    EmpiricalBaselineAndPeakOrNots ebapon = peakSlopesToPeakOrNots(
      input.plainPicoData, offsetAndDeflections, input.minimumPeakContourLengthM,
      input.minContourLengthDeltaM, empiricalBaseline, tipDistanceM, peakSlopes, input.kbtOverP);
    return new RedundantPicoCurveInfo(
      input.plainPicoData, input.approachContactPointAndSlope(), input.retractionContactPoint(),
      offsetAndDeflections, ebapon.empiricalBaseline, ebapon.peakOrNots, input.kbtOverP);
  }

  /**
   * Finds the offset at which the starting contiguous region of {@code z} that stays
   * not before the level {@code min(z) + d} begins.
   * <p>
   * Implemented by mirroring the problem: the array is reversed and negated so the
   * companion method {@code offsetOfEndingContiguousRegionNotBeforeContactPoint} can
   * perform the search, and the result is mapped back to the original orientation.
   *
   * @param z
   *          the raw signal (NOTE(review): presumably z-piezo samples in LSB — confirm)
   * @param d
   *          the non-negative distance above the minimum of {@code z} defining the level
   * @return the offset in the original (non-reversed) array
   * @throws IllegalArgumentException
   *           if {@code d} is negative
   */
  public static int offsetOfStartingContiguousRegionNotBefore(short[] z, double d) {
    Preconditions.checkArgument(d >= 0, d);
    // Target level: d above the global minimum of z.
    double objective = CommonThings.min(z) + d;
    // Clamp the level to (presumably) the minimum of the first two samples, so the
    // region can still start at the very beginning of the array — TODO confirm
    // CommonThings.min(z, 0, 2) is the minimum over z[0:2).
    double threshold = CommonThings.min(objective, CommonThings.min(z, 0, 2));
    if (objective != threshold) {
      // The requested level was unreachable given the first samples; proceed with
      // the clamped level but warn about the discrepancy.
      logger.log(
        Level.WARNING, "Could not move by {0} as required because of z[0:2]={1}.", new Object[] {
        d, CommonThings.toString(z, 0, 2)});
    }
    short[] reversed = CommonThings.reversed(z, 0, z.length);
    // Reverse + negate turns the "starting region not before threshold" search into
    // an "ending region" search on the mirrored data; z.length - 1 - result maps the
    // returned offset back into the original orientation.
    return z.length
      - 1
      - offsetOfEndingContiguousRegionNotBeforeContactPoint(
        CommonThings.negated(reversed, 0, z.length), -threshold);
  }
}
