package org.systemsbiology.rface.hadoop.data;

import edu.montreal.KolmogorovSmirnovDist;
import org.apache.commons.math.stat.descriptive.moment.StandardDeviation;
import org.apache.hadoop.io.Writable;
import org.systemsbiology.stats.HartiganDipLut;
import org.systemsbiology.util.KahanSummation;
import org.systemsbiology.util.Summator;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.util.Arrays;
import java.util.MissingResourceException;

/**
 * Created by IntelliJ IDEA.
 * User: anorberg
 * Date: 9/27/11
 * Time: 12:34 PM
 * The final result type of RF-ACE result aggregation, a representation of the calculated stats for the group.
 */
public class StatAggregation implements Writable {

    private static HartiganDipLut lut;

    static{
        try{
            ObjectInputStream objStream = new ObjectInputStream(StatAggregation.class.getResourceAsStream("/hartigan.lut"));
            lut = (HartiganDipLut)objStream.readObject();
            objStream.close();
        } catch (IOException iox){
            throw new MissingResourceException("IOException trying to load dip test significance table", "StatAggregation", "/hartigan.lut");
        } catch (ClassNotFoundException wtf){
            throw new AssertionError("HartiganDipLut not found despite being statically linked trying to load dip test significance table");
        } catch (ClassCastException cce){
            throw new MissingResourceException("Dip test significance data resource did not contain data of the correct type", "StatAggregation", "/hartigan.lut");
        } catch (NullPointerException zpe){
            throw new MissingResourceException("Hartigan Dip Test LUT not found in package", "StatAggregation", "/hartigan.lut");
        }
    }

    private ResultGroup backingGroup;

    private double sameDistributionProbability;
    private double unimodality;
    private double dispersion;
    private double baseMean;
    private double targetMean;
    private double totalMean;

    private static final int MAGIC_OPEN = 0xCA5CADED;
    private static final int MAGIC_CLOSE = 0xB1D00F;

    public StatAggregation(ResultGroup source){
        backingGroup = source;
        sameDistributionProbability = Double.NaN;
        unimodality = Double.NaN;
        calculate();
    }

    private static double znan(double z){
        if(Double.isNaN(z)) return 0;
        return z;
    }

    private double smirnovD(double[] f1, double[] f2){
        double ret = 0;

        //Fast approach:
        //Start with CDF values of 0.
        //Merge step down f1 and f2, recalculating the CDF based on step position.
        //Track greatest difference.
        //Differences can only shrink once something goes over the edge, so either over the limit is the stop condition.

        int f1dex = 0;
        int f2dex = 0;

        while(f1dex < f1.length && f2dex < f2.length){
            //our next probe point
            double foo = f1[f1dex];
            if(f2[f2dex] < foo){
                foo = f2[f2dex];
            }

            //move pointers if this is the low one- and move both in case of draw
            while(f1dex < f1.length && f1[f1dex] <= foo){
                ++f1dex;
            }
            while(f2dex < f2.length && f2[f2dex] <= foo){
                ++f2dex;
            }

            double probe = Math.abs(((double)f1dex/(double)f1.length) - ((double)f2dex/(double)f2.length));
            if(probe > ret){
                ret = probe;
            }
        }

        return ret;
    }

    private static double[] merge(double[] a, double[] b){
        double[] ret = new double[a.length + b.length];
        int aPtr = 0, bPtr = 0, rPtr = 0;
        while(aPtr < a.length && bPtr < b.length){
            if(a[aPtr] < b[bPtr]){
                ret[rPtr++] = a[aPtr++];
            } else {
                ret[rPtr++] = b[bPtr++];
            }
        }
        //one of them ran off the end; clean up the other.
        //remember, while is also implicitly an if, so only one of these loops will happen
        while(aPtr < a.length){
            ret[rPtr++] = a[aPtr++];
        }
        while(bPtr < b.length){
            ret[rPtr++] = b[bPtr++];
        }

        return ret;
    }

    private static double mean(double[] ds){
        Summator sum = new KahanSummation();
        for(double d : ds){
            sum.add(d);
        }
        return sum.total()/ds.length;
    }

    private void calculate(){
        //1. Calculate the probability the target came from the same distribution as everything else
        double d = smirnovD(backingGroup.targetsArray(), backingGroup.valuesArray());
        double t = backingGroup.targetsCount(); //as double, for floating-point math later
        double v = backingGroup.valuesCount(); //easier to read without a million casts
        //two sample virtual sample size
        int n = (int) ((t*v)/(t+v)); //rounding down is pessimistic and thus correct

        sameDistributionProbability = KolmogorovSmirnovDist.cdf(n, d);

        baseMean = mean(backingGroup.valuesArray());
        targetMean = mean(backingGroup.targetsArray());
        totalMean = ((znan(baseMean) * backingGroup.valuesCount())+(znan(targetMean) * backingGroup.targetsCount()))/
                (backingGroup.targetsCount() + backingGroup.valuesCount());


        //2. Determine if the general distribution has a sane stddev
        double[] compound = merge(backingGroup.valuesArray(), backingGroup.targetsArray());
        StandardDeviation stddev = new StandardDeviation();
        dispersion = stddev.evaluate(compound);

        //3. Determine if the general distribution in logit-space has a single peak
        //approach: Generalized EM from opposite extrema, test for divergence. never mind, that approach doesn't work
        //new approach: back to plan A, understand/implement dip test

        unimodality = lut.calculateDipUnimodalP(compound);
    }

    public void write(DataOutput dataOutput) throws IOException {
        dataOutput.writeInt(MAGIC_OPEN);
        dataOutput.writeDouble(sameDistributionProbability);
        dataOutput.writeDouble(dispersion);
        dataOutput.writeDouble(unimodality);
        dataOutput.writeDouble(baseMean);
        dataOutput.writeDouble(targetMean);
        dataOutput.writeDouble(totalMean);
        backingGroup.write(dataOutput);
        dataOutput.writeInt(MAGIC_CLOSE);
    }

    public void readFields(DataInput dataInput) throws IOException {
        if(dataInput.readInt() != MAGIC_OPEN){
            throw new IOException("Corrupt stream- opening signature mismatch");
        }
        sameDistributionProbability = dataInput.readDouble();
        dispersion = dataInput.readDouble();
        unimodality = dataInput.readDouble();
        baseMean = dataInput.readDouble();
        targetMean = dataInput.readDouble();
        totalMean = dataInput.readDouble();
        backingGroup = new ResultGroup(); //now must be initialized via...
        backingGroup.readFields(dataInput); //..that
        if(dataInput.readInt() != MAGIC_CLOSE){
            throw new IOException("Corrupt stream- closing signature mismatch");
        }
    }

    //Tabular output of calculated statistics.
    public String toString(){
        //N; concordance; dispersion; unimodality
        return String.valueOf(backingGroup.targetsCount() + backingGroup.valuesCount()) + "\t" +
                sameDistributionProbability + "\t" +
                dispersion + "\t" +
                unimodality + "\t" +
                baseMean + "\t" +
                targetMean + "\t" +
                totalMean + "\t" +
                Arrays.toString(backingGroup.valuesArray()) + "\t" +
                Arrays.toString(backingGroup.targetsArray()) + "\t";
    }

    public static String getHeader(){
        return "Count\tConcordance\tDispersion\tUnimodality\tBase Mean\tTarget Mean\tTotal Mean\tFirst Set\tSecond Set";
    }

    //TODO: implement getters
}
