package org.broadinstitute.hellbender.utils.genotyper;

import htsjdk.samtools.SAMFileHeader;
import htsjdk.variant.variantcontext.Allele;
import org.apache.commons.math3.stat.descriptive.rank.Median;
import org.broadinstitute.hellbender.utils.*;
import org.broadinstitute.hellbender.utils.read.ArtificialReadUtils;
import org.broadinstitute.hellbender.utils.read.GATKRead;
import org.testng.Assert;
import org.testng.SkipException;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * Test code for {@link AlleleLikelihoods}
 *
 * @author Valentin Ruano-Rubio &lt;valentin@broadinstitute.org&gt;
 */
public final class AlleleLikelihoodsUnitTest {
    // Absolute tolerance used when comparing floating-point likelihood values.
    private static final double EPSILON = 1e-6;
    // Alignment start assigned to odd-indexed artificial reads — presumably by
    // dataSetReads (generator not visible in this chunk); TODO confirm.
    private static final int ODD_READ_START = 101;
    // Alignment start assigned to even-indexed artificial reads (several tests build
    // an interval at this position to retain only the even-indexed reads).
    private static final int EVEN_READ_START = 1;

    // Shared artificial header/parser for creating reads and genome locations.
    private static final SAMFileHeader SAM_HEADER = ArtificialReadUtils.createArtificialSamHeader(10, 0, 1000);
    private static final GenomeLocParser locParser = new GenomeLocParser(SAM_HEADER.getSequenceDictionary());

    // Per-sample read-count scenarios (each row is one data-set configuration);
    // presumably consumed by dataSetReads — TODO confirm against the provider code.
    private static final int[][] READ_COUNTS = {
            {},
            { 100 },
            { 0 },
            { 0, 0, 0 },
            { 1, 0, 1 },
            { 100, 10 , 100},
            { 1000, 10, 100, 20, 23 }
    };

    // Allele-count scenarios, including the degenerate zero-allele case.
    private static final int[] ALLELE_COUNTS = { 0, 1, 2, 3, 10, 20 };

    @Test(dataProvider = "dataSets")
    public void testInstantiationAndQuery(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads) {
        // Build a fresh likelihoods container and verify dimensions plus all lookup queries.
        final AlleleLikelihoods<GATKRead, Allele> subject =
                new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);

        // Dimension queries must agree with the inputs used at construction time.
        Assert.assertEquals(subject.samples().size(), samples.length);
        Assert.assertEquals(subject.alleles().size(), alleles.length);
        Assert.assertEquals(subject.numberOfSamples(), samples.length);
        Assert.assertEquals(subject.numberOfAlleles(), alleles.length);

        testSampleQueries(samples, reads, subject);
        testAlleleQueries(alleles, subject);
        // A null expected matrix means "every entry should be the default 0.0".
        testLikelihoodMatrixQueries(samples, subject, null);
    }

    @Test(dataProvider = "dataSets")
    public void testLikelihoodFillingAndQuery(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads) {
        // Populate the matrices with random values, then verify they read back unchanged.
        final AlleleLikelihoods<GATKRead, Allele> subject =
                new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        final double[][][] expected = fillWithRandomLikelihoods(samples, alleles, subject);
        testLikelihoodMatrixQueries(samples, subject, expected);
    }

    /**
     * Fills {@code result} with random non-positive likelihood values and returns a
     * [sample][allele][evidence] array mirroring exactly what was written.
     */
    private double[][][] fillWithRandomLikelihoods(final String[] samples, final Allele[] alleles, final AlleleLikelihoods<GATKRead, Allele> result) {
        final Random rnd = Utils.getRandomGenerator();
        final double[][][] recorded = new double[samples.length][alleles.length][];
        for (int sampleIndex = 0; sampleIndex < samples.length; sampleIndex++) {
            final LikelihoodMatrix<GATKRead, Allele> matrix = result.sampleMatrix(sampleIndex);
            final int evidenceCount = result.sampleEvidenceCount(sampleIndex);
            for (int alleleIndex = 0; alleleIndex < alleles.length; alleleIndex++) {
                recorded[sampleIndex][alleleIndex] = new double[evidenceCount];
                for (int evidenceIndex = 0; evidenceIndex < evidenceCount; evidenceIndex++) {
                    // Log-scaled likelihoods are non-positive, hence the negated absolute value.
                    final double lk = -Math.abs(rnd.nextGaussian());
                    matrix.set(alleleIndex, evidenceIndex, lk);
                    recorded[sampleIndex][alleleIndex][evidenceIndex] = lk;
                }
            }
        }
        return recorded;
    }

    /**
     * Fills {@code original} and {@code result} with identical pseudo-random likelihoods by
     * re-seeding the shared generator before each fill; returns the values written (same for both).
     */
    private double[][][] fillWithRandomLikelihoods(final String[] samples, final Allele[] alleles, final AlleleLikelihoods<GATKRead, Allele> original, final AlleleLikelihoods<GATKRead, Allele> result) {
        double[][][] lastFilled = null;
        for (final AlleleLikelihoods<GATKRead, Allele> target : Arrays.asList(original, result)) {
            Utils.resetRandomGenerator(); // identical seed => identical value sequence per target
            lastFilled = fillWithRandomLikelihoods(samples, alleles, target);
        }
        return lastFilled; // values written into "result", identical to those in "original"
    }

    @Test(dataProvider = "dataSets")
    public void testBestAlleles(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads) {
        final AlleleLikelihoods<GATKRead, Allele> original = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        fillWithRandomLikelihoods(samples,alleles,original);
        final int numberOfAlleles = alleles.length;
        final int refIndex = original.indexOfReference();
        final Allele refAllele = refIndex >= 0 ? original.getAllele(refIndex) : null;
        for (int s = 0; s < samples.length; s++) {
            final int sampleReadCount = original.sampleEvidenceCount(s);
            final LikelihoodMatrix<GATKRead, Allele> sampleMatrix = original.sampleMatrix(s);
            // Brute-force recompute, for every read: the best allele, its likelihood and the
            // confidence gap to the runner-up.
            final double[] bestLkArray = new double[sampleReadCount];
            final int[] bestIndexArray = new int[sampleReadCount];
            final double[] confidenceArray = new double[sampleReadCount];
            for (int r = 0; r < sampleReadCount; r++) {
                int bestindexOfAllele = -1;
                double bestAlleleLk = Double.NEGATIVE_INFINITY;
                double secondBestAlleleLk = Double.NEGATIVE_INFINITY;
                for (int a = 0; a < numberOfAlleles; a++) {
                    final double lk = sampleMatrix.get(a,r);
                    if (lk > bestAlleleLk) {
                        // New best found; the previous best becomes the runner-up.
                        secondBestAlleleLk = bestAlleleLk;
                        bestAlleleLk = lk;
                        bestindexOfAllele = a;
                    } else if (lk > secondBestAlleleLk) {
                        secondBestAlleleLk = lk;
                    }
                }
                bestLkArray[r] = bestAlleleLk;
                confidenceArray[r] = bestAlleleLk - secondBestAlleleLk;
                bestIndexArray[r] = bestindexOfAllele;
            }
            // Compare the brute-force expectation against the production implementation.
            final Collection<AlleleLikelihoods<GATKRead, Allele>.BestAllele> bestAlleles = original.bestAllelesBreakingTies();
            for (final AlleleLikelihoods<GATKRead, Allele>.BestAllele bestAllele : bestAlleles) {
                final int readIndex = original.evidenceIndex(s,bestAllele.evidence);
                // Skip evidence that belongs to a different sample than the one under test.
                if (readIndex == -1) continue;
                final double refLikelihood = refIndex >= 0 ? sampleMatrix.get(refIndex, readIndex) : Double.NEGATIVE_INFINITY;
                // The implementation falls back to the reference allele when the best alternative
                // is not informative enough relative to the reference likelihood.
                final boolean refOverride = refIndex >= 0 && refIndex !=bestIndexArray[readIndex] && bestLkArray[readIndex] - refLikelihood < AlleleLikelihoods.LOG_10_INFORMATIVE_THRESHOLD;
                Assert.assertEquals(refOverride ? refLikelihood : bestLkArray[readIndex], bestAllele.likelihood);
                Assert.assertEquals(bestAllele.allele, refOverride ? refAllele : alleles[bestIndexArray[readIndex]]);
                Assert.assertEquals(bestAllele.confidence, refOverride ? refLikelihood - bestLkArray[readIndex] : confidenceArray[readIndex], EPSILON);
            }
        }
    }

    @Test(dataProvider = "dataSets")
    public void testBestAlleleMap(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads) {
        final AlleleLikelihoods<GATKRead, Allele> original = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        fillWithRandomLikelihoods(samples,alleles,original);
        // Build the expected map: one (possibly empty) read list per allele.
        final Map<Allele,List<GATKRead>> expected = new LinkedHashMap<>(alleles.length);
        for (final Allele allele : alleles)
            expected.put(allele,new ArrayList<>());

        final int numberOfAlleles = alleles.length;
        for (int s = 0; s < samples.length; s++) {
            final int sampleReadCount = original.sampleEvidenceCount(s);
            final LikelihoodMatrix<GATKRead, Allele> sampleMatrix = original.sampleMatrix(s);
            for (int r = 0; r < sampleReadCount; r++) {
                // Brute-force scan for the best and second-best allele likelihood of read r.
                int bestindexOfAllele = -1;
                double bestAlleleLk = Double.NEGATIVE_INFINITY;
                double secondBestAlleleLk = Double.NEGATIVE_INFINITY;
                for (int a = 0; a < numberOfAlleles; a++) {
                    final double lk = sampleMatrix.get(a,r);
                    if (lk > bestAlleleLk) {
                        secondBestAlleleLk = bestAlleleLk;
                        bestAlleleLk = lk;
                        bestindexOfAllele = a;
                    } else if (lk > secondBestAlleleLk) {
                        secondBestAlleleLk = lk;
                    }
                }
                // Only reads whose best allele clearly beats the runner-up (informative reads)
                // are expected to be assigned to an allele.
                if ((bestAlleleLk - secondBestAlleleLk) > AlleleLikelihoods.LOG_10_INFORMATIVE_THRESHOLD)
                    expected.get(alleles[bestindexOfAllele]).add(sampleMatrix.getEvidence(r));
            }
        }

        final Map<Allele,List<GATKRead>> actual = original.evidenceByBestAlleleMap();

        Assert.assertEquals(actual.size(), alleles.length);
        // Compare per-allele read assignments, ignoring ordering within each list.
        for (final Allele allele : alleles) {
            final List<GATKRead> expectedList = expected.get(allele);
            final List<GATKRead> actualList = actual.get(allele);
            final Set<GATKRead> expectedSet = new LinkedHashSet<>(expectedList);
            final Set<GATKRead> actualSet = new LinkedHashSet<>(actualList);
            Assert.assertEquals(actualSet, expectedSet);
        }
    }

    /**
     * Creates a likelihoods object in which evidence accepted by {@code readsToSkip} keeps the
     * default (0.0) likelihoods ("good"), while every other read gets a very poor likelihood of
     * -10000 for all alleles ("bad").
     */
    private AlleleLikelihoods<GATKRead, Allele> makeGoodAndBadLikelihoods(final String[] samples, final Allele[] alleles,
                                                                          final Map<String, List<GATKRead>> reads,
                                                                          final Predicate<Integer> readsToSkip) {
        final AlleleLikelihoods<GATKRead, Allele> likelihoods = new AlleleLikelihoods<>(
                new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);

        for (int sampleIndex = 0; sampleIndex < samples.length; sampleIndex++) {
            final LikelihoodMatrix<GATKRead, Allele> matrix = likelihoods.sampleMatrix(sampleIndex);
            final int evidenceCount = likelihoods.sampleEvidenceCount(sampleIndex);
            for (int evidenceIndex = 0; evidenceIndex < evidenceCount; evidenceIndex++) {
                // Reads matching the predicate are skipped, i.e. remain "good".
                if (!readsToSkip.test(evidenceIndex)) {
                    for (int alleleIndex = 0; alleleIndex < alleles.length; alleleIndex++) {
                        matrix.set(alleleIndex, evidenceIndex, -10000);
                    }
                }
            }
        }
        return likelihoods;
    }

    @Test(dataProvider = "dataSets")
    public void testFilterPoorlyModeledReads(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads) {
        // Both objects start identical: even-indexed reads are "good" (default 0.0 likelihoods),
        // odd-indexed reads are "bad" (-10000 for every allele).
        final AlleleLikelihoods<GATKRead, Allele> original =
                makeGoodAndBadLikelihoods(samples, alleles, reads, r -> (r & 1) == 0); // even reads are skipped
        final AlleleLikelihoods<GATKRead, Allele> result =
                makeGoodAndBadLikelihoods(samples, alleles, reads, r -> (r & 1) == 0); // even reads are skipped

        checkEvidenceToIndexMapIsCorrect(result);
        // Filtering with an expected likelihood of -100 per read removes the -10000 ("bad") reads.
        result.filterPoorlyModeledEvidence(read -> -100);
        checkEvidenceToIndexMapIsCorrect(result);

        for (int s = 0; s < samples.length; s++) {
            final int oldSampleReadCount = original.sampleEvidenceCount(s);
            final int newSampleReadCount = result.sampleEvidenceCount(s);
            // Only the even-indexed half survives: ceil(oldCount / 2).
            Assert.assertEquals(newSampleReadCount, (oldSampleReadCount + 1) / 2);
            Assert.assertEquals(newSampleReadCount + result.filteredEvidenceBySampleIndex.get(s).size(), oldSampleReadCount); // assert the correct number of reads were saved in the filtered pool
            final LikelihoodMatrix<GATKRead, Allele> newSampleMatrix = result.sampleMatrix(s);
            final LikelihoodMatrix<GATKRead, Allele> oldSampleMatrix = original.sampleMatrix(s);
            for (int r = 0 ; r < newSampleReadCount; r++) {
                // Surviving read r corresponds to original read 2r (the even-indexed reads).
                Assert.assertEquals(original.evidenceIndex(s, result.sampleEvidence(s).get(r)), r * 2);
                for (int a = 0; a < alleles.length; a++) {
                    Assert.assertEquals(newSampleMatrix.get(a, r), oldSampleMatrix.get(a, r * 2));
                }
            }
        }
    }

    @Test(dataProvider = "dataSets")
    public void testFilterReadsToOverlap(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads) {
        final AlleleLikelihoods<GATKRead, Allele> original = new AlleleLikelihoods<>(new IndexedSampleList(samples),
                new IndexedAlleleList<>(alleles), reads);
        final AlleleLikelihoods<GATKRead, Allele> result = new AlleleLikelihoods<>(new IndexedSampleList(samples),
                new IndexedAlleleList<>(alleles), reads);
        fillWithRandomLikelihoods(samples, alleles, original, result);

        // Single-base interval at EVEN_READ_START; presumably only even-indexed artificial reads
        // start there (see EVEN_READ_START/ODD_READ_START) — confirm against dataSetReads.
        final SimpleInterval evenReadOverlap = new SimpleInterval(SAM_HEADER.getSequenceDictionary()
                .getSequence(0).getSequenceName(), EVEN_READ_START, EVEN_READ_START);

        // The evidence-to-index cache must stay consistent across the retain operation.
        checkEvidenceToIndexMapIsCorrect(result);
        result.retainEvidence(evenReadOverlap::overlaps);
        checkEvidenceToIndexMapIsCorrect(result);

        // Expected likelihoods after retention: surviving read r maps to original read 2r.
        final double[][][] newLikelihoods = new double[samples.length][alleles.length][];
        for (int s = 0; s < samples.length ; s++) {
            for (int a = 0; a < alleles.length; a++) {
                newLikelihoods[s][a] = new double[(original.sampleEvidenceCount(s) + 1) / 2];
                final LikelihoodMatrix<GATKRead, Allele> sampleMatrix = original.sampleMatrix(s);
                for (int r = 0; r < newLikelihoods[s][a].length; r++) {
                    Assert.assertEquals(result.evidenceIndex(s, sampleMatrix.getEvidence(r << 1)), r);
                    newLikelihoods[s][a][r] = sampleMatrix.get(a, r << 1);
                }
            }
        }
        testLikelihoodMatrixQueries(samples,result,newLikelihoods);
    }

    // This test asserts that when we call filterPoorlyModeledEvidence -> retainEvidence that the filtered reads have had the
    // predicate applied to them as well.
    @Test(dataProvider = "dataSets")
    public void testFilterReadToOverlap(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads) {
        // "Good" reads are those with (r & 2) == 0, i.e. indices 0,1 mod 4; the rest get -10000.
        final AlleleLikelihoods<GATKRead, Allele> original = makeGoodAndBadLikelihoods(samples, alleles, reads, r -> (r & 2) == 0); // alternate skipping in blocks of 2
        final AlleleLikelihoods<GATKRead, Allele> result = makeGoodAndBadLikelihoods(samples, alleles, reads, r -> (r & 2) == 0); // alternate skipping in blocks of 2

        final SimpleInterval evenReadOverlap = new SimpleInterval(SAM_HEADER.getSequenceDictionary()
                .getSequence(0).getSequenceName(), EVEN_READ_START, EVEN_READ_START);

        // First drop the poorly modeled (-10000) reads...
        result.filterPoorlyModeledEvidence(read -> -100);

        checkEvidenceToIndexMapIsCorrect(result);
        // ...then keep only reads overlapping the even-read start position.
        result.retainEvidence(evenReadOverlap::overlaps);
        checkEvidenceToIndexMapIsCorrect(result);

        final double[][][] newLikelihoods = new double[samples.length][alleles.length][];
        for (int s = 0; s < samples.length ; s++) {
            // The retained evidence plus the filtered pool together must account for the half of
            // the reads that passed the quality filter — NOTE(review): relies on the filtered
            // pool also being subject to the retain predicate; confirm in AlleleLikelihoods.
            Assert.assertEquals(result.sampleEvidenceCount(s) + result.filteredEvidenceBySampleIndex.get(s).size(),
                    (original.sampleEvidenceCount(s) + 1) / 2);

            for (int a = 0; a < alleles.length; a++) {
                // After both filters: ceil(ceil(n/2)/2) reads are expected to survive.
                newLikelihoods[s][a] = new double[(((original.sampleEvidenceCount(s) + 1) / 2) + 1) / 2];
                final LikelihoodMatrix<GATKRead, Allele> sampleMatrix = original.sampleMatrix(s);
                for (int r = 0; r < newLikelihoods[s][a].length; r++) {
                    if (r % 2 == 0) {
                        // Surviving read r maps back to original index 4r (good AND even-indexed).
                        Assert.assertEquals(result.evidenceIndex(s, sampleMatrix.getEvidence(r << 2)), r);
                        newLikelihoods[s][a][r] = sampleMatrix.get(a, r << 2);
                    } else {
                        // Odd r corresponds to a read that must have been removed by one filter.
                        Assert.assertEquals(result.evidenceIndex(s, sampleMatrix.getEvidence(r << 1)), -1);
                    }
                }
            }
        }
        testLikelihoodMatrixQueries(samples,result,newLikelihoods);
    }

        @Test(dataProvider = "dataSets")
    public void testContaminationDownsamplingDoesntDoBadThingsToEvidenceToIndexCache(final String[] samples,
                                                                                     final Allele[] alleles,
                                                                                     final Map<String,List<GATKRead>> reads) {
        // Downsampling removes evidence; the evidence-to-index cache must remain consistent.
        final AlleleLikelihoods<GATKRead, Allele> original = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        final AlleleLikelihoods<GATKRead, Allele> result = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        fillWithRandomLikelihoods(samples, alleles, original, result);

        checkEvidenceToIndexMapIsCorrect(result);
        // Request that half of every sample's evidence be treated as contamination.
        final Map<String, Double> perSampleDownsamplingFractions = new LinkedHashMap<>();
        for (final String sample : samples) {
            perSampleDownsamplingFractions.put(sample, 0.5);
        }
        result.contaminationDownsampling(perSampleDownsamplingFractions);
        checkEvidenceToIndexMapIsCorrect(result);
    }

    /**
     * Verifies that marginalizing (collapsing old alleles into new ones) followed by retaining
     * only the even-start-overlapping evidence yields, for each surviving read, the maximum
     * likelihood over the old alleles mapped to each new allele.
     */
    @Test(dataProvider = "marginalizationDataSets")
    public void testMarginalizationWithOverlap(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads, final Map<Allele,List<Allele>> newToOldAlleleMapping) {
        final AlleleLikelihoods<GATKRead, Allele> original = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        final GenomeLoc evenReadOverlap = locParser.createGenomeLoc(SAM_HEADER.getSequenceDictionary().getSequences().get(0).getSequenceName(),EVEN_READ_START ,EVEN_READ_START );
        fillWithRandomLikelihoods(samples, alleles, original);
        final AlleleLikelihoods<GATKRead, Allele> marginalized = original.marginalize(newToOldAlleleMapping);
        // Assert non-nullness BEFORE dereferencing; the original test only checked it after
        // calling retainEvidence on it, which made the assertion useless.
        Assert.assertNotNull(marginalized);
        // Keep only the evidence overlapping the even-read start position (even-indexed reads).
        marginalized.retainEvidence(evenReadOverlap::overlaps);
        Assert.assertEquals(newToOldAlleleMapping.size(), marginalized.numberOfAlleles());
        for (int a = 0; a < marginalized.numberOfAlleles(); a++) {
            final List<Allele> oldAlleles = newToOldAlleleMapping.get(marginalized.getAllele(a));
            Assert.assertNotNull(oldAlleles);
            for (int s = 0; s < samples.length; s++) {
                final LikelihoodMatrix<GATKRead, Allele> oldSampleLikelihoods = original.sampleMatrix(s);
                final LikelihoodMatrix<GATKRead, Allele> sampleLikelihoods = marginalized.sampleMatrix(s);
                final int sampleReadCount = sampleLikelihoods.evidenceCount();
                final int oldSampleReadCount = oldSampleLikelihoods.evidenceCount();
                // Exactly the even-indexed reads remain: ceil(oldCount / 2).
                Assert.assertEquals(sampleReadCount, (oldSampleReadCount + 1) / 2);
                for (int r = 0; r < sampleReadCount; r++) {
                    // The marginal likelihood is the max over the old alleles mapped to allele a;
                    // surviving read r corresponds to original read 2r.
                    double oldBestLk = Double.NEGATIVE_INFINITY;
                    for (final Allele oldAllele : oldAlleles) {
                        oldBestLk = Math.max(oldSampleLikelihoods.get(original.indexOfAllele(oldAllele), r << 1), oldBestLk);
                    }
                    Assert.assertEquals(sampleLikelihoods.get(a, r), oldBestLk);
                }
            }
        }
    }

    @Test(dataProvider = "marginalizationDataSets")
    public void testMarginalization(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads, final Map<Allele,List<Allele>> newToOldAlleleMapping) {
        // Marginalizing collapses groups of old alleles into new ones; each new-allele likelihood
        // must equal the maximum over the old alleles it was built from.
        final AlleleLikelihoods<GATKRead, Allele> original = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        fillWithRandomLikelihoods(samples, alleles, original);

        final AlleleLikelihoods<GATKRead, Allele> marginalized = original.marginalize(newToOldAlleleMapping);
        Assert.assertNotNull(marginalized);
        Assert.assertEquals(newToOldAlleleMapping.size(), marginalized.numberOfAlleles());

        for (int newAlleleIndex = 0; newAlleleIndex < marginalized.numberOfAlleles(); newAlleleIndex++) {
            final List<Allele> oldAlleles = newToOldAlleleMapping.get(marginalized.getAllele(newAlleleIndex));
            Assert.assertNotNull(oldAlleles);
            for (int sampleIndex = 0; sampleIndex < samples.length; sampleIndex++) {
                final LikelihoodMatrix<GATKRead, Allele> oldMatrix = original.sampleMatrix(sampleIndex);
                final LikelihoodMatrix<GATKRead, Allele> newMatrix = marginalized.sampleMatrix(sampleIndex);
                // Marginalization must not change the amount of evidence per sample.
                final int evidenceCount = newMatrix.evidenceCount();
                Assert.assertEquals(oldMatrix.evidenceCount(), evidenceCount);
                for (int evidenceIndex = 0; evidenceIndex < evidenceCount; evidenceIndex++) {
                    double expectedLk = Double.NEGATIVE_INFINITY;
                    for (final Allele oldAllele : oldAlleles) {
                        expectedLk = Math.max(expectedLk, oldMatrix.get(original.indexOfAllele(oldAllele), evidenceIndex));
                    }
                    Assert.assertEquals(newMatrix.get(newAlleleIndex, evidenceIndex), expectedLk);
                }
            }
        }
    }

    @Test(dataProvider = "dataSets")
    public void testNormalizeCapWorstLK(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads) {
        final AlleleLikelihoods<GATKRead, Allele> original = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        final AlleleLikelihoods<GATKRead, Allele> result = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        final double[][][] originalLikelihoods = fillWithRandomLikelihoods(samples,alleles,original, result);

        // Cap how much worse than the best likelihood any allele can be (0.001 log10 units).
        result.normalizeLikelihoods(- 0.001, true);
        testAlleleQueries(alleles,result);
        final int numberOfAlleles = alleles.length;
        final double[][][] newLikelihoods = new double[originalLikelihoods.length][alleles.length][];
        for (int s = 0; s < samples.length; s++) {
            final int sampleReadCount = original.sampleEvidenceCount(s);
            for (int a = 0; a < numberOfAlleles; a++)
                newLikelihoods[s][a] = new double[sampleReadCount];
            for (int r = 0; r < sampleReadCount; r++) {
                double bestAlleleLk = Double.NEGATIVE_INFINITY;
                //best likelihood can be alt OR reference
                for (int a = 0; a < numberOfAlleles; a++) {
                    bestAlleleLk = Math.max(bestAlleleLk, originalLikelihoods[s][a][r]);
                }
                if (bestAlleleLk == Double.NEGATIVE_INFINITY)
                    // No finite best likelihood (e.g. zero alleles): normalization has nothing to cap.
                    for (int a = 0; a < numberOfAlleles; a++) {
                        newLikelihoods[s][a][r] = originalLikelihoods[s][a][r];
                    }
                else
                    // Every likelihood of this read is floored at bestLk - 0.001.
                    for (int a = 0; a < numberOfAlleles; a++) {
                        newLikelihoods[s][a][r] = Math.max(originalLikelihoods[s][a][r], bestAlleleLk - 0.001);
                    }
            }
        }
        testLikelihoodMatrixQueries(samples,result,newLikelihoods);
    }

    @Test(dataProvider = "dataSets")
    public void testAddMissingAlleles(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads) {
        final AlleleLikelihoods<GATKRead, Allele> original = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        final AlleleLikelihoods<GATKRead, Allele> result = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        final double[][][] originalLikelihoods = fillWithRandomLikelihoods(samples,alleles,original, result);

        // If all the alleles passed are already present in the read-likelihoods collection there is no change.
        result.addMissingAlleles(result.alleles(), Double.NEGATIVE_INFINITY);
        testLikelihoodMatrixQueries(samples,result,originalLikelihoods);

        // If the allele list passed is empty there is no effect.
        result.addMissingAlleles(Collections.emptyList(), Double.NEGATIVE_INFINITY);
        testLikelihoodMatrixQueries(samples,result,originalLikelihoods);

        final Allele newOne;
        final Allele newTwo;
        final Allele newThree;

        // We add a single missing allele; its likelihoods default to -12345.6 everywhere.
        result.addMissingAlleles(Arrays.asList(newOne = Allele.create("ACCCCCAAAATTTAAAGGG".getBytes(), false)),-12345.6);
        Assert.assertEquals(result.numberOfAlleles(), original.numberOfAlleles() + 1);

        // We add two more new alleles mixed in with already-present alleles (existing ones,
        // including the duplicate of newOne, must be ignored):
        result.addMissingAlleles(Arrays.asList(newTwo = Allele.create("ATATATTATATTAATATT".getBytes(), false), result.getAllele(1),
                result.getAllele(0), newThree = Allele.create("TGTGTGTATTG".getBytes(), false), Allele.create("ACCCCCAAAATTTAAAGGG".getBytes(), false)),-6.54321);

        Assert.assertEquals(original.numberOfAlleles() + 3, result.numberOfAlleles());

        // New alleles must be appended in the order they were first added.
        final List<Allele> expectedAlleles = new ArrayList<>(original.alleles());
        expectedAlleles.add(newOne); expectedAlleles.add(newTwo); expectedAlleles.add(newThree);

        Assert.assertEquals(result.alleles(), expectedAlleles);

        // Expected matrix: original values plus three appended rows filled with the defaults
        // used at each addMissingAlleles call.
        final double[][][] newLikelihoods = new double[originalLikelihoods.length][][];
        for (int s = 0; s < samples.length; s++) {
            newLikelihoods[s] = Arrays.copyOf(originalLikelihoods[s], originalLikelihoods[s].length + 3);
            final int sampleReadCount = original.sampleEvidenceCount(s);
            final int originalnumberOfAlleles = originalLikelihoods[s].length;
            newLikelihoods[s][originalnumberOfAlleles] = new double[sampleReadCount];
            Arrays.fill(newLikelihoods[s][originalnumberOfAlleles], -12345.6);
            newLikelihoods[s][originalnumberOfAlleles+1] = new double[sampleReadCount];
            Arrays.fill(newLikelihoods[s][originalnumberOfAlleles + 1], -6.54321);
            newLikelihoods[s][originalnumberOfAlleles+2] = new double[sampleReadCount];
            Arrays.fill(newLikelihoods[s][originalnumberOfAlleles + 2], -6.54321);
        }
            testLikelihoodMatrixQueries(samples,result,newLikelihoods);
    }

    @Test(dataProvider = "dataSets")
    public void testAddNonRefAllele(final String[] samples, final Allele[] alleles, final Map<String,List<GATKRead>> reads) {
        final AlleleLikelihoods<GATKRead, Allele> original = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        final AlleleLikelihoods<GATKRead, Allele> result = new AlleleLikelihoods<>(new IndexedSampleList(samples), new IndexedAlleleList<>(alleles), reads);
        final double[][][] originalLikelihoods = fillWithRandomLikelihoods(samples,alleles,original, result);

        // Adding NON_REF appends exactly one allele at the last index.
        result.addNonReferenceAllele(Allele.NON_REF_ALLELE);
        Assert.assertEquals(result.numberOfAlleles(), original.numberOfAlleles() + 1);
        Assert.assertEquals(result.indexOfAllele(Allele.NON_REF_ALLELE), result.numberOfAlleles() - 1);
        final double[][][] newLikelihoods = new double[originalLikelihoods.length][][];
        for (int s = 0; s < samples.length; s++) {
            newLikelihoods[s] = Arrays.copyOf(originalLikelihoods[s], originalLikelihoods[s].length + 1);
            final int sampleReadCount = original.sampleEvidenceCount(s);
            final int ordinarynumberOfAlleles = originalLikelihoods[s].length;
            newLikelihoods[s][ordinarynumberOfAlleles] = new double[sampleReadCount];
            for (int r = 0; r < sampleReadCount; r++) {

                //TODO secondBestLk is totally irrelevant; this code is really just a max-reduction to get bestLk
                double bestLk = newLikelihoods[s][0][r];
                double secondBestLk = Double.NEGATIVE_INFINITY;
                for (int a = 1; a < ordinarynumberOfAlleles; a++) {
                    final double lk = originalLikelihoods[s][a][r];
                    if (lk > bestLk) {
                        secondBestLk = bestLk;
                        bestLk = lk;
                    } else if (lk > secondBestLk) {
                        secondBestLk = lk;
                    }
                }
                // Expected NON_REF likelihood: the median of the likelihoods strictly worse than
                // the best one.
                final Median median = new Median();
                final List<Double> qualifylingLikelihoods = new ArrayList<>();
                for (int a = 0; a < ordinarynumberOfAlleles; a++) {
                    if (originalLikelihoods[s][a][r] >= bestLk) continue;
                    qualifylingLikelihoods.add(originalLikelihoods[s][a][r]);
                }
                final double medianLikelihood = median.evaluate(qualifylingLikelihoods.stream().mapToDouble(d -> d).toArray());
                // NaN is returned in cases whether there is no elements in qualifyingLikelihoods.
                // In such case we set the NON-REF likelihood to -Inf.
                // (With a single allele we expect NaN; with several tied alleles, the best itself.)
                final double expectedNonRefLk = !Double.isNaN(medianLikelihood) ? medianLikelihood
                        : ordinarynumberOfAlleles <= 1 ? Double.NaN : bestLk;
                newLikelihoods[s][ordinarynumberOfAlleles][r] = expectedNonRefLk;
            }
        }
        testLikelihoodMatrixQueries(samples,result,newLikelihoods);
    }

    /**
     * Asserts that every entry of the per-sample likelihood matrices equals the expected value.
     * A null {@code likelihoods} means every entry is expected to be the default 0.0.
     */
    private void testLikelihoodMatrixQueries(final String[] samples, final AlleleLikelihoods<GATKRead, Allele> result, final double[][][] likelihoods) {
        for (final String sample : samples) {
            final int sampleIndex = result.indexOfSample(sample);
            final int expectedEvidenceCount = result.sampleEvidenceCount(sampleIndex);
            final int alleleCount = result.numberOfAlleles();
            // Dimensions must be stable across repeated queries.
            Assert.assertEquals(result.numberOfAlleles(), alleleCount);
            for (int alleleIndex = 0; alleleIndex < alleleCount; alleleIndex++) {
                Assert.assertEquals(result.sampleEvidenceCount(sampleIndex), expectedEvidenceCount);
                for (int evidenceIndex = 0; evidenceIndex < expectedEvidenceCount; evidenceIndex++) {
                    final double actual = result.sampleMatrix(sampleIndex).get(alleleIndex, evidenceIndex);
                    if (Double.isNaN(actual)) {
                        // NaN is only legitimate when the expectation is explicitly NaN.
                        Assert.assertTrue(likelihoods != null && Double.isNaN(likelihoods[sampleIndex][alleleIndex][evidenceIndex]));
                    } else {
                        final double expected = likelihoods == null ? 0.0 : likelihoods[sampleIndex][alleleIndex][evidenceIndex];
                        Assert.assertEquals(actual, expected, EPSILON);
                    }
                }
            }
        }
    }

    // Verifies allele index lookups: each allele maps to a unique, valid index that round-trips.
    private void testAlleleQueries(Allele[] alleles, AlleleLikelihoods<GATKRead, Allele> result) {
        final Set<Integer> seenIndices = new LinkedHashSet<>();
        for (final Allele allele : alleles) {
            final int alleleIndex = result.indexOfAllele(allele);
            Assert.assertTrue(alleleIndex >= 0);
            // Set.add returns false on duplicates, so this also checks index uniqueness.
            Assert.assertTrue(seenIndices.add(alleleIndex));
            Assert.assertSame(allele, alleles[alleleIndex]);
        }
    }

    // Verifies sample index lookups, the per-sample evidence lists, and evidence-index round-trips.
    private void testSampleQueries(String[] samples, Map<String, List<GATKRead>> reads, AlleleLikelihoods<GATKRead, Allele> result) {
        final Set<Integer> seenSampleIndices = new LinkedHashSet<>(samples.length);
        for (final String sample : samples) {
            final int sampleIndex = result.indexOfSample(sample);
            Assert.assertTrue(sampleIndex >= 0);
            // Set.add returns false for duplicates, so this also asserts index uniqueness.
            Assert.assertTrue(seenSampleIndices.add(sampleIndex));

            final List<GATKRead> actualReads = result.sampleEvidence(sampleIndex);
            final List<GATKRead> expectedReads = reads.get(sample);
            // Same reads ignoring order...
            Assert.assertEquals(new LinkedHashSet<>(actualReads), new LinkedHashSet<>(expectedReads));

            // ...and in fact the very same objects in the very same order, with index round-trip.
            final int readCount = actualReads.size();
            for (int r = 0; r < readCount; r++) {
                Assert.assertSame(actualReads.get(r), expectedReads.get(r));
                Assert.assertEquals(result.evidenceIndex(sampleIndex, actualReads.get(r)), r);
            }
        }
    }

    // Sample-name sets used to build test fixtures. Declared static final: the UPPER_SNAKE name
    // signals a constant, but the field was previously a mutable instance field.
    private static final String[][] SAMPLE_SETS = {
            {"A","B","C"},
            {"A"},
            {"C","A","D","E","Salsa","Gazpacho"},
    };

    // Allele sets (with and without a reference allele) used to build test fixtures.
    // Declared static final (constant-named field was a mutable instance field) and the
    // stray extra indentation on the last row is fixed.
    private static final Allele[][] ALLELE_SETS = {
            {Allele.create("A", true), Allele.create("T"), Allele.create("C")},
            {Allele.create("A", true)},
            {Allele.create("ATTTA"), Allele.create("A", true)},
            {Allele.create("A"), Allele.create("AT", true)},
            {Allele.create("A", false), Allele.create("AT", false)},
    };

    /**
     * Data sets for marginalization tests: {samples, source alleles, reads, source-to-target allele
     * map} for every pair of allele sets where the target set is strictly smaller than the source.
     * <p>
     * FIX: the original mixed {@code sample_set}, {@code alleleSet} and {@code allele_set} — two of
     * which differed only in underscore placement — obscuring which set was the marginalization
     * source vs target; locals are renamed to {@code fromAlleles}/{@code toAlleles}.
     */
    @DataProvider(name="marginalizationDataSets")
    public Object[][] marginalizationDataSets() {
        try {
            final Random rnd = Utils.getRandomGenerator();
            final Object[][] result = new Object[SAMPLE_SETS.length * ALLELE_SETS.length * ALLELE_SETS.length][];
            int nextIndex = 0;
            for (final String[] sampleSet : SAMPLE_SETS) {
                for (final Allele[] fromAlleles : ALLELE_SETS) {
                    for (final Allele[] toAlleles : ALLELE_SETS) {
                        // Marginalizing only makes sense onto a strictly smaller allele set.
                        if (toAlleles.length < fromAlleles.length) {
                            result[nextIndex++] = new Object[]{sampleSet, fromAlleles,
                                    dataSetReads(sampleSet, rnd), randomAlleleMap(fromAlleles, toAlleles)
                            };
                        }
                    }
                }
            }
            return Arrays.copyOf(result, nextIndex);
        } catch (final Throwable e) {
            // Wrap so TestNG surfaces the underlying cause of a data-provider failure.
            throw new RuntimeException(e);
        }
    }

    /**
     * Builds a random mapping from {@code fromAlleles} onto {@code toAlleles}: each from-allele is
     * drawn in random order and assigned to the to-alleles in round-robin fashion, so every
     * from-allele appears exactly once across the map's value lists.
     */
    private Map<Allele,List<Allele>> randomAlleleMap(final Allele[] fromAlleles, final Allele[] toAlleles) {
        final Map<Allele,List<Allele>> result = new LinkedHashMap<>(toAlleles.length);
        for (final Allele toAllele : toAlleles) {
            result.put(toAllele, new ArrayList<>(fromAlleles.length));
        }
        final ArrayList<Allele> unassigned = new ArrayList<>(Arrays.asList(fromAlleles));
        final Random rnd = Utils.getRandomGenerator();
        int toIndex = 0;
        while (!unassigned.isEmpty()) {
            // Remove a random remaining from-allele and hand it to the next target in rotation.
            final Allele picked = unassigned.remove(rnd.nextInt(unassigned.size()));
            result.get(toAlleles[toIndex]).add(picked);
            toIndex = (toIndex + 1) % toAlleles.length;
        }
        return result;
    }


    /**
     * Data sets for basic instantiation tests: the full cross product of sample sets and allele
     * sets, each paired with randomly generated reads.
     */
    @DataProvider(name="dataSets")
    public Object[][] dataSets() {
        try {
            final Random rnd = Utils.getRandomGenerator();
            final List<Object[]> cases = new ArrayList<>(SAMPLE_SETS.length * ALLELE_SETS.length);
            for (final String[] sampleSet : SAMPLE_SETS) {
                for (final Allele[] alleleSet : ALLELE_SETS) {
                    cases.add(new Object[]{sampleSet, alleleSet, dataSetReads(sampleSet, rnd)});
                }
            }
            return cases.toArray(new Object[cases.size()][]);
        } catch (final Throwable e) {
            // Wrap so TestNG surfaces the underlying cause of a data-provider failure.
            throw new RuntimeException(e);
        }
    }

    /**
     * Generates, per sample, a random number (0-99) of artificial 5M reads over "AAAAA" with Q30
     * qualities; reads alternate between two alignment starts so downstream tests can partition
     * them by position.
     */
    private Map<String,List<GATKRead>> dataSetReads(final String[] samples, final Random rnd) {
        final Map<String,List<GATKRead>> result = new LinkedHashMap<>(samples.length);
        for (final String sample : samples) {
            final int readCount = rnd.nextInt(100);
            final List<GATKRead> reads = new ArrayList<>(readCount);
            for (int r = 0; r < readCount; r++) {
                // Even-indexed reads start at EVEN_READ_START, odd-indexed at ODD_READ_START.
                final int start = (r % 2 == 0) ? EVEN_READ_START : ODD_READ_START;
                reads.add(ArtificialReadUtils.createArtificialRead(SAM_HEADER,
                        "RRR" + sample + "00" + r, 0, start, "AAAAA".getBytes(),
                        new byte[]{30, 30, 30, 30, 30}, "5M"));
            }
            result.put(sample, reads);
        }
        return result;
    }

    /**
     * Instantiates an AlleleLikelihoods directly from reads and checks allele/sample/reference
     * indexing plus an all-zero likelihood matrix.
     */
    @Test(dataProvider="readCountsAndnumberOfAllelesDataSkippingNoAlleleAndWithReference")
    public void testInstantiationAndBasicQueries(final int[] readCounts, final int numberOfAlleles, final boolean hasReference) {
        final SampleList sampleList = sampleList(readCounts);

        final AlleleList<Allele> alleleList = alleleList(numberOfAlleles, hasReference);
        final Map<String,List<GATKRead>> sampleToReads = ReadLikelihoodsUnitTester.sampleToReads(sampleList, readCounts);
        final AlleleLikelihoods<GATKRead, Allele> subject = new AlleleLikelihoods<>(sampleList, alleleList, sampleToReads);

        AlleleListUnitTester.assertAlleleList(subject, alleleList.asListOfAlleles());
        SampleListUnitTester.assertSampleList(subject, sampleList.asListOfSamples());

        if (hasReference) {
            final int referenceIndex = alleleList.indexOfReference();
            Assert.assertTrue(referenceIndex >= 0);
            // BUGFIX: previously compared alleleList.indexOfReference() with its own result (a
            // tautology); the subject's reference index is what must agree with the allele list.
            Assert.assertEquals(subject.indexOfReference(), referenceIndex);
        } else {
            Assert.assertEquals(subject.indexOfReference(), -1);
        }

        // Likelihoods are initialized to 0.0 when reads are supplied at construction time.
        testLikelihoodMatrixQueries(alleleList, sampleList, sampleToReads, subject, 0.0);
        testAlleleQueries(alleleList, subject);
        testSampleQueries(sampleList, sampleToReads, subject);
    }

    /**
     * Instantiates an empty AlleleLikelihoods and adds the reads afterwards via addEvidence,
     * verifying the evidence-to-index map stays consistent across the mutation and that all
     * likelihoods take the supplied initial value.
     */
    @Test(dataProvider="readCountsAndnumberOfAllelesDataSkippingNoAlleleAndWithReference")
    public void testInstantiationAndBasicQueries1(final int[] readCounts, final int numberOfAlleles, final boolean hasReference) {
        final SampleList sampleList = sampleList(readCounts);

        final AlleleList<Allele> alleleList = alleleList(numberOfAlleles, hasReference);
        final AlleleLikelihoods<GATKRead, Allele> subject = new AlleleLikelihoods<>(sampleList, alleleList, Collections.emptyMap());

        final Map<String,List<GATKRead>> sampleToReads = ReadLikelihoodsUnitTester.sampleToReads(sampleList, readCounts);
        final double expectedLik = -0.2;
        checkEvidenceToIndexMapIsCorrect(subject);
        subject.addEvidence(sampleToReads, expectedLik);
        checkEvidenceToIndexMapIsCorrect(subject);

        AlleleListUnitTester.assertAlleleList(subject, alleleList.asListOfAlleles());
        SampleListUnitTester.assertSampleList(subject, sampleList.asListOfSamples());

        if (hasReference) {
            final int referenceIndex = alleleList.indexOfReference();
            Assert.assertTrue(referenceIndex >= 0);
            // BUGFIX: previously compared alleleList.indexOfReference() with its own result (a
            // tautology); the subject's reference index is what must agree with the allele list.
            Assert.assertEquals(subject.indexOfReference(), referenceIndex);
        } else {
            Assert.assertEquals(subject.indexOfReference(), -1);
        }

        testLikelihoodMatrixQueries(alleleList, sampleList, sampleToReads, subject, expectedLik);
        testAlleleQueries(alleleList, subject);
        testSampleQueries(sampleList, sampleToReads, subject);
    }

    /**
     * Asserts the evidence-to-index map is consistent: for every sample, looking up each read
     * returns its position in that sample's evidence list. Operations that add or remove evidence
     * either update the map eagerly or invalidate it for lazy regeneration; either way, queries
     * made after the mutating operation must be correct.
     */
    private void checkEvidenceToIndexMapIsCorrect(AlleleLikelihoods<GATKRead, Allele> subject) {
        final int sampleCount = subject.numberOfSamples();
        for (int s = 0; s < sampleCount; s++) {
            final List<GATKRead> evidence = subject.sampleEvidence(s);
            for (int r = 0; r < subject.sampleEvidenceCount(s); r++) {
                Assert.assertEquals(subject.evidenceIndex(s, evidence.get(r)), r);
            }
        }
    }

    /**
     * Writes a distinct, non-zero likelihood into every (allele, read) cell of every sample's
     * matrix, verifies each value reads back, and checks that exactly the expected number of
     * cells was visited.
     */
    @Test(dataProvider="readCountsAndnumberOfAllelesDataSkippingNoLikelihoodsOrNoAlleleAndWithReference")
    public void testLikelihoodWriting(final int[] readCounts, final int numberOfAlleles, final boolean hasReference) {
        final SampleList sampleList = sampleList(readCounts);

        final AlleleList<Allele> alleleList = alleleList(numberOfAlleles, hasReference);
        final Map<String,List<GATKRead>> sampleToReads = ReadLikelihoodsUnitTester.sampleToReads(sampleList, readCounts);
        final AlleleLikelihoods<GATKRead, Allele> subject = new AlleleLikelihoods<>(sampleList, alleleList, sampleToReads);

        int totalLikelihoodsSet = 0;
        int expectedLikelihoodsSet = 0;
        for (int s = 0; s < readCounts.length; s++) {
            expectedLikelihoodsSet += readCounts[s] * numberOfAlleles;
            final LikelihoodMatrix<GATKRead, Allele> matrix = subject.sampleMatrix(s);
            final int readCount = matrix.evidenceCount();
            for (int a = 0; a < numberOfAlleles; a++) {
                for (int r = 0; r < readCount; r++) {
                    final double likelihood = testLikelihood(s, a, r);
                    Assert.assertNotEquals(likelihood, 0); // paranoia: the formula should never yield 0
                    totalLikelihoodsSet++;
                    matrix.set(a, r, likelihood);
                    Assert.assertEquals(matrix.get(a, r), likelihood);
                }
            }
        }
        Assert.assertEquals(totalLikelihoodsSet, expectedLikelihoodsSet);
    }

    /**
     * Deterministic pseudo-likelihood for a (sample, allele, read) cell; negative for any
     * non-negative indices, with distinct prime-ish coefficients so nearby cells get distinct values.
     */
    private double testLikelihood(final int indexOfSample, final int indexOfAllele, final int readIndex) {
        final int magnitude = 31 * (indexOfSample + 1) + 101 * indexOfAllele + 1009 * readIndex;
        return -Math.abs(magnitude);
    }

    /**
     * Checks every cell of each sample's likelihood matrix: matrix dimensions, allele and read
     * identity at each index, and that all likelihood values equal {@code expectedLik}.
     */
    private void testLikelihoodMatrixQueries(final AlleleList<Allele> alleles, final SampleList samples,
                                             final Map<String,List<GATKRead>> sampleToReads, AlleleLikelihoods<GATKRead, Allele> result, final double expectedLik) {
        for (final String sample : samples.asListOfSamples()) {
            final LikelihoodMatrix<GATKRead, Allele> matrix = result.sampleMatrix(result.indexOfSample(sample));
            final List<GATKRead> expectedReads = sampleToReads.get(sample);
            Assert.assertEquals(matrix.numberOfAlleles(), alleles.numberOfAlleles());
            Assert.assertEquals(matrix.evidenceCount(), expectedReads.size());
            for (int a = 0; a < matrix.numberOfAlleles(); a++) {
                Assert.assertEquals(matrix.getAllele(a), alleles.getAllele(a));
                for (int r = 0; r < expectedReads.size(); r++) {
                    Assert.assertEquals(matrix.getEvidence(r), expectedReads.get(r));
                    Assert.assertEquals(matrix.get(a, r), expectedLik);
                }
            }
        }
    }

    /**
     * Verifies that each allele in the list resolves to a distinct valid index that round-trips
     * back to the same allele instance.
     */
    private void testAlleleQueries(final AlleleList<Allele> alleles, AlleleLikelihoods<GATKRead, Allele> result) {
        final Set<Integer> seenIndices = new LinkedHashSet<>();
        for (final Allele allele : alleles.asListOfAlleles()) {
            final int alleleIndex = result.indexOfAllele(allele);
            Assert.assertTrue(alleleIndex >= 0);
            // Set.add returns false on duplicates, so this also checks index uniqueness.
            Assert.assertTrue(seenIndices.add(alleleIndex));
            Assert.assertSame(allele, alleles.getAllele(alleleIndex));
        }
    }

    /**
     * Verifies sample lookup against a SampleList: each sample maps to a distinct valid index,
     * its evidence list matches the expected reads (as a set and element-by-element), and
     * {@code evidenceIndex} agrees with list order.
     */
    private void testSampleQueries(final SampleList samples, Map<String, List<GATKRead>> reads,
                                   final AlleleLikelihoods<GATKRead, Allele> result) {
        final Set<Integer> seenSampleIndices = new LinkedHashSet<>(samples.numberOfSamples());
        for (final String sample : samples.asListOfSamples()) {
            final int sampleIndex = result.indexOfSample(sample);
            Assert.assertTrue(sampleIndex >= 0);
            // Set.add returns false on duplicates, so this also checks index uniqueness.
            Assert.assertTrue(seenSampleIndices.add(sampleIndex));

            final List<GATKRead> actualReads = result.sampleEvidence(sampleIndex);
            final List<GATKRead> expectedReads = reads.get(sample);
            Assert.assertEquals(new LinkedHashSet<>(actualReads), new LinkedHashSet<>(expectedReads));

            for (int r = 0; r < actualReads.size(); r++) {
                Assert.assertSame(actualReads.get(r), expectedReads.get(r));
                Assert.assertEquals(result.evidenceIndex(sampleIndex, actualReads.get(r)), r);
            }
        }
    }

    /**
     * Creates an allele list of {@code numberOfAlleles} random unique alleles, optionally promoting
     * a randomly chosen one to reference status. If the promotion happens to collapse two alleles
     * into duplicates, the test is skipped rather than failed (expected to be rare).
     */
    private AlleleList<Allele> alleleList(final int numberOfAlleles, final boolean hasReference) {
        final Allele[] alleles = AlleleListUnitTester.generateRandomUniqueAlleles(numberOfAlleles, 100);
        if (hasReference) {
            final int refIndex = Utils.getRandomGenerator().nextInt(numberOfAlleles);
            alleles[refIndex] = Allele.create(alleles[refIndex].getBases(), true);
        }
        final AlleleList<Allele> result = new IndexedAlleleList<>(alleles);
        if (result.numberOfAlleles() != alleles.length) {
            throw new SkipException("repeated alleles, should be infrequent");
        }
        return result;
    }

    /**
     * Regression test for GATK issue 7153: after all evidence is removed, previously written
     * likelihood values must not survive ("zombie" values) when new evidence is later added.
     * The assertion message typo ("zoombie") is also corrected.
     */
    @Test
    public void testBug7153ZombieLkValues() {
        final SimpleInterval evidence1 = new SimpleInterval("seq1", 1, 1);
        final SimpleInterval evidence2 = new SimpleInterval("seq2", 2, 2);

        final AlleleLikelihoods<SimpleInterval, Allele> lk = new AlleleLikelihoods<>(
                SampleList.singletonSampleList("sample"),
                AlleleList.newList(Arrays.asList(Allele.REF_A, Allele.ALT_A)),
                Collections.singletonMap("sample", Collections.singletonList(evidence1))
        );
        lk.sampleMatrix(0).set(0, 0, 99.0);   // plant a value that must not survive removal
        lk.retainEvidence(e -> false);        // drop all evidence
        Assert.assertEquals(lk.evidenceCount(), 0);
        lk.addEvidence(Collections.singletonMap("sample", Collections.singletonList(evidence2)), 0.0);
        Assert.assertEquals(lk.evidenceCount(), 1);
        final double newLk = lk.sampleMatrix(0).get(0, 0);
        Assert.assertEquals(newLk, 0.0, "reporting the zombie lk of 99.0?");
    }

    /**
     * All combinations of read-count arrays and allele counts, each with hasReference=false and
     * hasReference=true.
     */
    @DataProvider(name="readCountsAndnumberOfAllelesData")
    public Object[][] readCountsAndnumberOfAllelesData() {
        final List<Object[]> result = new ArrayList<>(READ_COUNTS.length * ALLELE_COUNTS.length * 2);
        for (final int[] readCounts : READ_COUNTS) {
            for (final int numberOfAlleles : ALLELE_COUNTS) {
                result.add(new Object[]{ readCounts, numberOfAlleles, false});
                result.add(new Object[]{ readCounts, numberOfAlleles, true});
            }
        }
        return result.toArray(new Object[result.size()][]);
    }

    /**
     * Same as {@code readCountsAndnumberOfAllelesData} but drops the contradictory combination of
     * zero alleles with hasReference=true.
     */
    @DataProvider(name="readCountsAndnumberOfAllelesDataSkippingNoAlleleAndWithReference")
    public Object[][] readCountsAndnumberOfAllelesDataSkippingNoAlleleAndWithReference() {
        return Arrays.stream(readCountsAndnumberOfAllelesData())
                .filter(paramSet -> !(paramSet[2].equals(true) && paramSet[1].equals(0)))
                .toArray(Object[][]::new);
    }

    /**
     * Further filters {@code readCountsAndnumberOfAllelesDataSkippingNoAlleleAndWithReference} to
     * only those cases with at least one read overall.
     */
    @DataProvider(name="readCountsAndnumberOfAllelesDataSkippingNoLikelihoodsOrNoAlleleAndWithReference")
    public Object[][] readCountsAndnumberOfAllelesDataSkippingNoLikelihoodsOrNoAlleleAndWithReference() {
        return Arrays.stream(readCountsAndnumberOfAllelesDataSkippingNoAlleleAndWithReference())
                .filter(paramSet -> MathUtils.sum((int[]) paramSet[0]) > 0)
                .toArray(Object[][]::new);
    }

    /** Builds a sample list "SAMPLE_0" .. "SAMPLE_{n-1}", one sample per read-count entry. */
    private SampleList sampleList(final int[] readCounts) {
        final List<String> samples = IntStream.range(0, readCounts.length)
                .mapToObj(i -> "SAMPLE_" + i)
                .collect(Collectors.toList());
        return new IndexedSampleList(samples);
    }
}
