package org.broadinstitute.hellbender.tools.genomicsdb;

import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.SAMSequenceRecord;
import htsjdk.samtools.util.FileExtensions;
import htsjdk.samtools.util.IntervalList;
import htsjdk.tribble.AbstractFeatureReader;
import htsjdk.tribble.CloseableTribbleIterator;
import htsjdk.tribble.FeatureReader;
import htsjdk.tribble.index.Index;
import htsjdk.tribble.index.IndexFactory;
import htsjdk.tribble.readers.LineIterator;
import htsjdk.variant.bcf2.BCF2Codec;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.VCFCodec;
import htsjdk.variant.vcf.VCFFormatHeaderLine;
import htsjdk.variant.vcf.VCFHeader;
import htsjdk.variant.vcf.VCFHeaderLine;
import htsjdk.variant.vcf.VCFHeaderLineType;
import htsjdk.variant.vcf.VCFStandardHeaderLines;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.broadinstitute.barclay.argparser.CommandLineException;
import org.broadinstitute.hellbender.CommandLineProgramTest;
import org.broadinstitute.hellbender.Main;
import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.testutils.ArgumentsBuilder;
import org.broadinstitute.hellbender.testutils.BaseTest;
import org.broadinstitute.hellbender.testutils.VariantContextTestUtils;
import org.broadinstitute.hellbender.tools.IndexFeatureFile;
import org.broadinstitute.hellbender.utils.SimpleInterval;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.gcs.BucketUtils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.variant.GATKVCFConstants;
import org.broadinstitute.hellbender.utils.variant.GATKVariantContextUtils;
import org.broadinstitute.hellbender.utils.variant.VariantContextGetters;
import org.genomicsdb.GenomicsDBUtils;
import org.genomicsdb.model.GenomicsDBExportConfiguration;
import org.genomicsdb.model.GenomicsDBVidMapProto;
import org.genomicsdb.reader.GenomicsDBFeatureReader;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

@Test(groups = {"variantcalling"})
public final class GenomicsDBImportIntegrationTest extends CommandLineProgramTest {
    // --- Single-sample gVCF inputs (and their sample names) used by most import tests ---
    private static final String HG_00096 = largeFileTestDir + "gvcfs/HG00096.g.vcf.gz";
    private static final String HG_00096_SAMPLE_NAME = "HG00096";
    private static final String HG_00268 = largeFileTestDir + "gvcfs/HG00268.g.vcf.gz";
    private static final String HG_00268_SAMPLE_NAME = "HG00268";
    private static final String NA_19625 = largeFileTestDir + "gvcfs/NA19625.g.vcf.gz";
    private static final String NA_19625_SAMPLE_NAME = "NA19625";
    //The following 3 files were obtained by running CombineGVCFs on the above 3 files (separately). This introduces spanning
    //deletions in the files. Hence, these files can be used to test for spanning deletions in the input VCF.
    private static final String HG_00096_after_combine_gvcfs = largeFileTestDir + "gvcfs/HG00096_after_combine_gvcfs.g.vcf.gz";
    private static final String HG_00268_after_combine_gvcfs = largeFileTestDir + "gvcfs/HG00268_after_combine_gvcfs.g.vcf.gz";
    private static final String NA_19625_after_combine_gvcfs = largeFileTestDir + "gvcfs/NA19625_after_combine_gvcfs.g.vcf.gz";
    private static final String NA_24385 = largeFileTestDir + "NA24385.vcf.gz";
    private static final String NA_12878_PHASED = largeFileTestDir + "NA12878.phasedData.Chr20.vcf"; //NOTE: this is not phased according to the vcf spec but it reflects phasing currently produced by haplotype caller
    private static final String MULTIPLOID_DATA_HG37 = largeFileTestDir + "gvcfs/HapMap5plex.ploidy10.b37.g.vcf";
    private static final String NA12878_HG37 = toolsTestDir + "GenomicsDBImport/expected.testGVCFMode.gatk4.g.vcf";
    //This file was generated by running CombineGVCFs on the input files
    //./gatk CombineGVCFs -V src/test/resources/org/broadinstitute/hellbender/tools/GenomicsDBImport/expected.testGVCFMode.gatk4.g.vcf -V src/test/resources/large/gvcfs/HapMap5plex.ploidy10.b37.g.vcf -R src/test/resources/large/human_g1k_v37.20.21.fasta -L 20:10000000-10100000 -O src/test/resources/org/broadinstitute/hellbender/tools/GenomicsDBImport/expected.testGenomicsDBImportWithNonDiploidData.vcf
    private static final String MULTIPLOID_EXPECTED_RESULT = toolsTestDir + "GenomicsDBImport/expected.testGenomicsDBImportWithNonDiploidData.vcf";
    private static final String MNP_GVCF = toolsTestDir + "GenomicsDBImport/mnp.input.g.vcf";
    //NOTE(review): "ArtificalPhasedData" [sic] — the misspelling matches the on-disk test resource name, so do not "fix" it here.
    private static final String ARTIFICIAL_PHASED = getTestDataDir() + "/ArtificalPhasedData.1.g.vcf";
    private static final String HG_00268_WITH_SPACES = largeFileTestDir + "gvcfs/HG00268.spaceInSampleName.g.vcf";
    private static final List<String> LOCAL_GVCFS = Arrays.asList(HG_00096, HG_00268, NA_19625);
    private static final List<String> LOCAL_GVCFS_AFTER_COMBINE_GVCFS = Arrays.asList(HG_00096_after_combine_gvcfs,
            HG_00268_after_combine_gvcfs,
            NA_19625_after_combine_gvcfs);
    private static final String GENOMICSDB_TEST_DIR = toolsTestDir + "GenomicsDBImport/";
    private static final String COMBINEGVCFS_TEST_DIR = toolsTestDir + "walkers/CombineGVCFs/";
    // --- Expected/combined outputs that import results are compared against ---
    private static final String COMBINED = largeFileTestDir + "gvcfs/combined.gatk3.7.g.vcf.gz";
    private static final String COMBINED_WITH_GENOTYPES = largeFileTestDir + "gvcfs/combined_with_genotypes.g.vcf.gz";
    //This file was obtained from combined.gatk3.7.g.vcf.gz by dropping all the samples
    private static final String COMBINED_SITES_ONLY = largeFileTestDir + "gvcfs/combined.gatk3.7_sites_only.g.vcf.gz";
    private static final String INTERVAL_PICARD_STYLE_EXPECTED = toolsTestDir + "GenomicsDBImport/interval_expected.interval_list";
    private static final String MULTIPLE_NON_ADJACENT_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS_PICARD_STYLE_EXPECTED =
            toolsTestDir + "GenomicsDBImport/multiple_non_adjacent_intervals_combine_gvcfs_expected.interval_list";
    private static final String MERGED_CONTIGS_INTERVAL_PICARD_STYLE_EXPECTED =
            toolsTestDir + "GenomicsDBImport/chr20_chr21_merged_contigs_expected.interval_list";
    private static final String TEST_INT64_SUPPORT_GENOMICSDB_BUNDLE = GENOMICSDB_TEST_DIR + "/int64_test.tar.gz";
    //Consider a gVCF with a REF block chr20:50-150. Importing this data into GenomicsDB using multiple intervals
    //-L chr20:1-100 and -L chr20:101-200 will cause the REF block to be imported into both the arrays
    //Now, when reading data from the workspace (assume full scan) - the data is split into 2 REF block intervals chr20:50-100
    //and chr20:101-150 one from each array
    //The following COMBINED_MULTI_INTERVAL gvcf is identical to the gVCF in the previous line except at the partition break
    //position
    //The previous file has the following line:
    //chr20   17970000        .       G       <NON_REF>       .       .       END=17970001
    //
    //while this file has:
    //chr20   17970000        .       G       <NON_REF>       .       .       .
    //chr20   17970001        .       G       <NON_REF>       .       .       .
    //
    private static final String COMBINED_MULTI_INTERVAL = largeFileTestDir + "gvcfs/combined_multi_interval.gatk3.7.g.vcf.gz";
    private static final String COMBINED_WITHSPACES = largeFileTestDir + "gvcfs/combined.gatk3.7.smaller_interval.g.vcf";
    // --- Import/query intervals used throughout the tests (hg38 "chr"-prefixed unless noted) ---
    private static final ArrayList<SimpleInterval> INTERVAL =
            new ArrayList<SimpleInterval>(Arrays.asList(new SimpleInterval("chr20", 17960187, 17981445)));
    // Deliberately does not span a whole contig; used to test that contig merging rejects partial contigs.
    private static final ArrayList<SimpleInterval> INTERVAL_NOTFULL =
            new ArrayList<SimpleInterval>(Arrays.asList(new SimpleInterval("chr20", 1, 17960187)));
    private static final ArrayList<SimpleInterval> INTERVAL_20_21 =
            new ArrayList<SimpleInterval>(Arrays.asList(new SimpleInterval("chr20"),new SimpleInterval("chr21")));
    // Adjacent partitions of INTERVAL; the break at 17970000/17970001 interacts with the REF block
    // described above COMBINED_MULTI_INTERVAL.
    private static final ArrayList<SimpleInterval> MULTIPLE_INTERVALS = new ArrayList<SimpleInterval>(Arrays.asList(
        new SimpleInterval("chr20", 17960187, 17970000),
        new SimpleInterval("chr20", 17970001, 17980000),
        new SimpleInterval("chr20", 17980001, 17981445)
    ));
    // INTERVAL plus two intervals (chr2, chr22) that are never imported into the workspace.
    private static final ArrayList<SimpleInterval> INCLUDES_NON_IMPORTED_INTERVALS = new ArrayList<SimpleInterval>(Arrays.asList(
        new SimpleInterval("chr2", 1, 100),
        new SimpleInterval("chr20", 17960187, 17981445),
        new SimpleInterval("chr22", 1, 100)
    ));
    private static final ArrayList<SimpleInterval> MULTIPLE_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS =
        new ArrayList<SimpleInterval>(Arrays.asList(
            new SimpleInterval("chr20", 17960187, 17969999),
            new SimpleInterval("chr20", 17970000, 17980000),
            new SimpleInterval("chr20", 17980001, 17981445)
    ));
    private static final ArrayList<SimpleInterval> MULTIPLE_NON_ADJACENT_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS =
        new ArrayList<SimpleInterval>(Arrays.asList(
            new SimpleInterval("chr20", 17960187, 17969999),
            new SimpleInterval("chr20", 17980001, 17981445),
            new SimpleInterval("chr21", 29477554, 29486255)
    ));
    private static final ArrayList<SimpleInterval> INTERVAL_3736 =
            new ArrayList<SimpleInterval>(Arrays.asList(new SimpleInterval("chr6",130365070,146544250)));
    // b37-style contig name (no "chr" prefix).
    private static final ArrayList<SimpleInterval> INTERVAL_NONDIPLOID =
            new ArrayList<SimpleInterval>(Arrays.asList(new SimpleInterval("20", 10000000, 10100000)));
    private static final ArrayList<SimpleInterval> SMALLER_INTERVAL =
            new ArrayList<SimpleInterval>(Arrays.asList(new SimpleInterval("chr20", 17960187, 17961973)));
    private static final VCFHeader VCF_HEADER = VariantContextTestUtils.getCompleteHeader();
    private static final String SAMPLE_NAME_KEY = "SN";
    private static final String ANOTHER_ATTRIBUTE_KEY = "AA";

    private static final List<String> GVCFS_WITH_NEW_MQ = Arrays.asList(NA12878_HG37, getTestDataDir() + "/walkers/CombineGVCFs/YRIoffspring.chr20snippet.g.vcf");
    private static final String COMBINED_WITH_NEW_MQ = toolsTestDir + "/walkers/GenomicsDBImport/newMQcalc.combined.g.vcf";
    private static final List<SimpleInterval> INTERVAL2 = Arrays.asList(new SimpleInterval("20", 1, 11_000_000));
    private static final List<String> ATTRIBUTES_TO_IGNORE = Arrays.asList("RAW_MQ","RAW_MQandDP");  //CombineGVCFs doesn't support the old RAW_MQ anymore
    // we're using vcfs instead of gvcfs for many contigs test, and these attributes don't have default combine operations in GenomicsDB
    private static final List<String> MANY_CONTIGS_ATTRIBUTES_TO_IGNORE = Arrays.asList("HaplotypeScore","MLEAC", "MLEAF");
    // --- Inputs for the many-contigs (Populus trichocarpa scaffold) tests ---
    private static final String P717 = largeFileTestDir + "Ptrichocarpa.v3.sorted.p717.vcf";
    private static final String P717_2 = largeFileTestDir + "Ptrichocarpa.v3.sorted.p717_2.vcf";
    private static final List<String> MANY_CONTIGS_VCF = Arrays.asList(P717, P717_2);
    private static final String EXPECTED_SEVERAL_CONTIGS_VCF = largeFileTestDir + "Ptrichocarpa.v3.p717.p717_2.combined.final.expected.vcf";
    private static final String MANY_CONTIGS_REF = largeFileTestDir + "Populus_trichocarpa.Pop_tri_v3.dna.nonchromosomal_subset_renamed.fa";
    // scaffold_3123 has been removed to test non adjacent interval list works (after scaffold_3381 in header)
    private static final List<String> MANY_CONTIGS_NON_ADJACENT_INTERVALS = Arrays.asList("scaffold_3121", "scaffold_3427", "scaffold_3213", "scaffold_3050", "scaffold_3381",
        "scaffold_3472", "scaffold_2907", "scaffold_3046", "scaffold_3412", "scaffold_3304", "scaffold_3332", "scaffold_3326", "scaffold_3230",
        "scaffold_3160", "scaffold_3403", "scaffold_2851", "scaffold_3416", "scaffold_3340", "scaffold_2911", "scaffold_3442", "scaffold_3681", "scaffold_2889",
        "scaffold_3305", "scaffold_3335", "scaffold_3316", "scaffold_3126", "scaffold_3363", "scaffold_2844", "scaffold_3388", "scaffold_3285", "scaffold_2968",
        "scaffold_3074", "scaffold_3436", "scaffold_3289", "scaffold_3264", "scaffold_2919", "scaffold_3422", "scaffold_3393", "scaffold_3387", "scaffold_3453",
        "scaffold_3171", "scaffold_3372", "scaffold_3389", "scaffold_3259", "scaffold_2930", "scaffold_3129", "scaffold_3044", "scaffold_3147", "scaffold_2885",
        "scaffold_3452", "scaffold_3202", "scaffold_3263", "scaffold_3354", "scaffold_3134", "scaffold_3255", "scaffold_3320", "scaffold_3523", "scaffold_3432",
        "scaffold_3239", "scaffold_3206", "scaffold_3437", "scaffold_2922", "scaffold_3136", "scaffold_3292", "scaffold_3391", "scaffold_3061", "scaffold_3250",
        "scaffold_3226", "scaffold_2857", "scaffold_3528", "scaffold_3325", "scaffold_3296", "scaffold_3298", "scaffold_2924", "scaffold_3157", "scaffold_2855",
        "scaffold_3275", "scaffold_3007", "scaffold_3306", "scaffold_3179", "scaffold_3060", "scaffold_3222", "scaffold_3648", "scaffold_3005", "scaffold_3020",
        "scaffold_3194", "scaffold_3328", "scaffold_3251", "scaffold_3547", "scaffold_3342", "scaffold_3139", "scaffold_3262", "scaffold_3210", "scaffold_2981",
        "scaffold_2933", "scaffold_3056", "scaffold_3413", "scaffold_3064", "scaffold_3353", "scaffold_2913", "scaffold_3445", "scaffold_3374", "scaffold_3214",
        "scaffold_3423", "scaffold_3095", "scaffold_2965", "scaffold_3357", "scaffold_3021", "scaffold_3228", "scaffold_3300", "scaffold_3042", "scaffold_3312",
        "scaffold_3537", "scaffold_3058", "scaffold_3425", "scaffold_3431", "scaffold_3368", "scaffold_2951", "scaffold_3356", "scaffold_3116", "scaffold_3257",
        "scaffold_3478", "scaffold_3068", "scaffold_3008", "scaffold_2893", "scaffold_3088", "scaffold_3269", "scaffold_3245", "scaffold_3190", "scaffold_3054",
        "scaffold_3383", "scaffold_3346", "scaffold_3223", "scaffold_3446", "scaffold_3370", "scaffold_3252", "scaffold_3053", "scaffold_3100", "scaffold_2838",
        "scaffold_3272", "scaffold_3384", "scaffold_2868", "scaffold_3398", "scaffold_3107", "scaffold_3014", "scaffold_3364", "scaffold_2987", "scaffold_3191",
        "scaffold_3076", "scaffold_3246", "scaffold_3011", "scaffold_3348", "scaffold_3231", "scaffold_3448", "scaffold_3360", "scaffold_3352", "scaffold_3294",
        "scaffold_2853", "scaffold_3024", "scaffold_3426", "scaffold_3379", "scaffold_3440", "scaffold_3550", "scaffold_2879", "scaffold_3362", "scaffold_3236");
    // Number of partitions the many non-adjacent contigs are merged down to in the merge-contigs tests.
    private static final int SEVERAL_CONTIGS = 7;
    private static final String MANY_CONTIGS_INTERVAL_PICARD_STYLE_EXPECTED =
            toolsTestDir + "GenomicsDBImport/Ptrichocarpa.v3.expected.interval_list";
    // Reference containing IUPAC ambiguity codes. Made final to match every other constant in this
    // class; it was the only non-final static field and is never reassigned.
    private static final String IUPAC_REF = publicTestDir + "/iupacFASTA.fasta";

    @Override
    public String getTestedClassName() {
        // Tells the CommandLineProgramTest framework which tool these tests run.
        return GenomicsDBImport.class.getSimpleName();
    }

    @DataProvider(name="batchSizes")
    public Object[][] batchSizes() {
        // Batch sizes exercised by the batched-import tests: several small values plus one value
        // (100) larger than the number of test samples.
        final int[] sizes = {1, 2, 3, 4, 100};
        return Arrays.stream(sizes)
                .mapToObj(size -> new Object[]{size})
                .toArray(Object[][]::new);
    }

    @Test
    public void testGenomicsDBImportFileInputs() throws IOException {
        // Import the three local gVCFs over a single chr20 interval and compare against the
        // GATK3.7 CombineGVCFs expected output.
        testGenomicsDBImporter(LOCAL_GVCFS, INTERVAL, COMBINED, b38_reference_20_21, true, 1);
    }

    @Test
    public void testGenomicsDBImportFileInputsNativeReader() throws IOException {
        // Same as testGenomicsDBImportFileInputs, but the trailing flag enables the native-reader
        // import code path.
        testGenomicsDBImporter(LOCAL_GVCFS, INTERVAL, COMBINED, b38_reference_20_21, true, 1, true);
    }

    @Test
    public void testGenomicsDBImportFileInputs_newMQ() throws IOException {
        // Imports gVCFs carrying the newer RAW_MQandDP-style MQ annotation (see GVCFS_WITH_NEW_MQ)
        // and compares against a pre-combined expected VCF, ignoring no extra attributes.
        testGenomicsDBImporter_newMQ(GVCFS_WITH_NEW_MQ, INTERVAL2, COMBINED_WITH_NEW_MQ, b37_reference_20_21, true, Collections.emptyList());
    }

    @Test
    public void testGenomicsDBImportFileInputsWithMultipleIntervals() throws IOException {
        // Multi-interval import: expected output differs at the partition break (see the note above
        // COMBINED_MULTI_INTERVAL about REF blocks being split across arrays).
        testGenomicsDBImporter(LOCAL_GVCFS, MULTIPLE_INTERVALS, COMBINED_MULTI_INTERVAL, b38_reference_20_21, true, 1);
    }

    @Test
    public void testGenomicsDBImportFileInputsIncludeNonImportedIntervals() throws IOException {
        // Import only INTERVAL, then query over a superset that also contains chr2/chr22 intervals
        // which were never imported; results are still expected to match COMBINED.
        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";
        writeToGenomicsDB(LOCAL_GVCFS, INTERVAL, workspace, 0, false, 0, 1, false, false, false, 0, true);
        checkGenomicsDBAgainstExpected(workspace, INCLUDES_NON_IMPORTED_INTERVALS, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
    }

    @Test
    public void testGenomicsDBImportFileInputsWithMultipleIntervalsNativeReader() throws IOException {
        // Native-reader variant of the multi-interval import test above.
        testGenomicsDBImporter(LOCAL_GVCFS, MULTIPLE_INTERVALS, COMBINED_MULTI_INTERVAL, b38_reference_20_21, true, 1, true);
    }

    @Test(timeOut = 1000000)
    public void testGenomicsDBImportWith1000IntervalsToBeMerged() throws IOException {
        // Stress test: import one sample over 1000 small intervals that the tool must merge;
        // the generous timeout guards against pathological per-interval import cost.
        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";
        //[ 17960187, 17981445 ]
        final int base = 17960187;
        // ArrayList (presized) instead of LinkedList: purely sequential appends/reads, and the
        // original unbraced for-loop body is now braced per house style.
        final List<SimpleInterval> intervals = new ArrayList<>(1000);
        for (int i = 0; i < 1000; ++i) {
            //intervals of size 10 separated by 10
            intervals.add(new SimpleInterval("chr20", base + 20 * i, base + 20 * i + 10));
        }
        writeToGenomicsDB(new ArrayList<>(Arrays.asList(LOCAL_GVCFS.get(0))), intervals, workspace, 0,
                false, 0, 1, true);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCF() throws IOException {
        // Cross-check: the GenomicsDB import result must match what CombineGVCFs produces directly.
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS, INTERVAL, b38_reference_20_21, new String[0]);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFMergeContigsToSinglePartition() throws IOException {
        // Whole-contig intervals (chr20 + chr21) merged into a single partition (merge arg = 1).
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS, INTERVAL_20_21, b38_reference_20_21, new String[0], 1, 1, false);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFNativeReader() throws IOException {
        // Native-reader variant of the CombineGVCFs cross-check.
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS, INTERVAL, b38_reference_20_21, new String[0], 1, 0, true);
    }

    @Test
    public void testGenomicsDBImportMergeContigsManyNonAdjacentContigsToSeveralContigs() throws IOException {
        // Each whole-contig name becomes one interval; the import merges the many scaffolds down to
        // SEVERAL_CONTIGS partitions, and the result is compared against the pre-combined expected VCF.
        final List<SimpleInterval> contigIntervals = new ArrayList<>();
        for (final String contig : MANY_CONTIGS_NON_ADJACENT_INTERVALS) {
            contigIntervals.add(new SimpleInterval(contig));
        }
        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";

        writeToGenomicsDB(MANY_CONTIGS_VCF, contigIntervals, workspace, 0, false, 0, 1, false, false, false, SEVERAL_CONTIGS, false);
        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, contigIntervals, EXPECTED_SEVERAL_CONTIGS_VCF, MANY_CONTIGS_REF, true,
                MANY_CONTIGS_ATTRIBUTES_TO_IGNORE, true, false);
    }

    @Test(expectedExceptions = {UserException.class}, expectedExceptionsMessageRegExp=".*entire contigs be specified.*")
    public void testGenomicsDBMergeContigsThrowsOnNotInputIntervalLessThanContigLength() throws IOException {
        // Contig merging requires whole-contig intervals; a partial-contig interval must be rejected.
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS, INTERVAL_NOTFULL, b38_reference_20_21, new String[0], 1, 1, false);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFWithMultipleIntervals() throws IOException {
        // Multi-interval CombineGVCFs cross-check; uses interval boundaries CombineGVCFs can reproduce.
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS, MULTIPLE_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS, b38_reference_20_21, new String[0]);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFWithMultipleIntervalsNativeReader() throws IOException {
        // Native-reader variant of the multi-interval CombineGVCFs cross-check.
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS, MULTIPLE_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS, b38_reference_20_21, new String[0], 1, 0, true);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFWithMultipleIntervalsWithMultipleThreads() throws IOException {
        // Same cross-check with 4 VCF-reader threads in the importer.
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS, MULTIPLE_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS, b38_reference_20_21,
                new String[0], 4);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFWithMultipleIntervalsWithMultipleThreadsNativeReader() throws IOException {
        // 4 reader threads plus the native-reader code path.
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS, MULTIPLE_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS, b38_reference_20_21,
                new String[0], 4, 0, true);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFWithMultipleNonAdjacentIntervals() throws IOException {
        // Non-adjacent intervals spanning two contigs (chr20 + chr21).
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS, MULTIPLE_NON_ADJACENT_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS,
            b38_reference_20_21, new String[0]);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFWithMultipleNonAdjacentIntervalsNativeReader() throws IOException {
        // Native-reader variant of the non-adjacent-intervals cross-check.
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS, MULTIPLE_NON_ADJACENT_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS,
            b38_reference_20_21, new String[0], 1, 0, true);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFWithMultipleNonAdjacentIntervalsForFilesProducedAfterCombineGVCFs()
        throws IOException {
        //this test covers the scenario where the input vcfs have spanning deletions
        // (the *_after_combine_gvcfs inputs were produced by CombineGVCFs, which introduces them).
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS_AFTER_COMBINE_GVCFS, MULTIPLE_NON_ADJACENT_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS,
            b38_reference_20_21, new String[0]);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFWithMultipleNonAdjacentIntervalsForFilesProducedAfterCombineGVCFsNativeReader()
        throws IOException {
        //this test covers the scenario where the input vcfs have spanning deletions
        // Native-reader variant of the spanning-deletion cross-check above.
        testGenomicsDBAgainstCombineGVCFs(LOCAL_GVCFS_AFTER_COMBINE_GVCFS, MULTIPLE_NON_ADJACENT_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS,
            b38_reference_20_21, new String[0], 1, 0, true);
    }

    @Test
    public void testGenomicsDBImportFileInputsAgainstCombineGVCFWithNonDiploidData() throws IOException {
        // Mixes a diploid sample with a ploidy-10 sample (MULTIPLOID_DATA_HG37) and compares against
        // the pre-generated CombineGVCFs expected output (see the command above MULTIPLOID_EXPECTED_RESULT).
        testGenomicsDBImporterWithGenotypes(Arrays.asList(NA12878_HG37, MULTIPLOID_DATA_HG37), INTERVAL_NONDIPLOID,
                MULTIPLOID_EXPECTED_RESULT, b37_reference_20_21,
                true,
                false,
                false);
    }

    @Test
    public void testGenomicsDBImportPhasedData() throws IOException {
        // Round trip: the import output is compared against the phased input itself.
        testGenomicsDBImporterWithGenotypes(Arrays.asList(NA_12878_PHASED), INTERVAL, NA_12878_PHASED, b37_reference_20_21);
    }

    @Test
    public void testGenomicsDBImportPhasedDataWithMultipleIntervals() throws IOException {
        // Phasing must also survive a multi-interval (partitioned) import.
        testGenomicsDBImporterWithGenotypes(Arrays.asList(NA_12878_PHASED), MULTIPLE_INTERVALS, NA_12878_PHASED, b37_reference_20_21);
    }

    @Test
    public void testGenomicsDBImportArtificialPhasedData() throws IOException {
        // Round trip of a small artificially phased gVCF: the import output is compared against the
        // input file itself over a short region of contig 1.
        final List<SimpleInterval> phasedRegion = new ArrayList<>(Arrays.asList(new SimpleInterval("1", 10109, 10297)));
        testGenomicsDBImporterWithGenotypes(Arrays.asList(ARTIFICIAL_PHASED), phasedRegion, ARTIFICIAL_PHASED, b37_reference_20_21);
    }

    @Test
    public void testGenomicsDBThreeLargeSamplesWithGenotypes() throws IOException {
        // Full-chr20 import of the three samples, checked against combined output with genotypes.
        ArrayList<SimpleInterval> intervals = new ArrayList<SimpleInterval>(Arrays.asList(new SimpleInterval("chr20", 1, 64444167)));
        testGenomicsDBImporterWithGenotypes(LOCAL_GVCFS, intervals, COMBINED_WITH_GENOTYPES, b38_reference_20_21, true, true, false);
    }

    @Test
    public void testGenomicsDBThreeLargeSamplesWithGenotypesNativeReader() throws IOException {
        // Native-reader variant of the full-chr20 genotype test above.
        ArrayList<SimpleInterval> intervals = new ArrayList<SimpleInterval>(Arrays.asList(new SimpleInterval("chr20", 1, 64444167)));
        testGenomicsDBImporterWithGenotypes(LOCAL_GVCFS, intervals, COMBINED_WITH_GENOTYPES, b38_reference_20_21, true, true, false, true);
    }

    @Test
    public void testGenomicsDBThreeLargeSamplesSitesOnlyQuery() throws IOException {
        // Sites-only query (final flag true): expected output has all samples dropped
        // (COMBINED_SITES_ONLY, see its declaration).
        ArrayList<SimpleInterval> intervals = new ArrayList<SimpleInterval>(Arrays.asList(
                    new SimpleInterval("chr20", 1, 64444167),
                    new SimpleInterval("chr21", 1, 46709983)));
        testGenomicsDBImporterWithGenotypes(LOCAL_GVCFS, intervals, COMBINED_SITES_ONLY, b38_reference_20_21, true, true, true);
    }

    @Test(expectedExceptions={UserException.BadInput.class}, expectedExceptionsMessageRegExp=".*GenomicsDBImport does not support GVCFs.*")
    public void testGenomicsDbImportThrowsOnMnp() throws IOException {
        // Importing a gVCF containing an MNP must be rejected.
        // NOTE(review): the expected exception is thrown during the threads==1 iteration, so the
        // threads==2 case never actually runs; TestNG only requires the exception once.
        for (int threads = 1; threads <= 2; ++threads) {
            testGenomicsDBImporter(
                    Collections.singletonList(MNP_GVCF),
                    Collections.singletonList(new SimpleInterval("20", 69700, 69900)),
                    null, // Should never produce a VCF
                    b38_reference_20_21,
                    true,
                    threads
            );
        }
    }

    @DataProvider
    public Object[][] getInvalidArgsForAvoidNio(){
        // Each row is an invalid argument combination involving --avoid-nio plus the exception
        // class the command line parser is expected to throw for it.
        final ArgumentsBuilder baseArgs = ArgumentsBuilder.create()
            .add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, createTempFile())
                .addInterval("fake")
                .addFlag(GenomicsDBImport.AVOID_NIO);
        return new Object[][]{
                {baseArgs, CommandLineException.MissingArgument.class}, //no input
                {baseArgs.copy()
                        .addVCF("fake.vcf"), CommandLineException.class
                }, //not allowed with variant, we should have some sort of mutex exception...
                {baseArgs.copy()
                        .add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, "fake.samplenames"), CommandLineException.MissingArgument.class
                }, //missing header
                {baseArgs.copy()
                        .add(GenomicsDBImport.VCF_HEADER_OVERRIDE, "fake.vcf"), CommandLineException.MissingArgument.class
                }, //missing input
                {baseArgs.copy()
                        .add(GenomicsDBImport.VCF_HEADER_OVERRIDE, "fake.vcf")
                        .addVCF("fake.vcf"), CommandLineException.class // can't use with -V
                }
         };
    }

    @Test(dataProvider = "getInvalidArgsForAvoidNio")
    public void testInvalidArgumentCombinationsWithAvoidNio(final ArgumentsBuilder args, final Class<? extends Exception> expectedException){
        // Every invalid combination from the data provider must be rejected with the expected exception.
        Assert.assertThrows(expectedException, () -> runCommandLine(args));
    }

    /*
     * this is a test that can be run locally if you enable it and fill in the SAS token with one from
     * https://app.terra.bio/#workspaces/axin-pipeline-testing-20230927/gatk-azure-testing
     *
     * it's basically an example of how to run the tool on azure
     *
     * note that the http url for the file azure files looks like this:
     *
     * https://<bucket_name>.blob.core.windows.net/<user_name>/<filepath>?<sas token>
     * the SAS token includes the '?' generally
     *
     * to restructure into an az:// link you move the username
     * az://<user_name>@<bucket_name>blob.core.windows.new/<filepath>
     *
     */
    @Test(enabled = false, groups={"cloud","azure"})
    public void testImportFromAzure(){
        // Disabled manual test: requires a valid SAS token to be pasted in below (see the comment
        // block above this method for where to obtain one and how az:// URLs are structured).

        final String SAS_TOKEN="put a sas token in me";

        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";
        final String sample = "NA19625";
        final String azLocation = "az://lzb25a77f5eadb0fa72a2ae7.blob.core.windows.net/sc-62528cd7-3299-4440-8c17-10f458e589d3/NA19625.g.vcf.gz";
        // sample-name-map format: one "<sample>\t<path>" line per sample.
        final String sampleMapText = String.format("%s\t%s\n", sample, azLocation);
        final File sampleMappingFile = IOUtils.writeTempFile(sampleMapText, "sampleMapping", ".txt");

        final ArgumentsBuilder args = ArgumentsBuilder.create()
                .add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, workspace)
                .addInterval("chr20")
                .addFlag(GenomicsDBImport.AVOID_NIO)
                .add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, sampleMappingFile)
                .addFlag(GenomicsDBImport.BYPASS_FEATURE_READER)
                .add(GenomicsDBImport.VCF_HEADER_OVERRIDE, GENOMICSDB_TEST_DIR + "azureHeader.vcf");
        // Run in a fresh JVM so the SAS token can be injected via the environment.
        Map<String, String> environment = new HashMap<>(System.getenv());
        final String sasTokenEnvVariable = "AZURE_STORAGE_SAS_TOKEN";
        environment.put(sasTokenEnvVariable, SAS_TOKEN);
        runToolInNewJVM(GenomicsDBImport.class.getSimpleName(), args, environment);
    }

    // Convenience overload: testAll=false, produceGTField=true, sitesOnlyQuery=false.
    private void testGenomicsDBImporterWithGenotypes(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                                     final String expectedCombinedVCF,
                                                      final String referenceFile) throws IOException {
        testGenomicsDBImporterWithGenotypes(vcfInputs, intervals,
                expectedCombinedVCF, referenceFile,
                false,
                true,
                false);
    }

    // Convenience overload: produceGTField=false, sitesOnlyQuery=false.
    private void testGenomicsDBImporterWithGenotypes(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                                      final String expectedCombinedVCF, final String referenceFile,
                                                     final boolean testAll) throws IOException {
        testGenomicsDBImporterWithGenotypes(vcfInputs, intervals,
                expectedCombinedVCF, referenceFile,
                testAll,
                false,
                false);
    }

    // Convenience overload: useNativeReader=false.
    private void testGenomicsDBImporterWithGenotypes(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                                      final String expectedCombinedVCF, final String referenceFile,
                                                     final boolean testAll,
                                                     final boolean produceGTField,
                                                     final boolean sitesOnlyQuery) throws IOException {
         testGenomicsDBImporterWithGenotypes(vcfInputs, intervals, expectedCombinedVCF, referenceFile, testAll, produceGTField,
                 sitesOnlyQuery, false);
    }

    // Core helper: imports the inputs into a fresh temp workspace, verifies the JSON sidecar files
    // exist, then checks the workspace contents against the expected combined VCF twice — the second
    // checkGenomicsDBAgainstExpected call passes an extra trailing flag (presumably an alternate
    // query/reader mode — confirm against its signature, which is outside this chunk).
    private void testGenomicsDBImporterWithGenotypes(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                                      final String expectedCombinedVCF, final String referenceFile,
                                                     final boolean testAll,
                                                     final boolean produceGTField,
                                                     final boolean sitesOnlyQuery,
                                                     final boolean useNativeReader) throws IOException {
        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";

        writeToGenomicsDB(vcfInputs, intervals, workspace, 0, false, 0, 1, false, false, false, 0, useNativeReader);
        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, intervals, expectedCombinedVCF, referenceFile, testAll, ATTRIBUTES_TO_IGNORE, produceGTField, sitesOnlyQuery);
        checkGenomicsDBAgainstExpected(workspace, intervals, expectedCombinedVCF, referenceFile, testAll, ATTRIBUTES_TO_IGNORE, produceGTField, sitesOnlyQuery, true);
    }

    /**
     * Runs the CombineGVCFs tool over the given inputs and intervals and returns the
     * resulting temporary VCF file.
     */
    private File runCombineGVCFs(final List<String> inputs, final List<SimpleInterval> intervals, final String reference, final String[] extraArgs) {
        final File output = createTempFile("genotypegvcf", ".vcf");

        final ArgumentsBuilder args = new ArgumentsBuilder()
                .addReference(new File(reference))
                .addOutput(output);
        inputs.forEach(input -> args.add("V", input));
        intervals.forEach(args::addInterval);
        for (final String extraArg : extraArgs) {
            args.addRaw(extraArg);
        }

        // Reset the RNG so tool output is deterministic across runs.
        Utils.resetRandomGenerator();
        new Main().instanceMain(makeCommandLineArgs(args.getArgsList(), "CombineGVCFs"));
        return output;
    }

    /** Convenience overload using a single VCF-reader thread in the importer. */
    private void testGenomicsDBAgainstCombineGVCFs(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                                   final String referenceFile, final String[] CombineGVCFArgs) throws IOException {
        final int singleReaderThread = 1;
        testGenomicsDBAgainstCombineGVCFs(vcfInputs, intervals, referenceFile, CombineGVCFArgs, singleReaderThread);
    }

    /** Convenience overload with no contig partitioning and the native reader disabled. */
    private void testGenomicsDBAgainstCombineGVCFs(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                                   final String referenceFile, final String[] CombineGVCFArgs,
                                                   final int numVCFReaderThreadsInImporter) throws IOException {
        testGenomicsDBAgainstCombineGVCFs(vcfInputs, intervals, referenceFile, CombineGVCFArgs,
                numVCFReaderThreadsInImporter, 0, false);
    }

    /**
     * Imports the given GVCFs into a fresh GenomicsDB workspace and, for each interval
     * individually, compares the exported variants to the output of CombineGVCFs run over
     * the same interval.
     *
     * @param numVCFReaderThreadsInImporter number of VCF initializer threads for the importer
     * @param chrsToPartitions number of contig partitions to merge into (0 = no merging)
     * @param useNativeReader whether to bypass the Java feature reader and use the native one
     */
    private void testGenomicsDBAgainstCombineGVCFs(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                                   final String referenceFile, final String[] CombineGVCFArgs,
                                                   final int numVCFReaderThreadsInImporter, final int chrsToPartitions,
                                                   final boolean useNativeReader) throws IOException {
        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";

        writeToGenomicsDB(vcfInputs, intervals, workspace, 0, false, 0, numVCFReaderThreadsInImporter, false, false, false,
                          chrsToPartitions, useNativeReader);
        checkJSONFilesAreWritten(workspace);
        for (final SimpleInterval currInterval : intervals) {
            // Compare one interval at a time so a mismatch is attributable to a specific interval.
            final List<SimpleInterval> singleInterval = Collections.singletonList(currInterval);
            final File expectedCombinedVCF = runCombineGVCFs(vcfInputs, singleInterval, referenceFile, CombineGVCFArgs);
            checkGenomicsDBAgainstExpected(workspace, singleInterval, expectedCombinedVCF.getAbsolutePath(),
                    referenceFile, true, ATTRIBUTES_TO_IGNORE);
        }
    }

    /** Importing directly from GCS inputs should match the expected combined VCF. */
    @Test(groups = {"bucket"})
    public void testGenomicsDBImportGCSInputs() throws IOException {
        final List<String> cloudInputs = resolveLargeFilesAsCloudURIs(LOCAL_GVCFS);
        testGenomicsDBImporter(cloudInputs, INTERVAL, COMBINED, b38_reference_20_21, true, 1);
    }

    /**
     * A workspace must stay readable after its parent directory is moved on disk, i.e. the
     * importer must not bake absolute paths into the workspace metadata.
     */
    @Test
    public void testGenomicsDBAbsolutePathDependency() throws IOException {
        final File initialDir = createTempDir("genomicsdb-tests-");
        final File relocatedDir = createTempDir("genomicsdb-secondary-tests-");

        writeToGenomicsDB(LOCAL_GVCFS, INTERVAL, initialDir.getAbsolutePath() + "/workspace", 0, false, 0, 1);
        checkJSONFilesAreWritten(initialDir.getAbsolutePath() + "/workspace");
        // Relocate the entire directory, then query from the new location.
        Files.move(initialDir.toPath(), relocatedDir.toPath(), StandardCopyOption.REPLACE_EXISTING);
        checkGenomicsDBAgainstExpected(relocatedDir.getAbsolutePath() + "/workspace", INTERVAL, COMBINED,
                b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
    }

    /** Allele-specific annotations must round-trip through GenomicsDB like CombineGVCFs. */
    @Test (enabled = true)
    public void testGenomicsDBAlleleSpecificAnnotations() throws IOException {
        final List<String> inputs = Arrays.asList(
                COMBINEGVCFS_TEST_DIR + "NA12878.AS.chr20snippet.g.vcf",
                COMBINEGVCFS_TEST_DIR + "NA12892.AS.chr20snippet.g.vcf");
        final List<SimpleInterval> intervals = new ArrayList<>(
                Arrays.asList(new SimpleInterval("20", 10433000, 10700000)));
        testGenomicsDBAgainstCombineGVCFs(inputs, intervals, b37_reference_20_21,
                new String[]{"-G", "StandardAnnotation", "-G", "AS_StandardAnnotation"});
    }

    /** Same AS-annotation check, but starting the query inside a spanning deletion. */
    @Test (enabled = true)
    public void testGenomicsDBAlleleSpecificAnnotationsInTheMiddleOfSpanningDeletion() throws IOException {
        final List<String> inputs = Arrays.asList(
                COMBINEGVCFS_TEST_DIR + "NA12878.AS.chr20snippet.g.vcf",
                COMBINEGVCFS_TEST_DIR + "NA12892.AS.chr20snippet.g.vcf");
        final List<SimpleInterval> intervals = new ArrayList<>(
                Arrays.asList(new SimpleInterval("20", 10433313, 10700000)));
        testGenomicsDBAgainstCombineGVCFs(inputs, intervals, b37_reference_20_21,
                new String[]{"-G", "StandardAnnotation", "-G", "AS_StandardAnnotation"});
    }

    /** Missing alleles must not be remapped to NON_REF on the way through GenomicsDB. */
    @Test
    public void testGenomicsDBNoRemapMissingToNonRef() throws IOException {
        final List<String> inputs = Arrays.asList(
                COMBINEGVCFS_TEST_DIR + "NA12878.AS.NON_REF_remap_check.chr20snippet.g.vcf",
                COMBINEGVCFS_TEST_DIR + "NA12892.AS.chr20snippet.g.vcf");
        final List<SimpleInterval> intervals = new ArrayList<>(
                Arrays.asList(new SimpleInterval("20", 10433313, 10700000)));
        testGenomicsDBAgainstCombineGVCFs(inputs, intervals, b37_reference_20_21,
                new String[]{"-G", "StandardAnnotation", "-G", "AS_StandardAnnotation"});
    }

    /** Soft-masked reference regions must not surface as 'N' reference alleles after import. */
    @Test
    public void testGenomicsDBSoftMaskedRegion() throws IOException {
        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";
        final List<String> vcfInputs = Arrays.asList(
                GENOMICSDB_TEST_DIR + "iupacTestSoftMasked.1.vcf",
                GENOMICSDB_TEST_DIR + "iupacTestSoftMasked.2.vcf");
        final List<SimpleInterval> intervals = Arrays.asList(new SimpleInterval("chr1", 1, 18000));

        writeToGenomicsDB(vcfInputs, intervals, workspace, 0, false, 0, 1);
        checkNoNAlleleInRef(workspace, IUPAC_REF);
    }

    /**
     * Asserts that the workspace yields at least one variant and that no exported variant has
     * an 'N' reference allele.
     *
     * Fix: the {@link CloseableTribbleIterator} was previously never closed; it is now managed
     * by the same try-with-resources as the reader.
     */
    private void checkNoNAlleleInRef(final String workspace, final String referenceFile) throws IOException {
        try (final FeatureReader<VariantContext> reader = getGenomicsDBFeatureReader(workspace, referenceFile);
             final CloseableTribbleIterator<VariantContext> iterator = reader.iterator()) {
            Assert.assertTrue(iterator.hasNext(), "expected to see a variant");
            iterator.forEachRemaining(vc -> {
                final Allele refAllele = vc.getReference();
                Assert.assertFalse(refAllele.basesMatch("N"), vc.getContig() + ":" + Integer.toString(vc.getStart()));
            });
        }
    }

    /**
     * Converts a list of large local test-file paths into the equivalent cloud (GCS) URIs,
     * asserting each result is a valid GCS URL.
     *
     * This must be done non-statically because any failure during static initialization results
     * in hard to understand TestNG errors, and it is possible for
     * {@link BaseTest#getGCPTestInputPath()} to fail if the environment isn't fully set up.
     *
     * The cloud bucket must be organized the same way as the local test files in order to
     * resolve correctly.
     */
    private static List<String> resolveLargeFilesAsCloudURIs(final List<String> filenames){
        final List<String> cloudPaths = new ArrayList<>(filenames.size());
        for (final String filename : filenames) {
            final String cloudPath = filename.replace(publicTestDir, getGCPTestInputPath());
            Assert.assertTrue(BucketUtils.isGcsUrl(cloudPath));
            cloudPaths.add(cloudPath);
        }
        return cloudPaths;
    }

    /** Batched local import over a single interval matches the expected combined VCF. */
    @Test(dataProvider = "batchSizes")
    public void testGenomicsDBImportFileInputsInBatches(final int batchSize) throws IOException {
        testGenomicsDBImporterWithBatchSize(LOCAL_GVCFS, INTERVAL, COMBINED, batchSize, false);
    }

    /** Batched local import over multiple intervals matches the expected combined VCF. */
    @Test(dataProvider = "batchSizes")
    public void testGenomicsDBImportFileInputsInBatchesWithMultipleIntervals(final int batchSize) throws IOException {
        testGenomicsDBImporterWithBatchSize(LOCAL_GVCFS, MULTIPLE_INTERVALS, COMBINED_MULTI_INTERVAL, batchSize, false);
    }

    /** Batched local import over multiple intervals using the native (htslib) reader. */
    @Test(dataProvider = "batchSizes")
    public void testGenomicsDBImportFileInputsInBatchesWithMultipleIntervalsNativeReader(final int batchSize) throws IOException {
        final boolean useNativeReader = true;
        testGenomicsDBImporterWithBatchSize(LOCAL_GVCFS, MULTIPLE_INTERVALS, COMBINED_MULTI_INTERVAL, batchSize, useNativeReader);
    }

    /** Batched import from GCS inputs matches the expected combined VCF. */
    @Test(groups = {"bucket"}, dataProvider = "batchSizes")
    public void testGenomicsDBImportGCSInputsInBatches(final int batchSize) throws IOException {
        final List<String> cloudInputs = resolveLargeFilesAsCloudURIs(LOCAL_GVCFS);
        testGenomicsDBImporterWithBatchSize(cloudInputs, INTERVAL, COMBINED, batchSize);
    }

    /** Batched import from GCS inputs using the native (htslib) reader. */
    @Test(groups = {"bucket"}, dataProvider = "batchSizes")
    public void testGenomicsDBImportGCSInputsInBatchesNativeReader(final int batchSize) throws IOException {
        final List<String> cloudInputs = resolveLargeFilesAsCloudURIs(LOCAL_GVCFS);
        testGenomicsDBImporterWithBatchSize(cloudInputs, INTERVAL, COMBINED, batchSize, true);
    }

    /** Thread counts exercised by the multi-threaded import tests. */
    @DataProvider
    public Object[][] getThreads(){
        return new Object[][]{{1}, {2}, {5}};
    }

    /** Import from GCS with varying VCF-initializer thread counts; results must be identical. */
    @Test(groups = {"bucket"}, dataProvider = "getThreads")
    public void testDifferentThreadValuesFromABucket(final int threads) throws IOException {
        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";
        final List<String> vcfInputs = resolveLargeFilesAsCloudURIs(LOCAL_GVCFS);

        writeToGenomicsDB(vcfInputs, INTERVAL, workspace, 0, false, 0, threads);
        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, INTERVAL, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
    }

    /** Local import with varying VCF-initializer thread counts; results must be identical. */
    @Test(dataProvider = "getThreads")
    public void testDifferentThreadValuesLocally(final int threads) throws IOException {
        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";

        writeToGenomicsDB(LOCAL_GVCFS, INTERVAL, workspace, 0, false, 0, threads);
        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, INTERVAL, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
    }
    /**
     * A genomicsdb-vcf-buffer-size of zero is below the tool's minimum and must be rejected.
     *
     * @throws CommandLineException.OutOfRangeArgumentValue  Value must be >= 1024 bytes
     */
    @Test(expectedExceptions = CommandLineException.OutOfRangeArgumentValue.class)
    public void testZeroVCFBufferSize() throws IOException {
        testGenomicsDBImportWithZeroBufferSize(LOCAL_GVCFS, INTERVAL, COMBINED);
    }


    /** Convenience overload that imports with the native reader disabled. */
    private void testGenomicsDBImporter(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                        final String expectedCombinedVCF, final String referenceFile,
                                        final boolean testAll, final int threads) throws IOException {
        final boolean useNativeReader = false;
        testGenomicsDBImporter(vcfInputs, intervals, expectedCombinedVCF, referenceFile, testAll, threads, useNativeReader);
    }

    /**
     * Imports the given GVCFs into a fresh workspace and checks the exported variants against
     * the expected combined VCF.
     *
     * Fixes: the {@code threads} parameter was previously ignored (a hard-coded 1 was passed to
     * writeToGenomicsDB); it is now honored. The JSON metadata check was also added for
     * consistency with the other import helpers.
     */
    private void testGenomicsDBImporter(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                        final String expectedCombinedVCF, final String referenceFile,
                                        final boolean testAll, final int threads, final boolean useNativeReader) throws IOException {
        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";
        writeToGenomicsDB(vcfInputs, intervals, workspace, 0, false, 0, threads, false, false, false, 0, useNativeReader);
        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, intervals, expectedCombinedVCF, referenceFile, testAll, ATTRIBUTES_TO_IGNORE);
    }

    /**
     * Import-and-compare helper that lets the caller supply the attributes to ignore
     * (used by the new-MQ-format tests).
     */
    private void testGenomicsDBImporter_newMQ(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                        final String expectedCombinedVCF, final String referenceFile,
                                        final boolean testAll, final List<String> attributesToIgnore) throws IOException {
        final String workspace = createTempDir("genomicsdb-tests-").getAbsolutePath() + "/workspace";
        writeToGenomicsDB(vcfInputs, intervals, workspace, 0, false, 0, 1);
        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, intervals, expectedCombinedVCF, referenceFile, testAll, attributesToIgnore);
    }

    /** Convenience overload with the native reader disabled. */
    private void testGenomicsDBImporterWithBatchSize(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                                     final String expectedCombinedVCF, final int batchSize) throws IOException {
        final boolean useNativeReader = false;
        testGenomicsDBImporterWithBatchSize(vcfInputs, intervals, expectedCombinedVCF, batchSize, useNativeReader);
    }

    /**
     * Imports the given GVCFs with the requested batch size into a batch-size-specific
     * workspace and verifies the result against the expected combined VCF.
     */
    private void testGenomicsDBImporterWithBatchSize(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                                     final String expectedCombinedVCF, final int batchSize,
                                                     final boolean useNativeReader) throws IOException {
        // Encode the batch size in the workspace name so parallel data-provider runs don't collide.
        final String workspace = createTempDir("genomicsdb-batchsize-tests-").getAbsolutePath() + "/workspace-" + batchSize;

        writeToGenomicsDB(vcfInputs, intervals, workspace, batchSize, false, 0, 1, false, false, false, 0, useNativeReader);
        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, intervals, expectedCombinedVCF, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
    }

    /**
     * Imports with an explicit (zero) per-sample VCF buffer size; callers expect the tool to
     * reject the value during argument parsing.
     */
    private void testGenomicsDBImportWithZeroBufferSize(final List<String> vcfInputs, final List<SimpleInterval> intervals,
                                                        final String expectedCombinedVCF) throws IOException {
        final String workspace = createTempDir("genomicsdb-buffersize-tests-").getAbsolutePath() + "/workspace";

        writeToGenomicsDB(vcfInputs, intervals, workspace, 0, true, 0, 1);
        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, intervals, expectedCombinedVCF, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
    }

    /** Convenience overload with interval merging disabled. */
    private void writeToGenomicsDB(final List<String> vcfInputs, final List<SimpleInterval> intervals, final String workspace,
                                   final int batchSize, final Boolean useBufferSize, final int bufferSizePerSample, int threads) {
        final boolean mergeIntervals = false;
        writeToGenomicsDB(vcfInputs, intervals, workspace, batchSize, useBufferSize, bufferSizePerSample, threads, mergeIntervals);
    }

    /** Convenience overload: no workspace overwrite and no incremental import. */
    private void writeToGenomicsDB(final List<String> vcfInputs, final List<SimpleInterval> intervals, final String workspace,
                                   final int batchSize, final Boolean useBufferSize, final int bufferSizePerSample, int threads, final boolean mergeIntervals) {
        writeToGenomicsDB(vcfInputs, intervals, workspace, batchSize, useBufferSize, bufferSizePerSample, threads,
                mergeIntervals, false, false);
    }

    /** Convenience overload: no contig partitioning and the native reader disabled. */
    private void writeToGenomicsDB(final List<String> vcfInputs, final List<SimpleInterval> intervals, final String workspace,
                                   final int batchSize, final Boolean useBufferSize, final int bufferSizePerSample, int threads,
                                   final boolean mergeIntervals, final boolean overwriteWorkspace, final boolean incremental) {
        final int noPartitioning = 0;
        writeToGenomicsDB(vcfInputs, intervals, workspace, batchSize, useBufferSize, bufferSizePerSample, threads,
                          mergeIntervals, overwriteWorkspace, incremental, noPartitioning, false);
    }

    /**
     * Runs GenomicsDBImport with the supplied options and, when contig partitioning was
     * requested, verifies the number of arrays created in the workspace.
     *
     * @param useBufferSize whether to pass an explicit per-sample VCF buffer size
     *                      (boxed Boolean is auto-unboxed below; callers never pass null here)
     * @param incremental   when true, appends to an existing workspace instead of creating one
     * @param chrsToPartitions number of partitions to merge contigs into (0 = no merging)
     */
    private void writeToGenomicsDB(final List<String> vcfInputs, final List<SimpleInterval> intervals, final String workspace,
                                   final int batchSize, final Boolean useBufferSize, final int bufferSizePerSample, int threads,
                                   final boolean mergeIntervals, final boolean overwriteWorkspace, final boolean incremental,
                                   final int chrsToPartitions, final boolean useNativeReader) {
        final ArgumentsBuilder args = new ArgumentsBuilder();
        if (incremental) {
            args.add(GenomicsDBImport.INCREMENTAL_WORKSPACE_ARG_LONG_NAME, workspace);
        } else {
            args.add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, workspace);
        }
        intervals.forEach(args::addInterval);
        vcfInputs.forEach(vcf -> args.add("V", vcf));
        args.add("batch-size", String.valueOf(batchSize));
        args.add(GenomicsDBImport.VCF_INITIALIZER_THREADS_LONG_NAME, String.valueOf(threads));
        args.add(GenomicsDBImport.MERGE_INPUT_INTERVALS_LONG_NAME, mergeIntervals);
        args.add(GenomicsDBImport.OVERWRITE_WORKSPACE_LONG_NAME, overwriteWorkspace);
        if (chrsToPartitions != 0) {
            args.add(GenomicsDBImport.MERGE_CONTIGS_INTO_NUM_PARTITIONS, String.valueOf(chrsToPartitions));
        }
        args.add(GenomicsDBImport.BYPASS_FEATURE_READER, useNativeReader);
        if (useBufferSize) {
            args.add("genomicsdb-vcf-buffer-size", String.valueOf(bufferSizePerSample));
        }

        runCommandLine(args);
        if (chrsToPartitions != 0) {
            final String[] partitions = GenomicsDBUtils.listGenomicsDBArrays(workspace);
            // it may not always be the case that the number of partitions created matches
            // the number we specified, but will be true for our tests.
            // assertEquals (rather than assertTrue on ==) reports both values on failure.
            Assert.assertEquals(partitions.length, chrsToPartitions);
        }
    }

    /** Asserts that the three GenomicsDB JSON metadata files exist in the workspace. */
    private static void checkJSONFilesAreWritten(final String workspace) {
        for (final String jsonFile : Arrays.asList(GenomicsDBConstants.DEFAULT_VIDMAP_FILE_NAME,
                                                   GenomicsDBConstants.DEFAULT_CALLSETMAP_FILE_NAME,
                                                   GenomicsDBConstants.DEFAULT_VCFHEADER_FILE_NAME)) {
            Assert.assertTrue(BucketUtils.fileExists(IOUtils.appendPathToDir(workspace, jsonFile)));
        }
    }

    /** Convenience overload: no GT-field production, no sites-only query, default codec. */
    private static void checkGenomicsDBAgainstExpected(final String workspace, final List<SimpleInterval> intervals,
                                                       final String expectedCombinedVCF, final String referenceFile,
                                                       final boolean testAll, final List<String> attributesToIgnore) throws IOException {
        final boolean produceGTField = false;
        final boolean sitesOnlyQuery = false;
        final boolean useVCFCodec = false;
        checkGenomicsDBAgainstExpected(workspace, intervals, expectedCombinedVCF, referenceFile, testAll,
                attributesToIgnore, produceGTField, sitesOnlyQuery, useVCFCodec);
    }

    /** Convenience overload using the default (non-VCF) codec. */
    private static void checkGenomicsDBAgainstExpected(final String workspace, final List<SimpleInterval> intervals,
                                                       final String expectedCombinedVCF, final String referenceFile,
                                                       final boolean testAll, final List<String> attributesToIgnore,
                                                       final boolean produceGTfield, final boolean sitesOnlyQuery) throws IOException {
        final boolean useVCFCodec = false;
        checkGenomicsDBAgainstExpected(workspace, intervals, expectedCombinedVCF, referenceFile, testAll,
                attributesToIgnore, produceGTfield, sitesOnlyQuery, useVCFCodec);
    }

    /**
     * Queries the GenomicsDB workspace over each interval and compares the returned variants to
     * the expected combined VCF. When {@code testAll} is set the full variant contexts are
     * compared (minus {@code attributesToIgnore}); otherwise only genotypes are compared.
     *
     * Fix: both feature readers were previously never closed; they are now managed by
     * try-with-resources so file handles are released when the comparison finishes.
     */
    private static void checkGenomicsDBAgainstExpected(final String workspace, final List<SimpleInterval> intervals,
                                                       final String expectedCombinedVCF, final String referenceFile,
                                                       final boolean testAll,
                                                       final List<String> attributesToIgnore,
                                                       final boolean produceGTField,
                                                       final boolean sitesOnlyQuery,
                                                       final boolean useVCFCodec) throws IOException {
        try (final FeatureReader<VariantContext> genomicsDBFeatureReader =
                     getGenomicsDBFeatureReader(workspace, referenceFile, produceGTField, sitesOnlyQuery, useVCFCodec);
             final AbstractFeatureReader<VariantContext, LineIterator> combinedVCFReader =
                     AbstractFeatureReader.getFeatureReader(expectedCombinedVCF, new VCFCodec(), true)) {

            intervals.forEach(interval -> {
                try (CloseableTribbleIterator<VariantContext> actualVcs =
                             genomicsDBFeatureReader.query(interval.getContig(), interval.getStart(), interval.getEnd());

                     CloseableTribbleIterator<VariantContext> expectedVcs =
                             combinedVCFReader.query(interval.getContig(), interval.getStart(), interval.getEnd())) {

                    BaseTest.assertCondition(actualVcs, expectedVcs, (a, e) -> {
                        // Test that the VCs match
                        if (testAll) {
                            // To correct a discrepancy between genotypeGVCFs which outputs empty genotypes as "./."
                            // and GenomicsDB which returns them as "." we simply remap the empty ones to be
                            // consistent for comparison
                            final List<Genotype> genotypes = a.getGenotypes().stream()
                                    .map(g -> g.getGenotypeString().equals(".") ? new GenotypeBuilder(g).alleles(GATKVariantContextUtils.noCallAlleles(2)).make() : g)
                                    .collect(Collectors.toList());
                            a = new VariantContextBuilder(a).genotypes(genotypes).make();
                            VariantContextTestUtils.assertVariantContextsAreEqualAlleleOrderIndependent(a, e, attributesToIgnore, Collections.emptyList(), VCF_HEADER);

                            // Test only that the genotypes match
                        } else {
                            final List<Genotype> genotypes = e.getGenotypes().stream()
                                    .map(g -> g.getGenotypeString().equals(".") ? new GenotypeBuilder(g).alleles(Collections.emptyList()).make() : g)
                                    .collect(Collectors.toList());
                            e = new VariantContextBuilder(e).genotypes(genotypes).make();
                            VariantContextTestUtils.assertVariantContextsHaveSameGenotypes(a, e);
                        }
                    });
                } catch (IOException e) {
                    Assert.fail(e.getMessage(), e);
                }
            });
        }
    }

    /**
     * Builds argument sets exercising sample ordering: -V inputs in and out of order, sample
     * maps in and out of order, and an out-of-order map with multiple reader threads, each
     * crossed with several batch sizes.
     */
    @DataProvider
    public Iterator<Object[]> getOrderingTests(){
        final File outOfOrderSampleMap = getSampleMapFile(
                        "HG00268\t" + HG_00268 + "\n" +
                        "NA19625\t" + NA_19625 + "\n" +
                        "HG00096\t" + HG_00096);

        final List<Object[]> results = new ArrayList<>();
        for (final int batchSize : new int[]{0, 1, 2, 3, 4}) {
            final String batch = String.valueOf(batchSize);

            // -V in order
            results.add(new Object[]{new ArgumentsBuilder()
                    .add(GenomicsDBImport.BATCHSIZE_ARG_LONG_NAME, batch)
                    .addVCF(new File(HG_00096))
                    .addVCF(new File(HG_00268))
                    .addVCF(new File(NA_19625))});

            // -V out of order
            results.add(new Object[]{new ArgumentsBuilder()
                    .add(GenomicsDBImport.BATCHSIZE_ARG_LONG_NAME, batch)
                    .addVCF(new File(HG_00268))
                    .addVCF(new File(NA_19625))
                    .addVCF(new File(HG_00096))});

            // in-order sample map
            results.add(new Object[]{new ArgumentsBuilder()
                    .add(GenomicsDBImport.BATCHSIZE_ARG_LONG_NAME, batch)
                    .add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, createInOrderSampleMap())});

            // out-of-order sample map
            results.add(new Object[]{new ArgumentsBuilder()
                    .add(GenomicsDBImport.BATCHSIZE_ARG_LONG_NAME, batch)
                    .add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, outOfOrderSampleMap)});

            // out-of-order sample map with multiple reader threads
            results.add(new Object[]{new ArgumentsBuilder()
                    .add(GenomicsDBImport.BATCHSIZE_ARG_LONG_NAME, batch)
                    .add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, outOfOrderSampleMap)
                    .add(GenomicsDBImport.VCF_INITIALIZER_THREADS_LONG_NAME, "2")});
        }
        return results.iterator();
    }

    /** Sample names containing spaces must import and export correctly via a sample map. */
    @Test
    public void testSampleNameWithSpaces() throws IOException {
        final File outOfOrderSampleMap = getSampleMapFile(
                "HG00268 withSpaces\t" + HG_00268_WITH_SPACES + "\n" +
                        "NA19625\t" + NA_19625 + "\n" +
                        "HG00096\t" + HG_00096 );

        final String workspace = createTempDir("gendbtest").getAbsolutePath() + "/workspace";

        final ArgumentsBuilder args = new ArgumentsBuilder()
                .add(GenomicsDBImport.BATCHSIZE_ARG_LONG_NAME, String.valueOf(2))
                .add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, outOfOrderSampleMap)
                .addInterval(SMALLER_INTERVAL.get(0))
                .add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, workspace);

        runCommandLine(args);
        checkJSONFilesAreWritten(workspace);
        // Validate with the default codec, then again forcing the VCF codec.
        checkGenomicsDBAgainstExpected(workspace, SMALLER_INTERVAL, COMBINED_WITHSPACES, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
        checkGenomicsDBAgainstExpected(workspace, SMALLER_INTERVAL, COMBINED_WITHSPACES, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE, false, false, true);
    }

    /** The import result must be independent of the order samples are supplied in. */
    @Test(dataProvider = "getOrderingTests")
    public void testSampleNameOrdering(final ArgumentsBuilder args) throws IOException {
        final String workspace = createTempDir("gendbtest").getAbsolutePath() + "/workspace";

        args.addInterval(INTERVAL.get(0))
            .add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, workspace);
        runCommandLine(args);

        checkJSONFilesAreWritten(workspace);
        // Validate with the default codec, then again forcing the VCF codec.
        checkGenomicsDBAgainstExpected(workspace, INTERVAL, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
        checkGenomicsDBAgainstExpected(workspace, INTERVAL, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE, false, false, true);
    }

    /** Writes a sample-name map listing the three test samples in sorted order. */
    private static File createInOrderSampleMap() {
        return getSampleMapFile(String.join("\n",
                "HG00096\t" + HG_00096,
                "HG00268\t" + HG_00268,
                "NA19625\t" + NA_19625));
    }

    /** Writes the given contents to a temp sample-name-map file that is deleted on JVM exit. */
    private static File getSampleMapFile(final String sampleFileContents) {
        final File sampleNameMap = IOUtils.writeTempFile(sampleFileContents, "sampleNameMap", ".txt");
        sampleNameMap.deleteOnExit();
        return sampleNameMap;
    }

    /** Writes a sample-name map of "sample&lt;TAB&gt;path" lines, one per map entry. */
    private static File getSampleMapFile(final Map<String, String> mapping){
        final String contents = mapping.entrySet()
                .stream()
                .map(pair -> pair.getKey() + "\t" + pair.getValue())
                .collect(Collectors.joining("\n"));
        return getSampleMapFile(contents);
    }

    /**
     * Cases for explicit-index handling: {sample -> VCF} maps in and out of order, crossed with
     * "all samples have explicit indices" vs "only one does", and TABIX vs TRIBBLE indexing.
     */
    @DataProvider
    public Object[][] dataForTestExplicitIndicesInSampleNameMap() {
        final Map<String, File> originalVCFsInOrder = new LinkedHashMap<>();
        originalVCFsInOrder.put(HG_00096_SAMPLE_NAME, new File(HG_00096));
        originalVCFsInOrder.put(HG_00268_SAMPLE_NAME, new File(HG_00268));
        originalVCFsInOrder.put(NA_19625_SAMPLE_NAME, new File(NA_19625));

        final Map<String, File> originalVCFsOutOfOrder = new LinkedHashMap<>();
        originalVCFsOutOfOrder.put(NA_19625_SAMPLE_NAME, new File(NA_19625));
        originalVCFsOutOfOrder.put(HG_00268_SAMPLE_NAME, new File(HG_00268));
        originalVCFsOutOfOrder.put(HG_00096_SAMPLE_NAME, new File(HG_00096));

        final List<String> allSamplesIndexed =
                Arrays.asList(HG_00096_SAMPLE_NAME, HG_00268_SAMPLE_NAME, NA_19625_SAMPLE_NAME);
        final List<String> someSamplesIndexed = Arrays.asList(HG_00268_SAMPLE_NAME);

        // Third element: true = TRIBBLE index, false = TABIX index.
        return new Object[][] {
                { originalVCFsInOrder,    allSamplesIndexed,  false },
                { originalVCFsInOrder,    allSamplesIndexed,  true  },
                { originalVCFsInOrder,    someSamplesIndexed, false },
                { originalVCFsInOrder,    someSamplesIndexed, true  },
                { originalVCFsOutOfOrder, allSamplesIndexed,  false },
                { originalVCFsOutOfOrder, allSamplesIndexed,  true  },
                { originalVCFsOutOfOrder, someSamplesIndexed, false },
                { originalVCFsOutOfOrder, someSamplesIndexed, true  }
        };
    }

    // Test that we can handle explicit index files from a sample name map locally.
    // The cloud version of this test is separate.
    // Note that this test decompresses/reindexes its GVCFs on-the-fly as necessary in order
    // to avoid our having to check uncompressed VCFs in to our repo
    @Test(dataProvider = "dataForTestExplicitIndicesInSampleNameMap")
    public void testExplicitIndicesInSampleNameMap(final Map<String, File> originalVCFs, final List<String> samplesWithExplicitIndices, final boolean useTribbleIndex) throws IOException {
        final String workspace = createTempDir("testExplicitIndicesInSampleNameMap").getAbsolutePath() + "/workspace";
        // Separate directories for vcfs vs indices so that an "explicit" index is genuinely
        // not discoverable next to its VCF.
        final File vcfDir = createTempDir("testExplicitIndicesInSampleNameMap_vcfs");
        final File indexDir = createTempDir("testExplicitIndicesInSampleNameMap_indices");
        Assert.assertNotEquals(vcfDir, indexDir,
              "testExplicitIndicesInSampleNameMap failed to create separate directories for the vcfs and their indices");

        final StringBuilder sampleNameMapContents = new StringBuilder();

        for ( final Map.Entry<String, File> originalVCFEntry : originalVCFs.entrySet() ) {
            final String sampleName = originalVCFEntry.getKey();
            final File originalVCFFile = originalVCFEntry.getValue();
            final boolean createExplicitIndex = samplesWithExplicitIndices.contains(sampleName);

            // Stage the VCF: decompress for TRIBBLE indexing, otherwise copy the .gz as-is.
            final Path originalVCFPath = originalVCFFile.toPath();
            final String uncompressedVCFName = originalVCFFile.getName().replaceAll("\\.gz$", "");
            Path vcfDestination = new File(vcfDir, originalVCFFile.getName()).toPath();
            if ( useTribbleIndex ) {
                vcfDestination = new File(vcfDir, uncompressedVCFName).toPath();
                IOUtils.gunzip(originalVCFPath.toAbsolutePath().toFile(), vcfDestination.toAbsolutePath().toFile());
            } else {
                Files.copy(originalVCFPath, vcfDestination);
            }

            // Stage the index: either alongside the VCF (implicit) or in the separate index
            // directory (explicit). TRIBBLE indices are built fresh from the uncompressed VCF;
            // TABIX indices are copied from the checked-in .tbi.
            final File originalVCFIndexFile = new File(originalVCFFile.getAbsolutePath() + FileExtensions.TABIX_INDEX);
            Assert.assertTrue(originalVCFIndexFile.exists());
            final File thisVCFIndexDir = createExplicitIndex ? indexDir : vcfDir;
            Path vcfIndexDestination = new File(thisVCFIndexDir, originalVCFIndexFile.getName()).toPath();
            if ( useTribbleIndex ) {
                vcfIndexDestination = new File(thisVCFIndexDir, uncompressedVCFName + FileExtensions.TRIBBLE_INDEX).toPath();
                final Index inMemoryIndex = IndexFactory.createLinearIndex(vcfDestination, new VCFCodec(), IndexFeatureFile.OPTIMAL_GVCF_INDEX_BIN_SIZE);
                inMemoryIndex.write(vcfIndexDestination);
            } else {
                Files.copy(originalVCFIndexFile.toPath(), vcfIndexDestination);
            }

            // Three-column sample-map lines (sample, vcf, index) declare an explicit index.
            if ( createExplicitIndex ) {
              sampleNameMapContents.append(String.format("%s\t%s\t%s\n", sampleName, vcfDestination.toAbsolutePath().toString(), vcfIndexDestination.toAbsolutePath().toString()));
            } else {
              sampleNameMapContents.append(String.format("%s\t%s\n", sampleName, vcfDestination.toAbsolutePath().toString()));
            }
        }

        final File sampleNameMapFile = IOUtils.writeTempFile(sampleNameMapContents.toString(), "testExplicitIndicesInSampleNameMap_samplemap", ".txt");

        final ArgumentsBuilder args = new ArgumentsBuilder();
        args.add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, sampleNameMapFile.getAbsolutePath())
                .addInterval(INTERVAL.get(0))
                .add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, workspace);
        runCommandLine(args);

        checkJSONFilesAreWritten(workspace);
        // Validate with the default codec, then again forcing the VCF codec.
        checkGenomicsDBAgainstExpected(workspace, INTERVAL, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
        checkGenomicsDBAgainstExpected(workspace, INTERVAL, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE, false, false, true);
    }

    @DataProvider
    public Object[][] dataForTestExplicitIndicesInSampleNameMapInTheCloud() {
        // Each row is a single String: the complete contents of a sample name map whose VCF and
        // index paths point at GCS objects. "*_WITH_INDEX" VCFs live next to their index,
        // "*_NO_INDEX" VCFs have no adjacent index, and "*_INDEX" entries are the standalone
        // index files supplied through the explicit third column of the map.
        final String GVCFS_WITH_INDICES_BUCKET = "gs://hellbender/test/resources/org/broadinstitute/hellbender/tools/genomicsdb/gvcfs_with_indices/";
        final String GVCFS_WITHOUT_INDICES_BUCKET = "gs://hellbender/test/resources/org/broadinstitute/hellbender/tools/genomicsdb/gvcfs_without_indices/";
        final String GVCF_INDICES_ONLY_BUCKET = "gs://hellbender/test/resources/org/broadinstitute/hellbender/tools/genomicsdb/gvcf_indices_only/";

        final String HG00096_COMPRESSED_WITH_INDEX = GVCFS_WITH_INDICES_BUCKET + "HG00096.g.vcf.gz";
        final String HG00096_COMPRESSED_NO_INDEX = GVCFS_WITHOUT_INDICES_BUCKET + "HG00096.g.vcf.gz";
        final String HG00096_COMPRESSED_INDEX = GVCF_INDICES_ONLY_BUCKET + "HG00096.g.vcf.gz.tbi";
        final String HG00096_UNCOMPRESSED_WITH_INDEX = GVCFS_WITH_INDICES_BUCKET + "HG00096.g.vcf";
        final String HG00096_UNCOMPRESSED_NO_INDEX = GVCFS_WITHOUT_INDICES_BUCKET + "HG00096.g.vcf";
        final String HG00096_UNCOMPRESSED_INDEX = GVCF_INDICES_ONLY_BUCKET + "HG00096.g.vcf.idx";

        final String HG00268_COMPRESSED_WITH_INDEX = GVCFS_WITH_INDICES_BUCKET + "HG00268.g.vcf.gz";
        final String HG00268_COMPRESSED_NO_INDEX = GVCFS_WITHOUT_INDICES_BUCKET + "HG00268.g.vcf.gz";
        final String HG00268_COMPRESSED_INDEX = GVCF_INDICES_ONLY_BUCKET + "HG00268.g.vcf.gz.tbi";
        final String HG00268_UNCOMPRESSED_WITH_INDEX = GVCFS_WITH_INDICES_BUCKET + "HG00268.g.vcf";
        final String HG00268_UNCOMPRESSED_NO_INDEX = GVCFS_WITHOUT_INDICES_BUCKET + "HG00268.g.vcf";
        final String HG00268_UNCOMPRESSED_INDEX = GVCF_INDICES_ONLY_BUCKET + "HG00268.g.vcf.idx";

        final String NA19625_COMPRESSED_WITH_INDEX = GVCFS_WITH_INDICES_BUCKET + "NA19625.g.vcf.gz";
        final String NA19625_COMPRESSED_NO_INDEX = GVCFS_WITHOUT_INDICES_BUCKET + "NA19625.g.vcf.gz";
        final String NA19625_COMPRESSED_INDEX = GVCF_INDICES_ONLY_BUCKET + "NA19625.g.vcf.gz.tbi";
        final String NA19625_UNCOMPRESSED_WITH_INDEX = GVCFS_WITH_INDICES_BUCKET + "NA19625.g.vcf";
        final String NA19625_UNCOMPRESSED_NO_INDEX = GVCFS_WITHOUT_INDICES_BUCKET + "NA19625.g.vcf";
        final String NA19625_UNCOMPRESSED_INDEX = GVCF_INDICES_ONLY_BUCKET + "NA19625.g.vcf.idx";

        return new Object[][] {
                // All VCFs have explicit indices, samples in order, TABIX index
                {
                    HG_00096_SAMPLE_NAME + "\t" + HG00096_COMPRESSED_NO_INDEX + "\t" + HG00096_COMPRESSED_INDEX + "\n" +
                    HG_00268_SAMPLE_NAME + "\t" + HG00268_COMPRESSED_NO_INDEX + "\t" + HG00268_COMPRESSED_INDEX + "\n" +
                    NA_19625_SAMPLE_NAME + "\t" + NA19625_COMPRESSED_NO_INDEX + "\t" + NA19625_COMPRESSED_INDEX + "\n"
                },

                // All VCFs have explicit indices, samples in order, TRIBBLE index
                {
                    HG_00096_SAMPLE_NAME + "\t" + HG00096_UNCOMPRESSED_NO_INDEX + "\t" + HG00096_UNCOMPRESSED_INDEX + "\n" +
                    HG_00268_SAMPLE_NAME + "\t" + HG00268_UNCOMPRESSED_NO_INDEX + "\t" + HG00268_UNCOMPRESSED_INDEX + "\n" +
                    NA_19625_SAMPLE_NAME + "\t" + NA19625_UNCOMPRESSED_NO_INDEX + "\t" + NA19625_UNCOMPRESSED_INDEX + "\n"
                },

                // Some VCFs have explicit indices, samples in order, TABIX index
                {
                    HG_00096_SAMPLE_NAME + "\t" + HG00096_COMPRESSED_WITH_INDEX + "\n" +
                    HG_00268_SAMPLE_NAME + "\t" + HG00268_COMPRESSED_NO_INDEX + "\t" + HG00268_COMPRESSED_INDEX + "\n" +
                    NA_19625_SAMPLE_NAME + "\t" + NA19625_COMPRESSED_WITH_INDEX + "\n"
                },

                // Some VCFs have explicit indices, samples in order, TRIBBLE index
                {
                    HG_00096_SAMPLE_NAME + "\t" + HG00096_UNCOMPRESSED_WITH_INDEX + "\n" +
                    HG_00268_SAMPLE_NAME + "\t" + HG00268_UNCOMPRESSED_NO_INDEX + "\t" + HG00268_UNCOMPRESSED_INDEX + "\n" +
                    NA_19625_SAMPLE_NAME + "\t" + NA19625_UNCOMPRESSED_WITH_INDEX + "\n"
                },

                // All VCFs have explicit indices, samples out of order, TABIX index
                {
                    NA_19625_SAMPLE_NAME + "\t" + NA19625_COMPRESSED_NO_INDEX + "\t" + NA19625_COMPRESSED_INDEX + "\n" +
                    HG_00268_SAMPLE_NAME + "\t" + HG00268_COMPRESSED_NO_INDEX + "\t" + HG00268_COMPRESSED_INDEX + "\n" +
                    HG_00096_SAMPLE_NAME + "\t" + HG00096_COMPRESSED_NO_INDEX + "\t" + HG00096_COMPRESSED_INDEX + "\n"
                },

                // All VCFs have explicit indices, samples out of order, TRIBBLE index
                {
                    NA_19625_SAMPLE_NAME + "\t" + NA19625_UNCOMPRESSED_NO_INDEX + "\t" + NA19625_UNCOMPRESSED_INDEX + "\n" +
                    HG_00268_SAMPLE_NAME + "\t" + HG00268_UNCOMPRESSED_NO_INDEX + "\t" + HG00268_UNCOMPRESSED_INDEX + "\n" +
                    HG_00096_SAMPLE_NAME + "\t" + HG00096_UNCOMPRESSED_NO_INDEX + "\t" + HG00096_UNCOMPRESSED_INDEX + "\n"
                },

                // Some VCFs have explicit indices, samples out of order, TABIX index
                {
                    NA_19625_SAMPLE_NAME + "\t" + NA19625_COMPRESSED_WITH_INDEX + "\n" +
                    HG_00268_SAMPLE_NAME + "\t" + HG00268_COMPRESSED_NO_INDEX + "\t" + HG00268_COMPRESSED_INDEX + "\n" +
                    HG_00096_SAMPLE_NAME + "\t" + HG00096_COMPRESSED_WITH_INDEX + "\n"
                },

                // Some VCFs have explicit indices, samples out of order, TRIBBLE index
                {
                    NA_19625_SAMPLE_NAME + "\t" + NA19625_UNCOMPRESSED_WITH_INDEX + "\n" +
                    HG_00268_SAMPLE_NAME + "\t" + HG00268_UNCOMPRESSED_NO_INDEX + "\t" + HG00268_UNCOMPRESSED_INDEX + "\n" +
                    HG_00096_SAMPLE_NAME + "\t" + HG00096_UNCOMPRESSED_WITH_INDEX + "\n"
                }
        };
    }

    // Verifies that explicit index files listed in the third column of a sample name map are
    // honored when both the VCFs and the indices live in GCS buckets.
    @Test(dataProvider = "dataForTestExplicitIndicesInSampleNameMapInTheCloud", groups = {"bucket"})
    public void testExplicitIndicesInSampleNameMapInTheCloud(final String sampleNameMapContents) throws IOException {
        final File sampleMap = IOUtils.writeTempFile(sampleNameMapContents, "testExplicitIndicesInSampleNameMapInTheCloud_samplemap", ".txt");
        final String workspace = createTempDir("testExplicitIndicesInSampleNameMapInTheCloud").getAbsolutePath() + "/workspace";

        final ArgumentsBuilder importArgs = new ArgumentsBuilder()
                .add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, sampleMap.getAbsolutePath())
                .add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, workspace)
                .addInterval(INTERVAL.get(0));
        runCommandLine(importArgs);

        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, INTERVAL, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
        checkGenomicsDBAgainstExpected(workspace, INTERVAL, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE, false, false, true);
    }

    // This test guards against the possibility of someone accidentally putting an index file into
    // the "gvcfs_without_indices" bucket directory used by testExplicitIndicesInSampleNameMapInTheCloud()
    @Test(groups = {"bucket"})
    public void testUnindexedCloudGVCFsAreActuallyUnindexed() throws IOException {
        final String GVCFS_WITHOUT_INDICES_BUCKET = "gs://hellbender/test/resources/org/broadinstitute/hellbender/tools/genomicsdb/gvcfs_without_indices/";
        final Path bucketPath = IOUtils.getPath(GVCFS_WITHOUT_INDICES_BUCKET);

        // Files.list() returns a Stream holding an open directory handle: close it deterministically.
        try ( final java.util.stream.Stream<Path> bucketContents = Files.list(bucketPath) ) {
            bucketContents.forEach(file -> {
                // BUGFIX: Path.endsWith() compares whole path name elements, not string suffixes, so
                // path.endsWith(".tbi") is always false for a file named "foo.g.vcf.gz.tbi" and the
                // original assertions could never fire. Compare the file NAME as a string instead.
                final String fileName = file.getFileName().toString();
                Assert.assertFalse(fileName.endsWith(FileExtensions.TABIX_INDEX),
                        "Found a TABIX index in bucket " + GVCFS_WITHOUT_INDICES_BUCKET);
                Assert.assertFalse(fileName.endsWith(FileExtensions.TRIBBLE_INDEX),
                        "Found a Tribble index in bucket " + GVCFS_WITHOUT_INDICES_BUCKET);
            });
        }
    }

    /** Builds an insertion-ordered rename map from alternating newName, originalName pairs. */
    private static Map<String, String> renameMap(final String... pairs) {
        final Map<String, String> map = new LinkedHashMap<>();
        for (int i = 0; i < pairs.length; i += 2) {
            map.put(pairs[i], pairs[i + 1]);
        }
        return map;
    }

    @DataProvider
    public static Iterator<Object[]> getRenameCombinations() {
        // Each mapping is newSampleName -> originalSampleName; insertion order is significant.
        final Map<String, String> noRemapping = renameMap("s1", "s1", "s2", "s2", "s3", "s3");
        final Map<String, String> sameInput = renameMap("s1", "s1", "s2", "s1", "s3", "s1");
        final Map<String, String> sameInputWeirdOrder = renameMap("s3", "s1", "s1", "s1", "s2", "s1");
        final Map<String, String> swizzled = renameMap("s2", "s1", "s3", "s2", "s1", "s3");
        final Map<String, String> multipleOutOfOrderRenamingsAcrossBatches = renameMap(
                "s1", "s1",
                "s2", "s2",
                "s1_Renamed", "s1",
                "Renamed_s2", "s2",
                "s4", "s3",
                "s3", "s3",
                "someOtherSample", "s4");

        final List<Map<String, String>> mappings = Arrays.asList(
                noRemapping, sameInput, sameInputWeirdOrder, swizzled, multipleOutOfOrderRenamingsAcrossBatches);

        // Cross every mapping with every batch size and thread count.
        final List<Object[]> combinations = new ArrayList<>();
        for (final Map<String, String> mapping : mappings) {
            for (final int batchSize : Arrays.asList(0, 1, 4)) {
                for (final int threadCount : Arrays.asList(1, 2)) {
                    combinations.add(new Object[]{ mapping, threadCount, batchSize });
                }
            }
        }
        return combinations.iterator();
    }

    @Test(dataProvider = "getRenameCombinations")
    public void testRenamingSamples(final Map<String, String> renamingMap, final int threads, final int batchSize) throws IOException {
        // Generate a fresh single-sample input VCF for each original sample name; the resulting
        // map is newSampleName -> path of the VCF carrying the original sample.
        final LinkedHashMap<String, String> sampleMap = new LinkedHashMap<>(renamingMap);
        sampleMap.replaceAll((newSampleName, originalSampleName) -> createInputVCF(originalSampleName).getAbsolutePath());

        final File sampleMapFile = getSampleMapFile(sampleMap);

        // The import refuses to write into a pre-existing directory, so delete the temp dir first.
        final String workspace = createTempDir("workspace").getAbsolutePath();
        Files.delete(Paths.get(workspace));

        runCommandLine(new ArgumentsBuilder()
                .add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, sampleMapFile.getAbsolutePath())
                .add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, new File(workspace).getAbsolutePath())
                .add(GenomicsDBImport.VCF_INITIALIZER_THREADS_LONG_NAME, String.valueOf(threads))
                .add(GenomicsDBImport.BATCHSIZE_ARG_LONG_NAME, String.valueOf(batchSize))
                .addInterval(INTERVAL.get(0)));

        final Set<String> expectedSampleNames = sampleMap.keySet();
        try (final FeatureReader<VariantContext> reader = getGenomicsDBFeatureReader(workspace, b37_reference_20_21)) {
            final CloseableTribbleIterator<VariantContext> variants = reader.iterator();
            Assert.assertTrue(variants.hasNext(), "expected to see a variant");
            Assert.assertTrue(expectedSampleNames.size() > 0);
            Assert.assertEquals(expectedSampleNames.size(), renamingMap.size());
            while (variants.hasNext()) {
                final VariantContext vc = variants.next();
                Assert.assertEquals(vc.getSampleNames().size(), expectedSampleNames.size());
                Assert.assertEqualsNoOrder(vc.getSampleNames().toArray(), expectedSampleNames.toArray());
                for (final String sample : expectedSampleNames) {
                    // The genotype-level SAMPLE_NAME_KEY records which original sample the data came from.
                    Assert.assertEquals(vc.getGenotype(sample).getAnyAttribute(SAMPLE_NAME_KEY), renamingMap.get(sample));
                    //check another attribute just to make sure we're not mangling things
                    Assert.assertEquals(VariantContextGetters.getAttributeAsInt(vc.getGenotype(sample), ANOTHER_ATTRIBUTE_KEY, -1), 10);
                }
            }
        }
    }

    /**
     * Creates a tiny single-sample VCF (indexed on the fly) containing one invented A->C variant at
     * the start position of {@code INTERVAL.get(0)}, tagging the genotype with the sample's own
     * name so renaming can be verified downstream.
     */
    private static File createInputVCF(final String sampleName) {
        final String contig = "chr20";
        final SAMSequenceDictionary dict = new SAMSequenceDictionary(
                Collections.singletonList(new SAMSequenceRecord(contig, 64444167)));

        final Set<VCFHeaderLine> headerLines = new HashSet<>();
        headerLines.add(new VCFFormatHeaderLine(SAMPLE_NAME_KEY, 1, VCFHeaderLineType.String,
                                                "the name of the sample this genotype came from"));
        headerLines.add(new VCFFormatHeaderLine(ANOTHER_ATTRIBUTE_KEY, 1, VCFHeaderLineType.Integer, "Another value"));
        headerLines.add(VCFStandardHeaderLines.getFormatLine("GT"));

        final File vcfFile = createTempFile(sampleName + "_", ".vcf");
        try (final VariantContextWriter writer = GATKVariantContextUtils.createVCFWriter(vcfFile.toPath(), dict, false,
                                                                                         Options.INDEX_ON_THE_FLY)) {
            final VCFHeader header = new VCFHeader(headerLines, Collections.singleton(sampleName));
            header.setSequenceDictionary(dict);
            writer.writeHeader(header);

            final Allele refA = Allele.create("A", true);
            final Allele altC = Allele.create("C");
            final List<Allele> alleles = Arrays.asList(refA, altC);
            final int position = INTERVAL.get(0).getStart();
            final Genotype genotype = new GenotypeBuilder(sampleName, alleles)
                    .attribute(SAMPLE_NAME_KEY, sampleName)
                    .attribute(ANOTHER_ATTRIBUTE_KEY, 10)
                    .make();
            writer.add(new VariantContextBuilder("invented", contig, position, position, alleles)
                               .genotypes(genotype)
                               .make());
        }
        return vcfFile;
    }

    // Supplying both a sample name map and a -V input is ambiguous and must be rejected.
    @Test(expectedExceptions = CommandLineException.class)
    public void testCantSpecifyVCFAndSampleNameFile(){
        runCommandLine(new ArgumentsBuilder()
                .add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, createInOrderSampleMap().getAbsolutePath())
                .add(StandardArgumentDefinitions.VARIANT_LONG_NAME, HG_00096)
                .add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, createTempDir("workspace").getAbsolutePath())
                .addInterval(INTERVAL.get(0)));
    }

    // Omitting both -V and the sample name map must fail with a missing-argument error.
    @Test(expectedExceptions = CommandLineException.MissingArgument.class)
    public void testRequireOneOfVCFOrSampleNameFile(){
        final ArgumentsBuilder args = new ArgumentsBuilder()
                .add("L", "1:1-10")
                .add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, createTempDir("workspace").getAbsolutePath());
        runCommandLine(args);
    }

    @Test
    public void testGenomicsDBImportWithoutDBField() throws IOException {
        //Test for https://github.com/broadinstitute/gatk/issues/3736
        final String workspace = createTempDir("genomicsdb-tests").getAbsolutePath() + "/workspace";
        writeToGenomicsDB(Arrays.asList(NA_24385), INTERVAL_3736, workspace, 0, false, 0, 1);
    }

    @Test
    public void testLongWorkspacePath() throws IOException {
        //Test for https://github.com/broadinstitute/gatk/issues/4160
        final String deeplyNestedWorkspace =
                createTempDir("long_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa_genomicsdb").getAbsolutePath()
                + "/should_not_fail_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
        writeToGenomicsDB(LOCAL_GVCFS, INTERVAL, deeplyNestedWorkspace, 0, false, 0, 1);
    }

    @Test
    public void testCommandIncludedInOutputHeader() throws IOException {
        final String workspace = createTempDir("genomicsdb-tests").getAbsolutePath() + "/workspace";
        writeToGenomicsDB(LOCAL_GVCFS, INTERVAL, workspace, 0, false, 0, 1);

        try (final FeatureReader<VariantContext> reader = getGenomicsDBFeatureReader(workspace, b38_reference_20_21)) {
            final VCFHeader header = (VCFHeader) reader.getHeader();
            // The GenomicsDBImport command line should be recorded in the workspace's VCF header.
            final boolean foundCommandLine = header.getMetaDataInSortedOrder().stream()
                    .anyMatch(line -> line.getValue().contains(GenomicsDBImport.class.getSimpleName()));
            Assert.assertTrue(foundCommandLine, "no headerline was present containing information about the GenomicsDBImport command");
        }
    }

    @Test
    public void testPreserveContigOrderingInHeader() throws IOException {
        final String workspace = createTempDir("testPreserveContigOrderingInHeader-").getAbsolutePath() + "/workspace";
        final ArrayList<SimpleInterval> intervals = new ArrayList<>(Collections.singletonList(new SimpleInterval("chr20", 17959479, 17959479)));
        writeToGenomicsDB(Arrays.asList(GENOMICSDB_TEST_DIR + "testHeaderContigLineSorting1.g.vcf",
                GENOMICSDB_TEST_DIR + "testHeaderContigLineSorting2.g.vcf"), intervals, workspace, 0, false, 0, 1);

        try (final FeatureReader<VariantContext> genomicsDBReader = getGenomicsDBFeatureReader(workspace, b38_reference_20_21);
             final AbstractFeatureReader<VariantContext, LineIterator> gvcfReader =
                     AbstractFeatureReader.getFeatureReader(GENOMICSDB_TEST_DIR + "testHeaderContigLineSorting1.g.vcf", new VCFCodec(), true)) {
            // The contig ordering in the GenomicsDB header must match the first input GVCF exactly.
            final SAMSequenceDictionary fromGenomicsDB = ((VCFHeader) genomicsDBReader.getHeader()).getSequenceDictionary();
            final SAMSequenceDictionary fromInputGVCF = ((VCFHeader) gvcfReader.getHeader()).getSequenceDictionary();
            Assert.assertEquals(fromGenomicsDB, fromInputGVCF, "Sequence dictionary from GenomicsDB does not match original sequence dictionary from input GVCF");
        }
    }
    /** Convenience overload: sites-only query disabled. */
    private static FeatureReader<VariantContext> getGenomicsDBFeatureReader(
            final String workspace, final String reference, final boolean produceGTField) throws IOException {
        return getGenomicsDBFeatureReader(workspace, reference, produceGTField, false);
    }

    /** Convenience overload: decodes with the default BCF2 codec. */
    private static FeatureReader<VariantContext> getGenomicsDBFeatureReader(
            final String workspace, final String reference, final boolean produceGTField,
            final boolean sitesOnlyQuery) throws IOException {
        return getGenomicsDBFeatureReader(workspace, reference, produceGTField, sitesOnlyQuery, false);
    }

    /**
     * Opens a GenomicsDBFeatureReader over an existing workspace, pointing it at the vid, callset
     * and VCF-header files written by GenomicsDBImport, and patching the vid mapping so that
     * RAW_MAPPING_QUALITY_WITH_DEPTH_KEY is combined by element-wise sum.
     *
     * @param workspace      path to the GenomicsDB workspace
     * @param reference      path to the reference genome
     * @param produceGTField whether the export should produce the GT field
     * @param sitesOnlyQuery whether to query without genotype-level data
     * @param useVCFCodec    decode with a VCFCodec instead of the default BCF2Codec
     */
    private static FeatureReader<VariantContext> getGenomicsDBFeatureReader(
            final String workspace, final String reference,
            final boolean produceGTField,
            final boolean sitesOnlyQuery,
            final boolean useVCFCodec) throws IOException {
        // Use the absolute workspace path consistently everywhere (the original mixed the raw
        // `workspace` string and its absolute form; all visible callers pass absolute paths).
        final String workspaceAbsPath = BucketUtils.makeFilePathAbsolute(workspace);
        final GenomicsDBExportConfiguration.ExportConfiguration.Builder exportConfigurationBuilder =
                GenomicsDBExportConfiguration.ExportConfiguration.newBuilder()
                        .setWorkspace(workspaceAbsPath)
                        .setReferenceGenome(reference)
                        .setVidMappingFile(IOUtils.appendPathToDir(workspaceAbsPath, GenomicsDBConstants.DEFAULT_VIDMAP_FILE_NAME))
                        .setCallsetMappingFile(IOUtils.appendPathToDir(workspaceAbsPath, GenomicsDBConstants.DEFAULT_CALLSETMAP_FILE_NAME))
                        .setVcfHeaderFilename(IOUtils.appendPathToDir(workspaceAbsPath, GenomicsDBConstants.DEFAULT_VCFHEADER_FILE_NAME))
                        .setProduceGTField(produceGTField)
                        .setSitesOnlyQuery(sitesOnlyQuery)
                        .setGenerateArrayNameFromPartitionBounds(true);

        GenomicsDBVidMapProto.VidMappingPB vidMapPB;
        try {
            vidMapPB = GATKGenomicsDBUtils.getProtobufVidMappingFromJsonFile(
                    IOUtils.appendPathToDir(workspaceAbsPath, GenomicsDBConstants.DEFAULT_VIDMAP_FILE_NAME));
        }
        catch (final IOException e) {
            // Wrap with context but preserve the cause so the underlying I/O failure is visible.
            throw new UserException("Could not open vid json file "+GenomicsDBConstants.DEFAULT_VIDMAP_FILE_NAME, e);
        }

        HashMap<String, Integer> fieldNameToIndexInVidFieldsList =
                GATKGenomicsDBUtils.getFieldNameToListIndexInProtobufVidMappingObject(vidMapPB);

        vidMapPB = GATKGenomicsDBUtils.updateINFOFieldCombineOperation(vidMapPB, fieldNameToIndexInVidFieldsList,
                GATKVCFConstants.RAW_MAPPING_QUALITY_WITH_DEPTH_KEY, "element_wise_sum");

        // updateINFOFieldCombineOperation's null behavior is opaque here; keep the defensive check.
        if (vidMapPB != null) {
            exportConfigurationBuilder.setVidMapping(vidMapPB);
        }

        if (useVCFCodec) {
            return new GenomicsDBFeatureReader<>(exportConfigurationBuilder.build(), new VCFCodec(), Optional.empty());
        } else {
            return new GenomicsDBFeatureReader<>(exportConfigurationBuilder.build(), new BCF2Codec(), Optional.empty());
        }
    }

    /** Convenience overload: no GT field production. */
    private static FeatureReader<VariantContext> getGenomicsDBFeatureReader(
            final String workspace, final String reference) throws IOException {
        return getGenomicsDBFeatureReader(workspace, reference, false);
    }

    @Test(expectedExceptions = GenomicsDBImport.UnableToCreateGenomicsDBWorkspace.class)
    public void testYouCantWriteIntoAnExistingDirectory(){
        // createTempDir actually creates the directory on disk, not just the file name, so the
        // import must refuse to use it as a workspace rather than clobber existing content.
        final String existingDirectory = createTempDir("workspace").getAbsolutePath();
        writeToGenomicsDB(LOCAL_GVCFS, INTERVAL, existingDirectory, 0, false, 0, 1);
    }

    // Overwriting the workspace and importing incrementally at the same time must be rejected.
    @Test(expectedExceptions = CommandLineException.class)
    public void testOverwriteWorkspaceAndIncrementalImportCannotBothBeTrue() {
        final String incrementalWorkspace = createTempDir("genomicsdb-incremental-tests").getAbsolutePath() + "/workspace";
        writeToGenomicsDB(LOCAL_GVCFS, INTERVAL, incrementalWorkspace, 0, false, 0, 1, false, true, true);
    }

    // Incremental import requires that the target workspace already exists.
    @Test(expectedExceptions = UserException.class)
    public void testIncrementalMustHaveExistingWorkspace() {
        // Deliberately no path separator: "<tempdir>workspace2" is a sibling path that was never created.
        final String existingDir = createTempDir("genomicsdb-incremental-tests").getAbsolutePath();
        writeToGenomicsDB(LOCAL_GVCFS, INTERVAL, existingDir + "workspace2", 0, false, 0, 1, false, false, true);
    }

    // Overload that never uses the native reader for the initial (non-incremental) import step.
    private void testIncrementalImport(final int stepSize, final List<SimpleInterval> intervals,
                                       final String workspace, final int batchSize, final boolean produceGTField,
                                       final boolean useVCFCodec, final String expected, final int chrsToPartitions,
                                       final boolean useNativeReader) throws IOException {
        testIncrementalImport(stepSize, intervals, workspace, batchSize, produceGTField, useVCFCodec,
                              expected, chrsToPartitions, useNativeReader, false);
    }

    private void testIncrementalImport(final int stepSize, final List<SimpleInterval> intervals, final String workspace,
                                       final int batchSize, final boolean produceGTField, final boolean useVCFCodec, final String expected,
                                       final int chrsToPartitions, final boolean useNativeReader, final boolean useNativeReaderInitial)
                                       throws IOException {
        // Import LOCAL_GVCFS in groups of stepSize inputs; every group after the first is an
        // incremental import into the same workspace (the i!=0 argument below).
        for(int i=0; i<LOCAL_GVCFS.size(); i+=stepSize) {
            int upper = Math.min(i+stepSize, LOCAL_GVCFS.size());
            // The native reader can be toggled independently for the initial import (i == 0) and
            // the incremental imports (i > 0).
            writeToGenomicsDB(LOCAL_GVCFS.subList(i, upper), intervals, workspace, batchSize, false, 0, 1, false, false, i!=0,
                              chrsToPartitions, (i == 0 && useNativeReaderInitial) || (i > 0 && useNativeReader));
            checkJSONFilesAreWritten(workspace);
        }
        // Verify each interval separately, either against the supplied expected VCF or, when
        // `expected` is empty, against a freshly generated CombineGVCFs result over the same inputs.
        for(SimpleInterval currInterval : intervals) {
            List<SimpleInterval> tmpList = new ArrayList<SimpleInterval>(Arrays.asList(currInterval));
            String expectedVcf = expected;
            if (expected.isEmpty()) {
                File expectedCombinedVCF = runCombineGVCFs(LOCAL_GVCFS, tmpList, b38_reference_20_21, new String[0]);
                expectedVcf = expectedCombinedVCF.getAbsolutePath();
            }
            checkGenomicsDBAgainstExpected(workspace, tmpList, expectedVcf, b38_reference_20_21, true,
                                           ATTRIBUTES_TO_IGNORE, produceGTField, false);
            // When requested, validate a second time through the VCF codec decoding path.
            if (useVCFCodec) {
                checkGenomicsDBAgainstExpected(workspace, tmpList, expectedVcf, b38_reference_20_21, true,
                                               ATTRIBUTES_TO_IGNORE, produceGTField, false, true);
            }
        }
    }

    @Test
    public void testGenomicsDBBasicIncremental() throws IOException {
        // Incremental import with step size 2, then validate genotypes and the generated interval list.
        final String incrementalWorkspace = createTempDir("genomicsdb-incremental-tests").getAbsolutePath() + "/workspace";
        testIncrementalImport(2, INTERVAL, incrementalWorkspace, 0, true, true, COMBINED_WITH_GENOTYPES, 0, false);
        createAndCheckIntervalListFromExistingWorkspace(incrementalWorkspace, INTERVAL_PICARD_STYLE_EXPECTED);
    }

    @Test
    public void testGenomicsDBBasicIncrementalAllNativeReader() throws IOException {
        // Same as testGenomicsDBBasicIncremental but using the native reader for every import step.
        final String incrementalWorkspace = createTempDir("genomicsdb-incremental-tests").getAbsolutePath() + "/workspace";
        testIncrementalImport(2, INTERVAL, incrementalWorkspace, 0, true, true, COMBINED_WITH_GENOTYPES, 0, true, true);
        createAndCheckIntervalListFromExistingWorkspace(incrementalWorkspace, INTERVAL_PICARD_STYLE_EXPECTED);
    }

    @Test
    public void testGenomicsDBIncrementalAndBatchSize1WithNonAdjacentIntervals() throws IOException {
        // Batch size 1; expected output is regenerated via CombineGVCFs (empty expected string).
        final String incrementalWorkspace = createTempDir("genomicsdb-incremental-tests").getAbsolutePath() + "/workspace";
        testIncrementalImport(2, MULTIPLE_NON_ADJACENT_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS, incrementalWorkspace, 1, false, true, "", 0, false);
        createAndCheckIntervalListFromExistingWorkspace(incrementalWorkspace, MULTIPLE_NON_ADJACENT_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS_PICARD_STYLE_EXPECTED);
    }

    @Test
    public void testGenomicsDBIncrementalAndBatchSize1WithNonAdjacentIntervalsNativeReader() throws IOException {
        // As above, but incremental import steps use the native reader.
        final String incrementalWorkspace = createTempDir("genomicsdb-incremental-tests").getAbsolutePath() + "/workspace";
        testIncrementalImport(2, MULTIPLE_NON_ADJACENT_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS, incrementalWorkspace, 1, false, true, "", 0, true);
        createAndCheckIntervalListFromExistingWorkspace(incrementalWorkspace, MULTIPLE_NON_ADJACENT_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS_PICARD_STYLE_EXPECTED);
    }

    // The native reader requires block-compressed input; an uncompressed VCF must be rejected.
    @Test(expectedExceptions = {UserException.class}, expectedExceptionsMessageRegExp=".*must be block compressed.*")
    public void testGenomicsDBImportNativeReaderNoCompressedVcf() throws IOException {
        testGenomicsDBImporterWithGenotypes(Arrays.asList(NA_12878_PHASED), MULTIPLE_INTERVALS, NA_12878_PHASED,
                b37_reference_20_21, false, true, false, true);
    }

    @Test
    public void testGenomicsDBIncrementalAndBatchSize1WithNonAdjacentIntervalsMergeContigsIntoPartitions() throws IOException {
        // chrsToPartitions = 1 merges the contigs of chr20/chr21 intervals into a single partition.
        final String mergedPartitionsWorkspace = createTempDir("genomicsdb-incremental-tests").getAbsolutePath() + "/workspace";
        testIncrementalImport(2, INTERVAL_20_21, mergedPartitionsWorkspace, 1, false, true, "", 1, false);
        createAndCheckIntervalListFromExistingWorkspace(mergedPartitionsWorkspace, MERGED_CONTIGS_INTERVAL_PICARD_STYLE_EXPECTED);
    }

    @Test
    public void testGenomicsDBIncrementalAndBatchSize2() throws IOException {
        // Step size 2 with batch size 2; compare against the precomputed genotyped output.
        final String incrementalWorkspace = createTempDir("genomicsdb-incremental-tests").getAbsolutePath() + "/workspace";
        testIncrementalImport(2, MULTIPLE_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS, incrementalWorkspace, 2, true, false,
                              COMBINED_WITH_GENOTYPES, 0, false);
    }

    @Test
    public void testGenomicsDBMultipleIncrementalImports() throws IOException {
        // Step size 1 means every input after the first is its own incremental import.
        final String incrementalWorkspace = createTempDir("genomicsdb-incremental-tests").getAbsolutePath() + "/workspace";
        testIncrementalImport(1, MULTIPLE_INTERVALS_THAT_WORK_WITH_COMBINE_GVCFS, incrementalWorkspace, 2, true, true,
                              COMBINED_WITH_GENOTYPES, 0, false);
    }

    @Test
    public void testGenomicsDBIncrementalWithManyNonAdjacentContigsToSeveralPartitions() throws IOException {
        final List<SimpleInterval> manyContigs =
                MANY_CONTIGS_NON_ADJACENT_INTERVALS.stream().map(SimpleInterval::new).collect(Collectors.toList());
        final String workspace = createTempDir("genomicsdb-incremental-tests").getAbsolutePath() + "/workspace";

        // The first input creates the workspace; the second is added via incremental import, both
        // with contigs merged into SEVERAL_CONTIGS partitions.
        writeToGenomicsDB(MANY_CONTIGS_VCF.subList(0, 1), manyContigs, workspace, 0, false, 0, 1, false, false, false, SEVERAL_CONTIGS, false);
        writeToGenomicsDB(MANY_CONTIGS_VCF.subList(1, 2), manyContigs, workspace, 0, false, 0, 1, false, false, true, SEVERAL_CONTIGS, false);

        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, manyContigs, EXPECTED_SEVERAL_CONTIGS_VCF, MANY_CONTIGS_REF, true,
                MANY_CONTIGS_ATTRIBUTES_TO_IGNORE, true, false);
        createAndCheckIntervalListFromExistingWorkspace(workspace, MANY_CONTIGS_INTERVAL_PICARD_STYLE_EXPECTED);
    }

    /**
     * Runs GenomicsDBImport in "generate interval list from existing workspace" mode and checks
     * the resulting Picard-style interval list (order-insensitively) against the expected file.
     */
    private void createAndCheckIntervalListFromExistingWorkspace(final String workspace, final String expectedOutput) {
        // NOTE: no path separator — the output is written alongside the workspace directory as
        // "<workspace>interval_output", matching the original behavior.
        final String outputIntervalList = workspace + "interval_output";

        final ArgumentsBuilder args = new ArgumentsBuilder();
        args.add(GenomicsDBImport.INCREMENTAL_WORKSPACE_ARG_LONG_NAME, workspace);
        args.add(GenomicsDBImport.INTERVAL_LIST_LONG_NAME, outputIntervalList);
        runCommandLine(args);

        final IntervalList generatedInterval = IntervalList.fromFile(new File(outputIntervalList));
        final IntervalList expectedInterval = IntervalList.fromFile(new File(expectedOutput));
        // assertEquals (rather than assertTrue on equals) reports both values on failure.
        Assert.assertEquals(generatedInterval.sorted(), expectedInterval.sorted());
    }

    /**
     * Imports {@code LOCAL_GVCFS} over {@code INTERVAL} into {@code workspace}, forwarding
     * any recognized boolean options from {@code options}, then checks the workspace JSONs
     * and the query results against the expected combined VCF.
     *
     * <p>Recognized option keys: {@link GenomicsDBImport#SHARED_POSIXFS_OPTIMIZATIONS},
     * {@link GenomicsDBImport#OVERWRITE_WORKSPACE_LONG_NAME},
     * {@link GenomicsDBImport#USE_GCS_HDFS_CONNECTOR}. Unrecognized keys are silently ignored.
     * Each recognized value must be a {@code Boolean}.
     *
     * @param workspace destination GenomicsDB workspace path
     * @param options   map of option long-name to Boolean value
     * @throws IOException if the import or validation fails with an I/O error
     */
    void basicWriteAndQueryWithOptions(String workspace, Map<String, Object> options) throws IOException {
        boolean isGcsHDFSConnectorSet = false;
        final ArgumentsBuilder args = new ArgumentsBuilder();
        args.add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, workspace);
        INTERVAL.forEach(args::addInterval);
        LOCAL_GVCFS.forEach(vcf -> args.add("V", vcf));
        // Iterate entries directly instead of keySet()+get(); all three recognized
        // options follow the same assert-Boolean-then-add pattern, so handle them in one branch.
        for (final Map.Entry<String, Object> option : options.entrySet()) {
            final String key = option.getKey();
            if (key.equals(GenomicsDBImport.SHARED_POSIXFS_OPTIMIZATIONS)
                    || key.equals(GenomicsDBImport.OVERWRITE_WORKSPACE_LONG_NAME)
                    || key.equals(GenomicsDBImport.USE_GCS_HDFS_CONNECTOR)) {
                Assert.assertTrue(option.getValue() instanceof Boolean, "value for option " + key + " must be a Boolean");
                final boolean value = (Boolean) option.getValue();
                args.add(key, value);
                if (key.equals(GenomicsDBImport.USE_GCS_HDFS_CONNECTOR)) {
                    isGcsHDFSConnectorSet = value;
                }
            }
        }
        runCommandLine(args);
        checkJSONFilesAreWritten(workspace);
        checkGenomicsDBAgainstExpected(workspace, INTERVAL, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
        // The connector flag is process-global state; reset it so later tests are unaffected.
        if (isGcsHDFSConnectorSet) {
            GenomicsDBUtils.useGcsHdfsConnector(false);
        }
    }

    @Test
    public void testWithMiscOptions() throws IOException {
        final String workspace = createTempDir("genomicsdb-misc-options").getAbsolutePath() + "/workspace";
        IOUtils.deleteOnExit(IOUtils.getPath(workspace));
        final Map<String, Object> importOptions = new HashMap<>();

        // First pass: import with shared-posixfs optimizations enabled.
        importOptions.put(GenomicsDBImport.SHARED_POSIXFS_OPTIMIZATIONS, true);
        basicWriteAndQueryWithOptions(workspace, importOptions);

        // Second pass: same optimizations, additionally overwriting the now-existing workspace.
        importOptions.put(GenomicsDBImport.OVERWRITE_WORKSPACE_LONG_NAME, true);
        basicWriteAndQueryWithOptions(workspace, importOptions);
    }

    @Test(expectedExceptions = GenomicsDBImport.UnableToCreateGenomicsDBWorkspace.class)
    public void testWithMiscOptionsNoOverwrite() throws IOException {
        final String workspace = createTempDir("genomicsdb-misc-options-nooverwrite").getAbsolutePath() + "/workspace";
        IOUtils.deleteOnExit(IOUtils.getPath(workspace));
        final Map<String, Object> options = new HashMap<>();
        basicWriteAndQueryWithOptions(workspace, options);

        // Re-importing into the existing workspace with overwrite explicitly disabled must throw
        // GenomicsDBImport.UnableToCreateGenomicsDBWorkspace.
        // Map.replace is a no-op when the key is absent — and it was never put into this map —
        // so the original code silently omitted the option; use put to actually set it to false.
        options.put(GenomicsDBImport.OVERWRITE_WORKSPACE_LONG_NAME, false);
        basicWriteAndQueryWithOptions(workspace, options);
    }

    @Test
    public void testQueryWithComputationsExceeding32BitsDefault() throws IOException {
        // Unpack a pre-built workspace whose aggregate computations exceed 32-bit range,
        // then query it with the default (non-BCF2) codec path and compare to the bundled VCF.
        final String unpackDir = createTempDir("computations_exceed_32bits").getAbsolutePath();
        IOUtils.extractTarGz(Paths.get(TEST_INT64_SUPPORT_GENOMICSDB_BUNDLE), Paths.get(unpackDir));
        IOUtils.deleteOnExit(IOUtils.getPath(unpackDir));
        final String workspace = unpackDir + "/bigint_genomicsdb_ws";
        final List<SimpleInterval> intervals = new ArrayList<>(Arrays.asList(new SimpleInterval("1")));
        checkGenomicsDBAgainstExpected(workspace, intervals, unpackDir + "/expected_combined_bigint.vcf",
                unpackDir + "/reference/chr1_10MB.fasta.gz", true, ATTRIBUTES_TO_IGNORE, false, false, true);
    }

    // Expected to fail with a Throwable because of limitations in BCF2Codec —
    // see https://github.com/broadinstitute/gatk/issues/6548
    @Test(expectedExceptions = Throwable.class)
    public void testQueryWithComputationsExceeding32BitsBCFCodec() throws IOException {
        final String unpackDir =
                createTempDir("computations_exceed_32bits_bcf2codec").getAbsolutePath() + "/testQueryWithComputationsExceed32Bits";
        IOUtils.extractTarGz(Paths.get(TEST_INT64_SUPPORT_GENOMICSDB_BUNDLE), Paths.get(unpackDir));
        IOUtils.deleteOnExit(IOUtils.getPath(unpackDir));
        final String workspace = unpackDir + "/bigint_genomicsdb_ws";
        final List<SimpleInterval> intervals = new ArrayList<>(Arrays.asList(new SimpleInterval("1")));
        // Same fixture as the default-codec test, but the final flag selects the BCF2 codec path.
        checkGenomicsDBAgainstExpected(workspace, intervals, unpackDir + "/expected_combined_bigint.vcf",
                unpackDir + "/reference/chr1_10MB.fasta.gz", true, ATTRIBUTES_TO_IGNORE, false, false, false);
    }

    @Test(groups = {"bucket"})
    public void testWriteToAndQueryFromGCS() throws IOException {
        // Round trip against cloud storage: import into a random GCS path, then
        // verify both the workspace JSONs and the queried variants.
        final String gcsWorkspace = BucketUtils.randomRemotePath(getGCPTestStaging(), "", "") + "/";
        IOUtils.deleteOnExit(IOUtils.getPath(gcsWorkspace));
        writeToGenomicsDB(LOCAL_GVCFS, INTERVAL, gcsWorkspace, 0, false, 0, 1);
        checkJSONFilesAreWritten(gcsWorkspace);
        checkGenomicsDBAgainstExpected(gcsWorkspace, INTERVAL, COMBINED, b38_reference_20_21, true, ATTRIBUTES_TO_IGNORE);
    }

    @Test(groups = {"bucket"}, expectedExceptions = GenomicsDBImport.UnableToCreateGenomicsDBWorkspace.class)
    public void testWriteToExistingGCSDirectory() throws IOException {
        // Pre-create the workspace on GCS so the subsequent import (which does not
        // request overwrite) is expected to refuse to create it and throw.
        final String gcsWorkspace = BucketUtils.randomRemotePath(getGCPTestStaging(), "", "") + "/";
        IOUtils.deleteOnExit(IOUtils.getPath(gcsWorkspace));
        final int status = GenomicsDBUtils.createTileDBWorkspace(gcsWorkspace, false);
        Assert.assertEquals(status, 0);
        writeToGenomicsDB(LOCAL_GVCFS, INTERVAL, gcsWorkspace, 0, false, 0, 1);
    }

    @Test(groups = {"bucket"})
    public void testWriteToAndQueryFromGCSUsingConnector() throws IOException {
        final String workspace = BucketUtils.randomRemotePath(getGCPTestStaging(), "", "") + "/";
        IOUtils.deleteOnExit(IOUtils.getPath(workspace));
        final Map<String, Object> options = new HashMap<>();
        // Use GenomicsDBImport.USE_GCS_HDFS_CONNECTOR: basicWriteAndQueryWithOptions matches
        // option keys against that constant, so the GenomicsDBArgumentCollection constant used
        // previously was only honored if the two constants happen to share the same string.
        options.put(GenomicsDBImport.USE_GCS_HDFS_CONNECTOR, true);
        basicWriteAndQueryWithOptions(workspace, options);
        // Belt-and-braces reset of the process-global connector flag for subsequent tests.
        GenomicsDBUtils.useGcsHdfsConnector(false);
    }
}
