// Copyright (c) Lawrence Livermore National Security, LLC and other VisIt
// Project developers.  See the top-level LICENSE file for dates and other
// details.  No copyright assignment is required to contribute to VisIt.

// ************************************************************************* //
//                           avtChomboFileFormat.C                           //
// ************************************************************************* //

#include <avtChomboFileFormat.h>

#include <string>
#include <vector>
#include <cstring>
#include <limits>
#include <algorithm>
#include <sstream>

#include <vtkCellArray.h>
#include <vtkCellData.h>
#include <vtkCellType.h>
#include <vtkDoubleArray.h>
#include <vtkFieldData.h>
#include <vtkInformation.h>
#include <vtkIntArray.h>
#include <vtkPolyData.h>
#include <vtkRectilinearGrid.h>
#include <vtkStreamingDemandDrivenPipeline.h>
#include <vtkUnsignedCharArray.h>
#include <vtkUnstructuredGrid.h>

#include <DBOptionsAttributes.h>

#include <avtDatabase.h>
#include <avtDatabaseMetaData.h>
#include <avtIntervalTree.h>
#include <avtResolutionSelection.h>
#include <avtStructuredDomainBoundaries.h>
#include <avtStructuredDomainNesting.h>
#include <avtVariableCache.h>
#include <avtMaterial.h>

#include <Expression.h>

#include <FileFunctions.h>

#include <BadDomainException.h>
#include <ImproperUseException.h>
#include <DebugStream.h>
#include <InvalidDBTypeException.h>
#include <InvalidFilesException.h>
#include <InvalidVariableException.h>
#include <TimingsManager.h>

// Define this symbol BEFORE including hdf5.h to indicate the HDF5 code
// in this file uses version 1.6 of the HDF5 API. This is harmless for
// versions of HDF5 before 1.8 and ensures correct compilation with
// version 1.8 and thereafter. When, and if, the HDF5 code in this file
// is explicitly upgraded to the 1.8 API, this symbol should be removed.
#define H5_USE_16_API
#include <hdf5.h>
#include <visit-hdf5.h>
using     std::string;


// ****************************************************************************
//  Method: avtChomboFileFormat constructor
//
//  Programmer: childs -- generated by xml2avt
//  Creation:   Thu Jan 19 11:17:14 PDT 2006
//
//  Modifications:
//    Brad Whitlock, Mon Sep 25 13:59:20 PST 2006
//    Initialize the cycle and time.
//
//    Hank Childs, Mon Oct  8 17:17:24 PDT 2007
//    Initialized atts.
//
//    Gunther H. Weber, Tue Apr 15 17:43:30 PDT 2008
//    Add support to automatically import a coordinate mapping file via conn_cmfe
//
//    Gunther H. Weber, Thu Apr 17 14:29:25 PDT 2008
//    Check if an option exists before querying its value
//
//    Hank Childs, Sun Jan 25 15:39:08 PST 2009
//    Improve support for ghost data.
//
//    Gunther H. Weber, Tue Sep 15 11:26:12 PDT 2009
//    Added support for 3D mappings for 2D files.
//
//    Hank Childs, Fri Mar  5 13:16:52 PST 2010
//    Initialize hasParticles.
//
//    Gunther H. Weber, Thu Jun 17 10:10:17 PDT 2010
//    Added ability to connect particle mesh based on polymer_id and
//    particle_nid
//
// ****************************************************************************

avtChomboFileFormat::avtChomboFileFormat(const char *filename,
                                         const DBOptionsAttributes *atts)
    : avtSTMDFileFormat(&filename, 1)
{
    // Start from the reader's default configuration; read options (if any)
    // may override some of these below.
    initializedReader = false;
    allowedToUseGhosts = true;
    fileContainsGhosts = false;
    enableOnlyRootLevel = false;
    enableOnlyExplicitMaterials = false;
    checkForMappingFile = true;
    mappingFileExists = false;
    mappingIs3D = false;
    hasParticles = false;
    connectParticles = false;
    alwaysComputeDomainBoundaries = false;

    // Walk the database read options and apply the ones we recognize.
    if (atts != NULL)
    {
        const int nOpts = atts->GetNumberOfOptions();
        for (int opt = 0; opt < nOpts; ++opt)
        {
            const std::string optName = atts->GetName(opt);
            if (optName == "Use ghost data (if present)")
                allowedToUseGhosts = atts->GetBool("Use ghost data (if present)");
            else if (optName == "Enable only root level by default")
                enableOnlyRootLevel = atts->GetBool("Enable only root level by default");
            else if (optName == "Enable only explicitly defined materials by default")
                enableOnlyExplicitMaterials = atts->GetBool("Enable only explicitly defined materials by default");
            else if (optName == "Check for mapping file and import coordinates if available")
                checkForMappingFile = atts->GetBool("Check for mapping file and import coordinates if available");
            else if (optName == "Use particle_nid and polymer_id to connect particles")
                connectParticles = atts->GetBool("Use particle_nid and polymer_id to connect particles");
            else if (optName == "Always compute domain boundaries (hack for AMR stitch cells)")
                alwaysComputeDomainBoundaries = atts->GetBool("Always compute domain boundaries (hack for AMR stitch cells)");
            else
                debug1 << "Ignoring unknown option " << optName << endl;
        }
    }

    // No file is open yet; time/cycle are filled in by InitializeReader.
    file_handle = -1;
    dtime = 0.;
    cycle = 0;
}


// ****************************************************************************
//  Method: avtChomboFileFormat destructor
//
//  Programmer: Hank Childs
//  Creation:   January 22, 2006
//
// ****************************************************************************

avtChomboFileFormat::~avtChomboFileFormat()
{
    // Release the HDF5 file handle, cached per-level metadata, and any
    // expressions read from the file (see FreeUpResources).
    FreeUpResources();
}


// ****************************************************************************
//  Method: avtChomboFileFormat::FreeUpResources
//
//  Purpose:
//      When VisIt is done focusing on a particular timestep, it asks that
//      timestep to free up any resources (memory, file descriptors) that
//      it has associated with it.  This method is the mechanism for doing
//      that.
//
//  Programmer: childs -- generated by xml2avt
//  Creation:   Thu Jan 19 11:17:14 PDT 2006
//
//  Modifications:
//
//    Hank Childs, Mon Jun 19 16:56:26 PDT 2006
//    Add support for ghosts.
//
//    Gunther H. Weber, Thu Oct 11 16:00:16 PDT 2007
//    Clean up expressions from file
//
//    Gunther H. Weber, Thu Aug 15 11:37:51 PDT 2013
//    Initial bare-bones support for 4D Chombo files (fairly limited and 
//    "hackish")
//
// ****************************************************************************

void
avtChomboFileFormat::FreeUpResources(void)
{
    // Force InitializeReader to run again the next time this timestep
    // becomes active.
    initializedReader = false;

    // Close the HDF5 file if it is still open.
    if (file_handle != -1)
    {
        H5Fclose(file_handle);
        file_handle = -1;
    }

    // Drop cached variable names and per-level patch/extent metadata.
    varnames.clear();
    patchesPerLevel.clear();
    refinement_ratio.clear();
    dx.clear();
    lowI.clear();
    hiI.clear();
    lowJ.clear();
    hiJ.clear();
    lowK.clear();
    hiK.clear();
    lowL.clear();
    hiL.clear();
    numGhosts.clear();

    // The expressions were heap-allocated when read from the file; delete
    // each one before clearing the list.
    while (!expressions.empty())
    {
        delete expressions.front();
        expressions.pop_front();
    }

    listOfRepresentativeBoxes.clear();
    representativeBox.clear();
    representedBoxes.clear();
}

// ****************************************************************************
// Method: avtChomboFileFormat::GetCycle
//
// Purpose:
//   Get the cycle.
//
// Note:       We have to initialize the reader before returning a cycle
//             so we can accurately read the cycle from the file.
//
// Programmer: Brad Whitlock
// Creation:   Mon Sep 25 13:58:05 PST 2006
//
// Modifications:
//
// ****************************************************************************

int
avtChomboFileFormat::GetCycle(void)
{
    // Ensure the file has been opened and parsed; InitializeReader sets
    // "cycle" from the file (or from the filename if absent) and is a no-op
    // if it already ran.
    InitializeReader();
    return cycle;
}

// ****************************************************************************
// Method: avtChomboFileFormat::GetTime
//
// Purpose:
//   Get the time.
//
// Note:       We have to initialize the reader before returning a time
//             so we can accurately read the time from the file.
//
// Programmer: Brad Whitlock
// Creation:   Mon Sep 25 13:58:05 PST 2006
//
// Modifications:
//
// ****************************************************************************

double
avtChomboFileFormat::GetTime(void)
{
    // Ensure the file has been opened and parsed; InitializeReader sets
    // "dtime" from the file's "time" attribute (0. if absent) and is a
    // no-op if it already ran.
    InitializeReader();
    return dtime;
}

// ****************************************************************************
//  Method: avtChomboFileFormat::GetCycleFromFilename
//
//  Purpose:
//      Gets the cycle from the file name.  A custom implementation is needed,
//      because they have file names like "...plt0080.2d.hdf5".  We want the
//      "80".
//
//  Programmer: Hank Childs
//  Creation:   January 22, 2006
//
//  Modifications:
//    Brad Whitlock, Mon Sep 25 13:57:02 PST 2006
//    Added code to support names like plot0.0080.2d.hdf5 where we want the 80.
//
//    Mark C. Miller, Thu Jun 14 10:26:37 PDT 2007
//    Modified to use regular expression based guess
//
//    Mark C. Miller, Wed Sep 24 11:43:38 PDT 2008
//    Modified regular expression to meet specifications in ticket 8737
//
//    Gunther H. Weber, Wed Jul 22 11:48:18 PDT 2009
//    Modified regular expression to meet new specifications set by Terry
//    Ligocki (ANAG/APDEC).
//
// ****************************************************************************

int
avtChomboFileFormat::GetCycleFromFilename(const char *fname) const
{
    // A null or empty name cannot encode a cycle number.
    if (fname == 0 || fname[0] == '\0')
        return avtFileFormat::INVALID_CYCLE;

    // Match names like "...plt0080.2d.hdf5"; capture group 1 is the trailing
    // digit run immediately before the ".2d.hdf5" / ".3d.h5" style suffix.
    return GuessCycle(fname,
            "<^.*[^0-9]([0-9][0-9]*)\\.(2|3)[dD]\\.(hdf5|h5)$> \\1");
}


// ****************************************************************************
//  Method: avtChomboFileFormat::ActivateTimestep
//
//  Purpose:
//      Tells the reader it can now do some initialization work.
//
//  Programmer: Hank Childs
//  Creation:   January 22, 2006
//
// ****************************************************************************

void
avtChomboFileFormat::ActivateTimestep(void)
{
    // Initialization is deferred until the timestep is actually needed;
    // InitializeReader is a no-op if it already ran.
    InitializeReader();
}


// ****************************************************************************
//  Method: avtChomboFileFormat::InitializeReader
//
//  Purpose:
//      Walks through the HDF5 file and reads in some non-problem size data.
//
//  Programmer: Hank Childs
//  Creation:   January 22, 2006
//
//  Modifications:
//
//    Hank Childs, Mon Jun 19 14:51:26 PDT 2006
//    Add support for when "time" is placed at "level_0".
//
//    Hank Childs, Mon Jun 19 16:44:06 PDT 2006
//    Added support for ghost zones.
//
//    Brad Whitlock, Mon Sep 25 15:22:38 PST 2006
//    I made it revert to cycles from the filename in the event that cycles
//    are not in the file.
//
//    Gunther H. Weber, Tue Aug  7 15:58:03 PDT 2007
//    Added check for variables specifying material fractions (variable name
//    fraction-<i>)
//
//    Gunther H. Weber, Thu Oct 11 15:49:41 PDT 2007
//    Read expressions from Chombo files.
//
//    Gunther H. Weber, Fri Oct 19 16:47:45 PDT 2007
//    Disable  HDF5 diagnostic output while attempting to read information
//    that may not be contained in the data set. Read problem domain from
//    file (used to figure out, which ghost cells are external to the problem).
//
//    Gunther H. Weber, Mon Oct 22 11:22:35 PDT 2007
//    Read information about problem domain [low|hi]Prob[I|J|K] needed
//    to figure out whether a ghost zone is external to the problem.
//
//    Gunther H. Weber, Mon Nov 19 14:02:59 PST 2007
//    Added missing H5Tclose in expression reading code.
//
//    Gunther H. Weber, Mon Mar 24 20:46:04 PDT 2008
//    Added support for node centered Chombo data.
//
//    Dave Pugmire, Fri Aug 22 10:27:39 EDT 2008
//    boxes_buff was leaking.
//
//    Hank Childs, Sun Jan 25 15:43:03 PST 2009
//    Set proper Boolean if we find evidence of ghost data.
//
//    Gunther H. Weber, Wed Mar 25 13:31:56 PDT 2009
//    Close file to prevent file handle depletion
//
//    Gunther H. Weber, Wed Jun 10 18:28:24 PDT 2009
//    Added ability to handle particle data in Chombo files.
//
//    Gunther H. Weber, Tue Sep 15 11:26:12 PDT 2009
//    Added support for 3D mappings for 2D files.
//
//    Jeremy Meredith, Thu Jan  7 15:36:19 EST 2010
//    Close all open ids when returning an exception.
//
//    Jeremy Meredith, Wed Mar 30 13:30:13 EDT 2011
//    More of the previous: close the file when returning an exception.
//
//    Gunther H. Weber, Thu Aug 15 11:37:51 PDT 2013
//    Initial bare-bones support for 4D Chombo files (fairly limited and 
//    "hackish")
//
//    Mark C. Miller, Wed Feb  9 11:15:05 PST 2022
//    Add contingency read logic for prob_lo, aspect_ratio, vec_dx and
//    vec_ref_ratio to read these as simple arrays instead of structy types.
//    The HDF5 library will fail the reads if you don't give it a compatible
//    memory type to read into and the new logic allows for either case.
//    Also handle possibility that dx and ref_ratio are stored as arrays
//    instead of scalars.
//
// ****************************************************************************

// Iteration callback (H5Giterate-style signature) that collects member
// names: appends "varname" to the std::list<std::string> supplied by the
// caller through "opData".  Always returns 0 so iteration continues.
extern "C"  herr_t
add_var(hid_t loc_id, const char *varname, void *opData)
{
  // opData is always a list of names; use a named cast rather than a
  // C-style cast to make the conversion explicit and greppable.
  std::list<std::string> *l = static_cast<std::list<std::string>*>(opData);
  l->push_back(varname);
  return 0;
}

//
// Purpose: Open HDF5 file with close degree semi
//
// Programmer: Mark C. Miller, Wed Feb  9 13:35:50 PST 2022
//
static hid_t OpenHDF5File(char const *fname)
{
    // Request the H5F_CLOSE_SEMI close degree via a file-access property
    // list, then open the file read-only with it.
    const hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fclose_degree(fapl, H5F_CLOSE_SEMI);
    const hid_t fileId = H5Fopen(fname, H5F_ACC_RDONLY, fapl);

    // The property list is no longer needed once the open has happened;
    // the caller owns (and must close) the returned file id.
    H5Pclose(fapl);
    return fileId;
}

//
//  Modifications
//    Mark C. Miller, Wed Feb  9 13:37:12 PST 2022
//    Use new method, OpenHDF5File, to open the file.
//
void
avtChomboFileFormat::InitializeReader(void)
{
    if (initializedReader)
        return;

    debug5 << "avtChomboFileFormat: Initializing reader." << std::endl;

    //
    // Get current automatic stack traversal function to re-enable it later and
    // disable HDF5's automatic error printing
    //
    H5E_auto_t h5e_autofunc;
    void* h5e_clientdata;
    H5Eget_auto(&h5e_autofunc, &h5e_clientdata);
    H5Eset_auto(0, 0);

    //
    // Open file
    //
    file_handle = OpenHDF5File(filenames[0]);
    if (file_handle < 0)
    {
        EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, since "
                                           "it is not even an HDF5 file.");
    }

    //
    // Open up "Chombo_global". Need to do open this before the "/" group to
    // determine number of dimensions, which we need to know for reading origin
    // and aspect ratio information.
    //
    hid_t global = H5Gopen(file_handle, "Chombo_global");
    if (global < 0)
    {
        H5Fclose(file_handle);
        EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, does "
                                           "not have Chombo_global");
    }
    hid_t dim_id = H5Aopen_name(global, "SpaceDim");
    if (dim_id < 0)
    {
        H5Fclose(file_handle);
        EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, does "
                                         "not have SpaceDim in Chombo_global");
    }
    H5Aread(dim_id, H5T_NATIVE_INT, &dimension);
    if (dimension < 2 ||  dimension > 4)
    {
        debug1 << "ERROR: Reader only supports 2D, 3D and 4D data sets." << endl;
        H5Fclose(file_handle);
        EXCEPTION1(InvalidFilesException, filenames[0]);
    }
    H5Aclose(dim_id);
    H5Gclose(global);

    //
    // Most of the global info is stored in the "/" group.
    //
    hid_t slash = H5Gopen(file_handle, "/");
    if (slash < 0)
    {
        H5Fclose(file_handle);
        EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, must "
                                           "have the \"/\" group.");
    }

    bool hasTime = false;
    bool hasIterations = false;
    bool hasCentering = false;
    bool hasProbLo = false;
    bool hasAspectRatio = false;

    int numAttrs = H5Aget_num_attrs(slash);
    char buf[1024];
    for (int i = 0 ; i < numAttrs ; i++)
    {
        hid_t idx = H5Aopen_idx(slash, i);
        H5Aget_name(idx, 1024, buf);
        if (strcmp(buf, "time") == 0)
            hasTime = true;
        if (strcmp(buf, "iteration") == 0)
            hasIterations = true;
        if (strcmp(buf,"data_centering") == 0)
            hasCentering = true;
        if (strcmp(buf,"prob_lo") == 0)
            hasProbLo = true;
        if (strcmp(buf,"aspect_ratio") == 0)
            hasAspectRatio = true;
        H5Aclose(idx);
    }

    //
    // Get the time.
    //
    if (hasTime)
    {
        hid_t time_id = H5Aopen_name(slash, "time");
        if (time_id < 0)
        {
            H5Gclose(slash);
            H5Fclose(file_handle);
            EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, must "
                                               "have time in \"/\" group.");
        }
        H5Aread(time_id, H5T_NATIVE_DOUBLE, &dtime);
        H5Aclose(time_id);
    }
    else
        dtime = 0.;

    //
    // Get the cycle.
    //
    if (hasIterations)
    {
        hid_t cycle_id = H5Aopen_name(slash, "iteration");
        if (cycle_id < 0)
        {
            H5Gclose(slash);
            H5Fclose(file_handle);
            EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, must "
                                             "have iteration in \"/\" group.");
        }
        H5Aread(cycle_id, H5T_NATIVE_INT, &cycle);
        H5Aclose(cycle_id);
    }
    else
        cycle = GetCycleFromFilename(filenames[0]);

    //
    // Get the centering.
    //
    nodeCentered = false;
    if (hasCentering)
    {
        debug1 << "Chombo file has centering information." << std::endl;
        hid_t time_id = H5Aopen_name(slash, "data_centering");
        if (time_id < 0)
        {
            H5Gclose(slash);
            H5Fclose(file_handle);
            EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, must "
                                               "have data_centering in \"/\" group.");
        }
        int centeringVal;
        H5Aread(time_id, H5T_NATIVE_INT, &centeringVal);
        H5Aclose(time_id);
        if (centeringVal == 7)
        {
            debug1 << "Node centered data." << std::endl;
            nodeCentered = true;
        }
        else
        {
            debug1 << "Cell centered data." << std::endl;
        }
    }

    //
    // Get origin and aspect ratio
    //
    for (int i=0; i<3; ++i)
    {
        probLo[i] = 0.0;
        aspectRatio[i] =1.0;
    }

    hid_t doublevect2d_id = H5Tcreate (H5T_COMPOUND, sizeof(doublevect2d));
    H5Tinsert (doublevect2d_id, "x", HOFFSET(doublevect2d, x), H5T_NATIVE_DOUBLE);
    H5Tinsert (doublevect2d_id, "y", HOFFSET(doublevect2d, y), H5T_NATIVE_DOUBLE);
    hid_t doublevect3d_id = H5Tcreate (H5T_COMPOUND, sizeof(doublevect3d));
    H5Tinsert (doublevect3d_id, "x", HOFFSET(doublevect3d, x), H5T_NATIVE_DOUBLE);
    H5Tinsert (doublevect3d_id, "y", HOFFSET(doublevect3d, y), H5T_NATIVE_DOUBLE);
    H5Tinsert (doublevect3d_id, "z", HOFFSET(doublevect3d, z), H5T_NATIVE_DOUBLE);
    hid_t doublevect4d_id = H5Tcreate (H5T_COMPOUND, sizeof(doublevect4d));
    H5Tinsert (doublevect4d_id, "x", HOFFSET(doublevect4d, x), H5T_NATIVE_DOUBLE);
    H5Tinsert (doublevect4d_id, "y", HOFFSET(doublevect4d, y), H5T_NATIVE_DOUBLE);
    H5Tinsert (doublevect4d_id, "z", HOFFSET(doublevect4d, z), H5T_NATIVE_DOUBLE);
    H5Tinsert (doublevect4d_id, "u", HOFFSET(doublevect4d, u), H5T_NATIVE_DOUBLE);

    if (hasProbLo)
    {
        if (dimension > 3)
        {
            EXCEPTION1(InvalidDBTypeException, "prob_lo not yet supported for 4D data");
        }
        hid_t probLo_id = H5Aopen_name(slash, "prob_lo");
        if (probLo_id < 0)
        {
            EXCEPTION1(InvalidDBTypeException, "Could not open attribute \"prob_lo\".");
        }
        else
        {
            doublevect probLoBuff;
            if (H5Aread(probLo_id, (dimension == 2 ? doublevect2d_id : doublevect3d_id), &probLoBuff) < 0)
            {
                double tmp[4];
                if (H5Aread(probLo_id, H5T_NATIVE_DOUBLE, &tmp[0]) < 0) // try reading as simple array
                { 
                    EXCEPTION1(InvalidDBTypeException, "Cannot read \"prob_lo\".");
                }
                probLo[0] = tmp[0];
                probLo[1] = tmp[1];
                if (dimension > 2)
                    probLo[2] = tmp[2];
            }
            else
            {
                if (dimension == 2)
                {
                    probLo[0] = probLoBuff.dv2.x;
                    probLo[1] = probLoBuff.dv2.y;
                }
                else
                {
                    probLo[0] = probLoBuff.dv3.x;
                    probLo[1] = probLoBuff.dv3.y;
                    probLo[2] = probLoBuff.dv3.z;
                }
            }
            H5Aclose(probLo_id);
        }
    }

    if (hasAspectRatio)
    {
        if (dimension > 3)
        {
            EXCEPTION1(InvalidDBTypeException, "aspect_ratio not yet supported for 4D data");
        }
        hid_t aspectRatio_id = H5Aopen_name(slash, "aspect_ratio");
        if (aspectRatio_id < 0)
        {
            EXCEPTION1(InvalidDBTypeException, "Could not open attribute \"aspect_ratio\".");
        }
        else
        {
            doublevect aspectRatioBuff;
            if (H5Aread(aspectRatio_id, (dimension == 2 ? doublevect2d_id : doublevect3d_id), &aspectRatioBuff) < 0)
            {
                double tmp[4];
                if (H5Aread(aspectRatio_id, H5T_NATIVE_DOUBLE, &tmp[0]) < 0) // try reading as simple array
                {
                    EXCEPTION1(InvalidDBTypeException, "Cannot read \"aspect_ratio\".");
                }
                aspectRatio[0] = tmp[0];
                aspectRatio[1] = tmp[1];
                if (dimension > 2)
                    aspectRatio[2] = tmp[2];
            }
            else
            {
                if (dimension == 2)
                {
                    aspectRatio[0] = aspectRatioBuff.dv2.x;
                    aspectRatio[1] = aspectRatioBuff.dv2.y;
                }
                else
                {
                    aspectRatio[0] = aspectRatioBuff.dv3.x;
                    aspectRatio[1] = aspectRatioBuff.dv3.y;
                    aspectRatio[2] = aspectRatioBuff.dv3.z;
                }
            }
            H5Aclose(aspectRatio_id);
        }
    }


    //
    // Note: max_level, per conversation with John Shalf, is for the code
    // to say what the maximum level could be.  We don't need this...
    //
    /*
    int max_level;
    hid_t ml_id = H5Aopen_name(slash, "max_level");
    H5Aread(ml_id, H5T_NATIVE_INT, &max_level);
    H5Aclose(ml_id);
    */

    //
    // Determine how many refinement levels there are.
    //
    hid_t nl_id = H5Aopen_name(slash, "num_levels");
    if (nl_id < 0)
    {
        H5Gclose(slash);
        H5Fclose(file_handle);
        EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, must "
                                           "have num_levels in \"/\" group.");
    }
    H5Aread(nl_id, H5T_NATIVE_INT, &num_levels);
    H5Aclose(nl_id);
    if (num_levels <= 0)
    {
        debug1 << "ERROR: Number of levels (" << num_levels
               << ") must be at least 1" << endl;
        H5Gclose(slash);
        H5Fclose(file_handle);
        EXCEPTION1(InvalidFilesException, filenames[0]);
    }

    //
    // Determine how many variables there are.
    //
    int num_vars;
    hid_t nv_id = H5Aopen_name(slash, "num_components");
    if (nv_id < 0)
    {
        EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, must "
                                        "have num_components in \"/\" group.");
    }
    H5Aread(nv_id, H5T_NATIVE_INT, &num_vars);
    if (num_vars < 0)
    {
        debug1 << "ERROR: Number of variables less than 0" << endl;
        EXCEPTION1(InvalidFilesException, filenames[0]);
    }
    H5Aclose(nv_id);

    //
    // Read out each variable name and store it in vector "varnames".
    //
    for (int i = 0 ; i < num_vars ; i++)
    {
        char name[1024];
        snprintf(name, 1024, "component_%d", i);
        hid_t vname = H5Aopen_name(slash, name);
        if (vname < 0)
        {
            EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, does "
                                               "not have all component names");
        }
        char name2[1024] = { '\0' };
        hid_t atype = H5Aget_type(vname);
        H5Aread(vname, atype, name2);
        varnames.push_back(name2);
        H5Aclose(vname);
    }

    //
    // We're done reading everything from "slash".
    //
    H5Gclose(slash);

    //
    // Look for expressions
    //
    hid_t expressionsGroup = H5Gopen(file_handle, "/Expressions");
    if (expressionsGroup > 0)
    {
        for (int i=0; i<H5Aget_num_attrs(expressionsGroup); ++i)
        {
            // Open expression
            hid_t currExpression = H5Aopen_idx(expressionsGroup, i);

            // Get name, which has form <return type> <name>
            const size_t buffSize = 1024;
            char buffer[buffSize];
            H5Aget_name(currExpression, buffSize, buffer);

            // Split into type and name
            char *separatorPos = std::strchr(buffer, ' ');
            *separatorPos = '\0';
            char *exprTypeStr = buffer;
            char *exprName = separatorPos + 1;

            // Parse type
            Expression::ExprType exprType = Expression::Unknown;
            if (strcmp("vector", exprTypeStr) == 0)
                exprType = Expression::VectorMeshVar;
            else if (strcmp("scalar", exprTypeStr) == 0)
                exprType = Expression::ScalarMeshVar;
            else
                debug1 << "Unknown expression type " << exprType << " in file." << std::endl;

            if (exprType != Expression::Unknown)
            {
                hid_t strType = H5Aget_type(currExpression);
                hsize_t strLen;
                if (strType >= 0 && H5Tget_class(strType) == H5T_STRING &&
                    (strLen = H5Tget_size(strType)) < buffSize - 1)
                {
                    Expression *newExpression = new Expression;
                    newExpression->SetName(exprName);
                    newExpression->SetType(exprType);
                    newExpression->SetHidden(false);

                    // Read definition
                    H5Aread(currExpression, strType, buffer);
                    buffer[strLen]  = '\0';
                    newExpression->SetDefinition(buffer);
                    expressions.push_back(newExpression);
                }
                H5Tclose(strType);
            }
            H5Aclose(currExpression);
        }
        H5Gclose(expressionsGroup);
    }

    hid_t intvect2d_id = H5Tcreate (H5T_COMPOUND, sizeof(intvect2d));
    H5Tinsert (intvect2d_id, "intvecti", HOFFSET(intvect2d, i), H5T_NATIVE_INT);
    H5Tinsert (intvect2d_id, "intvectj", HOFFSET(intvect2d, j), H5T_NATIVE_INT);

    hid_t intvect3d_id = H5Tcreate (H5T_COMPOUND, sizeof(intvect3d));
    H5Tinsert (intvect3d_id, "intvecti", HOFFSET(intvect3d, i), H5T_NATIVE_INT);
    H5Tinsert (intvect3d_id, "intvectj", HOFFSET(intvect3d, j), H5T_NATIVE_INT);
    H5Tinsert (intvect3d_id, "intvectk", HOFFSET(intvect3d, k), H5T_NATIVE_INT);

    hid_t intvect4d_id = H5Tcreate (H5T_COMPOUND, sizeof(intvect4d));
    H5Tinsert (intvect4d_id, "intvecti", HOFFSET(intvect4d, i), H5T_NATIVE_INT);
    H5Tinsert (intvect4d_id, "intvectj", HOFFSET(intvect4d, j), H5T_NATIVE_INT);
    H5Tinsert (intvect4d_id, "intvectk", HOFFSET(intvect4d, k), H5T_NATIVE_INT);
    H5Tinsert (intvect4d_id, "intvectl", HOFFSET(intvect4d, l), H5T_NATIVE_INT);

    //
    // Now iterate over each refinement level and determine how many patches
    // there are at that refinement level, what the refinement ratio is, and
    // what "dx" is ... "dx" is the distance between consecutive points.
    //
    int totalPatches = 0;
    patchesPerLevel.resize(num_levels);
    refinement_ratio.resize(num_levels-1);
    dx.resize(num_levels);
    for (int i = 0 ; i < num_levels ; i++)
    {
        char name[1024];
        snprintf(name, 1024, "level_%d", i);
        hid_t level = H5Gopen(file_handle, name);
        if (level < 0)
        {
            EXCEPTION1(InvalidDBTypeException, "Does not contain all "
                                               "refinement levels.");
        }

        hid_t boxes = H5Dopen(level, "boxes");
        if (boxes < 0)
        {
            EXCEPTION1(InvalidDBTypeException, "Does not contain \"boxes\".");
        }
        hid_t boxspace = H5Dget_space(boxes);
        hsize_t dims[1], maxdims[1];
        H5Sget_simple_extent_dims(boxspace, dims, maxdims);
        hid_t memdataspace = H5Screate_simple(1, dims, NULL);
        patchesPerLevel[i] = dims[0];
        totalPatches += patchesPerLevel[i];

        hid_t dx_id = H5Aopen_name(level, "vec_dx");
        if (dx_id >= 0)
        {
            double dtmp[4];
            if (dimension == 2)
            {
                doublevect2d dx_tmp;
                if (H5Aread(dx_id, doublevect2d_id, &dx_tmp) < 0)
                {
                    if (H5Aread(dx_id, H5T_NATIVE_DOUBLE, &dtmp[0]) >= 0) // try as simple array
                    {
                        dx_tmp.x = dtmp[0];
                        dx_tmp.y = dtmp[1];
                    }
                }
                dx[i].push_back(dx_tmp.x);
                dx[i].push_back(dx_tmp.y);
            }
            else if (dimension == 3)
            {
                doublevect3d dx_tmp;
                if (H5Aread(dx_id, doublevect3d_id, &dx_tmp) < 0)
                {
                    if (H5Aread(dx_id, H5T_NATIVE_DOUBLE, &dtmp[0]) >= 0) // try as simple array
                    {
                        dx_tmp.x = dtmp[0];
                        dx_tmp.y = dtmp[1];
                        dx_tmp.z = dtmp[2];
                    }
                }
                dx[i].push_back(dx_tmp.x);
                dx[i].push_back(dx_tmp.y);
                dx[i].push_back(dx_tmp.z);
            }
            else
            {
                doublevect4d dx_tmp;
                if (H5Aread(dx_id, doublevect4d_id, &dx_tmp) < 0)
                {
                    if (H5Aread(dx_id, H5T_NATIVE_DOUBLE, &dtmp[0]) >= 0) // try as simple array
                    {
                        dx_tmp.x = dtmp[0];
                        dx_tmp.y = dtmp[1];
                        dx_tmp.z = dtmp[2];
                        dx_tmp.u = dtmp[3];
                    }
                }
                dx[i].push_back(dx_tmp.x);
                dx[i].push_back(dx_tmp.y);
                dx[i].push_back(dx_tmp.z);
                dx[i].push_back(dx_tmp.u);
            }
        }
        else
        {
            dx_id = H5Aopen_name(level, "dx");

            if (dx_id < 0)
                EXCEPTION1(InvalidDBTypeException,
                           "Does not contain \"vec_dx\" or \"dx\".");

            // Figure out how many values in this attribute
            hid_t dx_sid = H5Aget_space(dx_id);
            int nvals = (int) H5Sget_simple_extent_npoints(dx_sid);
            H5Sclose(dx_sid);

            std::vector<double> dx_tmp(nvals);
            H5Aread(dx_id, H5T_NATIVE_DOUBLE, &dx_tmp[0]);
            for (int d = 0; d<dimension; ++d)
                dx[i].push_back(dx_tmp[d<nvals?d:0]);
        }
        H5Aclose(dx_id);

        if (i != num_levels - 1)
        {
            hid_t rr_id = H5Aopen_name(level, "vec_ref_ratio");
            if (rr_id >= 0)
            {
                int itmp[4];
                if (dimension == 2)
                {
                    intvect2d rr_tmp;
                    if (H5Aread(rr_id, intvect2d_id, &rr_tmp) < 0)
                    {
                        if (H5Aread(rr_id, H5T_NATIVE_INT, &itmp[0]) >= 0) // try as simple array
                        {
                            rr_tmp.i = itmp[0];
                            rr_tmp.j = itmp[1];
                        }
                    }
                    refinement_ratio[i].push_back(rr_tmp.i);
                    refinement_ratio[i].push_back(rr_tmp.j);
                }
                else if (dimension == 3)
                {
                    intvect3d rr_tmp;
                    if (H5Aread(rr_id, intvect3d_id, &rr_tmp) < 0)
                    {
                        if (H5Aread(rr_id, H5T_NATIVE_INT, &itmp[0]) >= 0) // try as simple array
                        {
                            rr_tmp.i = itmp[0];
                            rr_tmp.j = itmp[1];
                            rr_tmp.k = itmp[2];
                        }
                    }
                    refinement_ratio[i].push_back(rr_tmp.i);
                    refinement_ratio[i].push_back(rr_tmp.j);
                    refinement_ratio[i].push_back(rr_tmp.k);
                }
                else
                {
                    EXCEPTION1(InvalidDBTypeException, "vec_ref_ratio not yet supported for 4D data");
                }

            }
            else
            {
                rr_id = H5Aopen_name(level, "ref_ratio");

                if (rr_id < 0)
                    EXCEPTION1(InvalidDBTypeException,
                            "Does not contain \"vec_ref_ratio\" or \"ref_ratio\".");

                // Figure out how many values in this attribute
                hid_t rr_sid = H5Aget_space(rr_id);
                int nvals = (int) H5Sget_simple_extent_npoints(rr_sid);
                H5Sclose(rr_sid);

                std::vector<int> rr_tmp(nvals);
                H5Aread(rr_id, H5T_NATIVE_INT, &rr_tmp[0]);
                for (int d = 0; d < dimension; ++d)
                    refinement_ratio[i].push_back(rr_tmp[d<nvals?d:0]);
            }
            H5Aclose(rr_id);
        }

        if (!hasTime && i == 0)
        {
            // Some Chombo files put time at level_0 instead of in the "/"
            // directory.  Look for it here.
            hid_t time_id = H5Aopen_name(level, "time");
            if (time_id >= 0)
            {
                H5Aread(time_id, H5T_NATIVE_DOUBLE, &dtime);
                H5Aclose(time_id);
            }
        }

        H5Sclose(memdataspace);
        H5Sclose(boxspace);
        H5Dclose(boxes);
        H5Gclose(level);
    }

    H5Tclose(doublevect2d_id);
    H5Tclose(doublevect3d_id);
    H5Tclose(doublevect4d_id);

    //
    // Now that we know how many total patches there are, create our data
    // structures to hold the extents of each patch.
    //
    lowI.resize(totalPatches);
    hiI.resize(totalPatches);
    lowJ.resize(totalPatches);
    hiJ.resize(totalPatches);
    if (dimension >= 3)
    {
        lowK.resize(totalPatches);
        hiK.resize(totalPatches);

        if (dimension == 4)
        {
            lowL.resize(totalPatches);
            hiL.resize(totalPatches);
        }
    }

    //
    // Also, create space for the problem domain extent for each level
    //
    lowProbI.resize(num_levels);
    hiProbI.resize(num_levels);
    lowProbJ.resize(num_levels);
    hiProbJ.resize(num_levels);
    if (dimension >= 3)
    {
        lowProbK.resize(num_levels);
        hiProbK.resize(num_levels);

        if (dimension == 4)
        {
            lowProbL.resize(num_levels);
            hiProbL.resize(num_levels);
        }
    }

    //
    // Now iterate over the patches again, storing their extents in our
    // internal data structure.
    //
    hid_t box2d_id = H5Tcreate (H5T_COMPOUND, sizeof(box));
    H5Tinsert (box2d_id, "lo_i", HOFFSET(box2d, lo.i), H5T_NATIVE_INT);
    H5Tinsert (box2d_id, "lo_j", HOFFSET(box2d, lo.j), H5T_NATIVE_INT);
    H5Tinsert (box2d_id, "hi_i", HOFFSET(box2d, hi.i), H5T_NATIVE_INT);
    H5Tinsert (box2d_id, "hi_j", HOFFSET(box2d, hi.j), H5T_NATIVE_INT);

    hid_t box3d_id = H5Tcreate (H5T_COMPOUND, sizeof(box));
    H5Tinsert (box3d_id, "lo_i", HOFFSET(box3d, lo.i), H5T_NATIVE_INT);
    H5Tinsert (box3d_id, "lo_j", HOFFSET(box3d, lo.j), H5T_NATIVE_INT);
    H5Tinsert (box3d_id, "lo_k", HOFFSET(box3d, lo.k), H5T_NATIVE_INT);
    H5Tinsert (box3d_id, "hi_i", HOFFSET(box3d, hi.i), H5T_NATIVE_INT);
    H5Tinsert (box3d_id, "hi_j", HOFFSET(box3d, hi.j), H5T_NATIVE_INT);
    H5Tinsert (box3d_id, "hi_k", HOFFSET(box3d, hi.k), H5T_NATIVE_INT);

    hid_t box4d_id = H5Tcreate (H5T_COMPOUND, sizeof(box));
    H5Tinsert (box4d_id, "lo_i", HOFFSET(box4d, lo.i), H5T_NATIVE_INT);
    H5Tinsert (box4d_id, "lo_j", HOFFSET(box4d, lo.j), H5T_NATIVE_INT);
    H5Tinsert (box4d_id, "lo_k", HOFFSET(box4d, lo.k), H5T_NATIVE_INT);
    H5Tinsert (box4d_id, "lo_l", HOFFSET(box4d, lo.l), H5T_NATIVE_INT);
    H5Tinsert (box4d_id, "hi_i", HOFFSET(box4d, hi.i), H5T_NATIVE_INT);
    H5Tinsert (box4d_id, "hi_j", HOFFSET(box4d, hi.j), H5T_NATIVE_INT);
    H5Tinsert (box4d_id, "hi_k", HOFFSET(box4d, hi.k), H5T_NATIVE_INT);
    H5Tinsert (box4d_id, "hi_l", HOFFSET(box4d, hi.l), H5T_NATIVE_INT);

    int patchId = 0;
    for (int i = 0 ; i < num_levels ; i++)
    {
        char name[1024];
        snprintf(name, 1024, "level_%d", i);

        hid_t level = H5Gopen(file_handle, name);
        hid_t boxes = H5Dopen(level, "boxes");
        hid_t boxspace = H5Dget_space(boxes);
        hsize_t dims[1], maxdims[1];
        H5Sget_simple_extent_dims(boxspace, dims, maxdims);
        hid_t memdataspace = H5Screate_simple(1, dims, NULL);

        //
        // Level 0 contains information about the problem domain
        //
        if (i == 0)
        {
            hid_t probDomain = H5Aopen_name(level, "prob_domain");
            if (probDomain < 0)
            {
                EXCEPTION1(InvalidDBTypeException, "Does not contain \"prob_domain\".");
            }
            box *probDomain_buff = new box;
            if (H5Aread(probDomain, (dimension == 2 ? box2d_id : (dimension == 3 ? box3d_id : box4d_id)), probDomain_buff) < 0)
            {
                EXCEPTION1(InvalidDBTypeException, "Cannot read \"prob_domain\".");
            }
            if (dimension == 2)
            {
                lowProbI[0] = probDomain_buff->b2.lo.i;
                hiProbI[0] = probDomain_buff->b2.hi.i;
                lowProbJ[0] = probDomain_buff->b2.lo.j;
                hiProbJ[0] = probDomain_buff->b2.hi.j;
            }
            else if (dimension == 3)
            {
                lowProbI[0] = probDomain_buff->b3.lo.i;
                hiProbI[0] = probDomain_buff->b3.hi.i;
                lowProbJ[0] = probDomain_buff->b3.lo.j;
                hiProbJ[0] = probDomain_buff->b3.hi.j;
                lowProbK[0] = probDomain_buff->b3.lo.k;
                hiProbK[0] = probDomain_buff->b3.hi.k;
            }
            else
            {
                lowProbI[0] = probDomain_buff->b4.lo.i;
                hiProbI[0] = probDomain_buff->b4.hi.i;
                lowProbJ[0] = probDomain_buff->b4.lo.j;
                hiProbJ[0] = probDomain_buff->b4.hi.j;
                lowProbK[0] = probDomain_buff->b4.lo.k;
                hiProbK[0] = probDomain_buff->b4.hi.k;
                lowProbL[0] = probDomain_buff->b4.lo.l;
                hiProbL[0] = probDomain_buff->b4.hi.l;
            }
            delete probDomain_buff;
            H5Aclose(probDomain);
        }
        else
        {
            // In higher levels, calculate the information using
            // the previous level and refinement ratio
            lowProbI[i] = refinement_ratio[i-1][0] * lowProbI[i-1];
            hiProbI[i] = refinement_ratio[i-1][0] * hiProbI[i-1];
            lowProbJ[i] = refinement_ratio[i-1][1] * lowProbJ[i-1];
            hiProbJ[i] = refinement_ratio[i-1][1] * hiProbJ[i-1];
            if (dimension >= 3)
            {
                lowProbK[i] = refinement_ratio[i-1][2] * lowProbK[i-1];
                hiProbK[i] = refinement_ratio[i-1][2] * hiProbK[i-1];

                if (dimension == 4)
                {
                    lowProbL[i] = refinement_ratio[i-1][3] * lowProbL[i-1];
                    hiProbL[i] = refinement_ratio[i-1][3] * hiProbL[i-1];
                }
            }
        }

        //
        // Read box information
        //
        box *boxes_buff = new box[dims[0]];
        H5Dread(boxes, dimension == 2 ? box2d_id : (dimension == 3 ? box3d_id : box4d_id), memdataspace,
                boxspace, H5P_DEFAULT, boxes_buff);

        for (int j = 0 ; j < patchesPerLevel[i] ; j++)
        {
            if (dimension == 2)
            {
                lowI[patchId] = boxes_buff[j].b2.lo.i;
                lowJ[patchId] = boxes_buff[j].b2.lo.j;
                hiI[patchId] = boxes_buff[j].b2.hi.i+1;
                hiJ[patchId] = boxes_buff[j].b2.hi.j+1;
            }
            else if (dimension == 3)
            {
                lowI[patchId] = boxes_buff[j].b3.lo.i;
                lowJ[patchId] = boxes_buff[j].b3.lo.j;
                lowK[patchId] = boxes_buff[j].b3.lo.k;
                hiI[patchId] = boxes_buff[j].b3.hi.i+1;
                hiJ[patchId] = boxes_buff[j].b3.hi.j+1;
                hiK[patchId] = boxes_buff[j].b3.hi.k+1;
            }
            else
            {
                lowI[patchId] = boxes_buff[j].b4.lo.i;
                lowJ[patchId] = boxes_buff[j].b4.lo.j;
                lowK[patchId] = boxes_buff[j].b4.lo.k;
                lowL[patchId] = boxes_buff[j].b4.lo.l;
                hiI[patchId] = boxes_buff[j].b4.hi.i+1;
                hiJ[patchId] = boxes_buff[j].b4.hi.j+1;
                hiK[patchId] = boxes_buff[j].b4.hi.k+1;
                hiL[patchId] = boxes_buff[j].b4.hi.l+1;
            }
            patchId++;
        }

        delete [] boxes_buff;

        hid_t data_atts = H5Gopen(level, "data_attributes");
        if (data_atts >= 0)
        {
            hid_t ghost_id = H5Aopen_name(data_atts, "outputGhost");
            if (ghost_id < 0)
            {
                numGhosts.push_back(0);
                numGhosts.push_back(0);
                numGhosts.push_back(0);
                numGhosts.push_back(0);
            }
            else
            {
                if (dimension == 2)
                {
                    intvect2d g;
                    H5Aread(ghost_id, intvect2d_id, &g);
                    numGhosts.push_back(g.i);
                    numGhosts.push_back(g.j);
                    if (g.i > 0 || g.j > 0)
                        fileContainsGhosts = true;
                    numGhosts.push_back(0);
                    numGhosts.push_back(0);
                }
                else if (dimension == 3)
                {
                    intvect3d g;
                    H5Aread(ghost_id, intvect3d_id, &g);
                    if (g.i > 0 || g.j > 0 || g.k > 0)
                        fileContainsGhosts = true;
                    numGhosts.push_back(g.i);
                    numGhosts.push_back(g.j);
                    numGhosts.push_back(g.k);
                    numGhosts.push_back(0);
                }
                else
                {
                    intvect4d g;
                    H5Aread(ghost_id, intvect4d_id, &g);
                    if (g.i > 0 || g.j > 0 || g.k > 0 || g.l > 0)
                        fileContainsGhosts = true;
                    numGhosts.push_back(g.i);
                    numGhosts.push_back(g.j);
                    numGhosts.push_back(g.k);
                    numGhosts.push_back(g.l);
                }
                H5Aclose(ghost_id);
            }
            H5Gclose(data_atts);
        }
        else
        {
            numGhosts.push_back(0);
            numGhosts.push_back(0);
            numGhosts.push_back(0);
            numGhosts.push_back(0);
        }

        H5Sclose(memdataspace);
        H5Sclose(boxspace);
        H5Dclose(boxes);
        H5Gclose(level);
    }

    H5Tclose(box2d_id);
    H5Tclose(box3d_id);
    H5Tclose(box4d_id);
    H5Tclose(intvect2d_id);
    H5Tclose(intvect3d_id);
    H5Tclose(intvect4d_id);

    if (dimension == 4)
    {
        if (num_levels != 1)
            EXCEPTION1(ImproperUseException, "Chombo reader currently only supports single level 4D files.");

        // FIXME: Replace inefficient order n^2 algorithm with something better
        representativeBox.resize(patchesPerLevel[0]);
        representedBoxes.resize(patchesPerLevel[0]);
        for (int patchNo = 0; patchNo < patchesPerLevel[0]; ++patchNo)
        {
            for (int repCandidateNo = 0; repCandidateNo < patchesPerLevel[0]; ++repCandidateNo)
            {
                if (lowI[patchNo] == lowI[repCandidateNo] && hiI[patchNo] == hiI[repCandidateNo] &&
                    lowJ[patchNo] == lowJ[repCandidateNo] && hiJ[patchNo] == hiJ[repCandidateNo])
                {
                    representativeBox[patchNo] = repCandidateNo;
                    representedBoxes[repCandidateNo].push_back(patchNo);
                    if (patchNo == repCandidateNo) listOfRepresentativeBoxes.push_back(repCandidateNo);
                    break;
                }
            }
        }

#if 0
        // Debug output about boxes
        for (int patchNo = 0; patchNo < patchesPerLevel[0]; ++patchNo)
        {
            std::cout << "Representative for box " << patchNo << " [ " << lowI[patchNo] << ", " << hiI[patchNo] << ", " << lowJ[patchNo] << ", " << hiJ[patchNo] << ", " << lowK[patchNo] << ", " << hiK[patchNo] <<  ", " << lowL[patchNo] << ", " << hiL[patchNo] << "] is " << representativeBox[patchNo] << std::endl;
            if (representedBoxes[patchNo].size())
            {
                std::cout << "This box represents: ";
                for (std::vector<int>::const_iterator it = representedBoxes[patchNo].begin(); it != representedBoxes[patchNo].end(); ++it)
                {
                    std::cout << *it << " ";
                }
                std::cout << std::endl;
            }
        }
#endif
    }

    //
    // Look for particles
    //
    std::list<std::string> varList;
    if (H5Giterate(file_handle, "/particles", 0, add_var, &varList) == 0)
    {
        bool hasXPos = false;
        bool hasYPos = false;
        bool hasZPos = false;
        for (std::list<std::string>::iterator it = varList.begin(); it != varList.end(); ++it)
        {
            if (*it == "position_x") hasXPos = true;
            else if (*it == "position_y") hasYPos = true;
            else if (*it == "position_z") hasZPos = true;
            else
            {
                particleVarnames.push_back(*it);
            }
        }

        if (hasXPos && hasYPos && (dimension == 2 || hasZPos))
        {
            hasParticles = true;
        }
        else
        {
            debug1 << "Ignoring particles since position information is missing." << std::endl;
        }
    }

    //
    // The domain nesting takes a while to calculate.  We don't need the
    // data structure if we are on the mdserver.  But we do if we're on the
    // engine.  So only calculate it conditionally.
    //
    if (!avtDatabase::OnlyServeUpMetaData())
    {
        int t0 = visitTimer->StartTimer();
        CalculateDomainNesting();
        visitTimer->StopTimer(t0, "Chombo calculating domain nesting");
    }

    H5Fclose(file_handle);
    file_handle = -1;
    initializedReader = true;

    //
    // Find any materials
    //
    nMaterials = 0;
    for (size_t i = 0; i < varnames.size(); ++i)
    {
        if (varnames[i].find("fraction-") == 0)
        {
            int val = atoi(varnames[i].c_str()+9) + 1;

            if (val > nMaterials)
                nMaterials = val;
        }
    }

    if (nMaterials != 0)
    {
        ++nMaterials; // There is always one extra material
    }

    //
    // Check for mapping file and whether mapping is 3D
    //
    if (checkForMappingFile)
    {
        std::string mappingFilename(filenames[0]);
        size_t extPos = mappingFilename.find(".hdf5");
        if  (extPos == std::string::npos)
        {
            extPos = mappingFilename.find(".h5");
        }

        if (extPos != std::string::npos)
        {
            mappingFilename.insert(extPos, ".map");

            FileFunctions::VisItStat_t fs;
            if (FileFunctions::VisItStat(mappingFilename.c_str(), &fs) == 0)
            {
                hid_t mapping_file_handle = OpenHDF5File(mappingFilename.c_str());
                if (mapping_file_handle > 0)
                {
                    hid_t slash = H5Gopen(mapping_file_handle, "/");
                    if (slash > 0)
                    {
                        hid_t ncomponents_id = H5Aopen_name(slash, "num_components");
                        if (ncomponents_id > 0)
                        {
                            int mapping_ncomponents;
                            H5Aread(ncomponents_id, H5T_NATIVE_INT, &mapping_ncomponents);
                            if (mapping_ncomponents == 2)
                            {
                                mappingFileExists = true;
                            }
                            else if (mapping_ncomponents == 3)
                            {
                                mappingFileExists = true;
                                mappingIs3D = true;
                            }
                            else if (mapping_ncomponents == 4 && dimension == 4)
                            {
                                mappingFileExists = true;
                            }
                            else
                            {
                                debug1 << "Ignoring mapping file since it has ";
                                debug1 << mapping_ncomponents << " instead of expected ";
                                debug1 << "two, three or four components." << std::endl;
                            }
                            H5Aclose(ncomponents_id);
                        }
                        else
                        {
                            debug1 << "Ignoring mapping file since it has no ";
                            debug1 << "\"num_components\" attribute" << std::endl;
                        }
                        H5Gclose(slash);
                    }
                    else
                    {
                        debug1 << "Ignoring mapping file since I cannot open its ";
                        debug1 << "root group." << std::endl;
                    }
                    H5Fclose(mapping_file_handle);
                }
                else
                {
                    debug1 << "Igonoring mapping file since it is not an HDF5 file.";
                    debug1 << std::endl;
                }
            }
        }
    }

    //
    // Re-enable HDF5's automatic diagnostic output
    //
    H5Eset_auto(h5e_autofunc, h5e_clientdata);
}


// ****************************************************************************
//  Method: avtChomboFileFormat::CalculateDomainNesting
//
//  Purpose:
//      Calculates two important data structures.  One is the structure domain
//      nesting, which tells VisIt how the AMR patches are nested, which allows
//      VisIt to ghost out coarse zones that are refined by smaller zones.
//      The other structure is the rectilinear domain boundaries, which tells
//      VisIt which patches are next to each other, allowing VisIt to create
//      a layer of ghost zones around each patch.  Note that this only works
//      within a refinement level, not across refinement levels.
//
//  Programmer: Hank Childs
//  Creation:   January 22, 2006
//
//  Modifications:
//
//    Hank Childs, Fri Nov 14 09:11:33 PST 2008
//    Only create the domain boundaries object if we are not using ghost data.
//
//    Hank Childs, Sun Jan 25 15:54:52 PST 2009
//    Improve the test for whether or not to create the domain boundaries
//    object.
//
//    Tom Fogal, Thu Aug  5 16:45:30 MDT 2010
//    Fix incorrect destructor function.
//
//    Gunther H. Weber, Mon Jan 10 10:42:45 PST 2011
//    Add setting of refinement ratios.
//
//    Gunther H. Weber, Wed Jan 18 18:09:21 PST 2012
//    Add setting of cell sizes.
//
//    Gunther H. Weber, Thu Aug 15 11:37:51 PDT 2013
//    Initial bare-bones support for 4D Chombo files (fairly limited and 
//    "hackish")
//
// ****************************************************************************

void
avtChomboFileFormat::CalculateDomainNesting(void)
{
    int level;

    //
    // Calculate some info we will need in the rest of the routine.
    // levelStart[l] / levelEnd[l] delimit the half-open range
    // [levelStart[l], levelEnd[l]) of global patch ids belonging to
    // refinement level l.
    //
    int t0 = visitTimer->StartTimer();
    int totalPatches = 0;
    std::vector<int> levelStart;
    std::vector<int> levelEnd;
    for (level = 0 ; level < num_levels ; level++)
    {

        levelStart.push_back(totalPatches);
        totalPatches += patchesPerLevel[level];
        levelEnd.push_back(totalPatches);
    }
    visitTimer->StopTimer(t0, "Pre-work for domain nesting");

    //
    // Now that we know the total number of patches, we can allocate the
    // data structure for the patch nesting.
    //
    // NOTE: 4D files are presented to VisIt as 2D (i,j) slabs; only the
    // representative boxes (one per distinct (i,j) extent, set up in
    // InitializeReader) become domains, and all "(dimension <= 3 ?
    // dimension : 2)" expressions below reflect that 2D treatment.
    //
    int t1 = visitTimer->StartTimer();
    avtStructuredDomainNesting *dn = new avtStructuredDomainNesting(
            dimension < 4 ? totalPatches : (int)listOfRepresentativeBoxes.size(), num_levels);

    //
    // Calculate what the refinement ratio is from one level to the next.
    // Level 0 gets an identity ratio; level l>0 uses the ratio stored
    // between levels l-1 and l.  Cell sizes are dx scaled by the
    // user-specified aspect ratio.
    //
    std::vector<double> cs(dimension <= 3 ? dimension : 2);
    for (level = 0 ; level < num_levels ; level++)
    {
        if (level == 0)
            dn->SetLevelRefinementRatios(level, std::vector<int>((dimension <= 3 ? dimension : 2), 1));
        else
            dn->SetLevelRefinementRatios(level, refinement_ratio[level-1]);

        for (int d=0; d < (dimension <= 3 ? dimension : 2) ; ++d)
            cs[d] = dx[level][d]*aspectRatio[d];
        dn->SetLevelCellSizes(level, cs);
    }

    //
    // This multiplier will be needed to find out if patches are nested.
    // multiplier[l][d] scales a level-l index into the finest level's
    // index space (product of refinement ratios from level l to the
    // finest level), so extents from different levels can be compared
    // directly.
    //
    std::vector< std::vector<int> > multiplier(num_levels);
    for (int d = 0; d < (dimension <= 3 ? dimension : 2); ++d)
        multiplier[num_levels-1].push_back(1);
    for (level = num_levels-2 ; level >= 0 ; level--)
    {
        multiplier[level].resize(dimension <= 3 ? dimension : 2);
        for (int d = 0; d < (dimension <= 3 ? dimension : 2); ++d)
            multiplier[level][d] = multiplier[level+1][d]*refinement_ratio[level][d];
    }
    visitTimer->StopTimer(t1, "Setting up domain nesting: part 1");

    //
    // Now set up the data structure for patch boundaries.  The data
    // does all the work ... it just needs to know the extents of each patch.
    // Only built when ghost data must be computed (always-compute option
    // set, ghosts not allowed, or the file carries no ghost zones).
    //
    if (alwaysComputeDomainBoundaries || !allowedToUseGhosts || !fileContainsGhosts)
    {
        int t2 = visitTimer->StartTimer();
        avtRectilinearDomainBoundaries *rdb
                                    = new avtRectilinearDomainBoundaries(true);
        rdb->SetNumDomains(totalPatches);
        rdb->SetRefinementRatios(refinement_ratio);
        if (dimension < 4)
        {
            for (int patch = 0 ; patch < totalPatches ; patch++)
            {
                int my_level, local_patch;
                GetLevelAndLocalPatchNumber(patch, my_level, local_patch);

                // Extents as {iMin, iMax, jMin, jMax, kMin, kMax}.
                // hiI/hiJ/hiK were stored as (Chombo hi + 1) in
                // InitializeReader, i.e. the upper node index.
                int e[6];
                e[0] = lowI[patch];
                e[1] = hiI[patch];
                e[2] = lowJ[patch];
                e[3] = hiJ[patch];
                e[4] = (dimension < 3 ? 0 : lowK[patch]);
                e[5] = (dimension < 3 ? 0 : hiK[patch]);

                rdb->SetIndicesForAMRPatch(patch, my_level, e);
            }
        }
        else
        {
            // 4D: only the representative (i,j) boxes become domains;
            // k/l extents are collapsed to zero.
            for (size_t patchNo = 0; patchNo < listOfRepresentativeBoxes.size(); ++patchNo)
            {
                int patch = listOfRepresentativeBoxes[patchNo];

                int my_level, local_patch;
                GetLevelAndLocalPatchNumber(patch, my_level, local_patch);

                int e[6];
                e[0] = lowI[patch];
                e[1] = hiI[patch];
                e[2] = lowJ[patch];
                e[3] = hiJ[patch];
                e[4] = 0;
                e[5] = 0;

                rdb->SetIndicesForAMRPatch((int)patchNo, my_level, e);
            }
        }
        rdb->CalculateBoundaries();
        // Ownership of rdb passes to the cache via the void_ref_ptr.
        void_ref_ptr vrdb = void_ref_ptr(rdb,
                                       avtRectilinearDomainBoundaries::Destruct);
        cache->CacheVoidRef("any_mesh", AUXILIARY_DATA_DOMAIN_BOUNDARY_INFORMATION,
                            timestep, -1, vrdb);
        visitTimer->StopTimer(t2, "Chombo reader doing rect domain boundaries");
    }

    //
    // Calculate the child patches.  For each level l > 0, build an
    // interval tree over the level l-1 patches (scaled to the finest
    // index space via "multiplier") and query it with each level-l patch
    // to find the coarse patches it refines.
    //
    // FIXME: We will need changes for 4-dimensional AMR hierarchies if/once we support them.
    // Though at the moment they do not fit into the VisIt data model at all.
    int t3 = visitTimer->StartTimer();
    std::vector< std::vector<int> > childPatches(totalPatches);
    for (level = num_levels-1 ; level > 0 ; level--)
    {
        int prev_level             = level-1;
        int coarse_start           = levelStart[prev_level];
        int coarse_end             = levelEnd[prev_level];
        int num_coarse             = coarse_end - coarse_start;
        const std::vector<int>& mc = multiplier[prev_level];
        avtIntervalTree coarse_levels(num_coarse, (dimension <= 3 ? dimension : 2), false);
        double exts[6] = { 0, 0, 0, 0, 0, 0 };
        for (int i = 0 ; i < num_coarse ; i++)
        {
            // Coarse patch extents, scaled into the finest index space.
            exts[0] = mc[0]*lowI[coarse_start+i];
            exts[1] = mc[0]*hiI[coarse_start+i];
            exts[2] = mc[1]*lowJ[coarse_start+i];
            exts[3] = mc[1]*hiJ[coarse_start+i];
            if (dimension == 3)
            {
                exts[4] = mc[2]*lowK[coarse_start+i];
                exts[5] = mc[2]*hiK[coarse_start+i];
            }
            coarse_levels.AddElement(i, exts);
        }
        coarse_levels.Calculate(true);

        int patches_start          = levelStart[level];
        int patches_end            = levelEnd[level];
        const std::vector<int>& mp = multiplier[level];
        for (int patch = patches_start ; patch < patches_end ; patch++)
        {
            double min[3];
            double max[3];
            min[0] = mp[0]*lowI[patch];
            max[0] = mp[0]*hiI[patch];
            min[1] = mp[1]*lowJ[patch];
            max[1] = mp[1]*hiJ[patch];
            if (dimension == 3)
            {
                min[2] = mp[2]*lowK[patch];
                max[2] = mp[2]*hiK[patch];
            }
            std::vector<int> list;
            coarse_levels.GetElementsListFromRange(min, max, list);
            // The tree query is conservative; double-check actual overlap
            // of the scaled extents before recording the parent/child link.
            for (size_t i = 0 ; i < list.size() ; i++)
            {
                int candidate = coarse_start + list[i];
                if (hiI[patch]*mp[0] < lowI[candidate]*mc[0])
                    continue;
                if (lowI[patch]*mp[0] >= hiI[candidate]*mc[0])
                    continue;
                if (hiJ[patch]*mp[1] < lowJ[candidate]*mc[1])
                    continue;
                if (lowJ[patch]*mp[1] >= hiJ[candidate]*mc[1])
                    continue;
                if (dimension == 3)
                {
                    if (hiK[patch]*mp[2] < lowK[candidate]*mc[2])
                        continue;
                    if (lowK[patch]*mp[2] >= hiK[candidate]*mc[2])
                        continue;
                }
                childPatches[candidate].push_back(patch);
           }
        }
    }
    visitTimer->StopTimer(t3, "Slow part of Chombo nesting.");

    //
    // Now that we know the extents for each patch and what its children are,
    // tell the structured domain boundary that information.
    //
    // Note the logical-extent ordering expected by SetNestingForDomain:
    // {iMin, jMin, kMin, iMax, jMax, kMax}.  The "-1" converts the stored
    // exclusive upper bound (Chombo hi + 1) back to an inclusive zone index.
    //
    int t4 = visitTimer->StartTimer();
    if (dimension < 4)
    {
        for (int i = 0 ; i < totalPatches ; i++)
        {
            int my_level, local_patch;
            GetLevelAndLocalPatchNumber(i, my_level, local_patch);

            std::vector<int> logExts(6);
            logExts[0] = lowI[i];
            logExts[3] = hiI[i]-1;
            logExts[1] = lowJ[i];
            logExts[4] = hiJ[i]-1;
            logExts[2] = 0;
            logExts[5] = 0;
            if (dimension == 3)
            {
                logExts[2] = lowK[i];
                logExts[5] = hiK[i]-1;
            }

            dn->SetNestingForDomain(i, my_level, childPatches[i], logExts);
        }
    }
    else
    {
        // 4D: register only the representative boxes, as 2D domains.
        for (size_t patchNo = 0; patchNo < listOfRepresentativeBoxes.size(); ++patchNo)
        {
            int patch = listOfRepresentativeBoxes[patchNo];

            int my_level, local_patch;
            GetLevelAndLocalPatchNumber(patch, my_level, local_patch);

            std::vector<int> logExts(6);
            logExts[0] = lowI[patch];
            logExts[3] = hiI[patch]-1;
            logExts[1] = lowJ[patch];
            logExts[4] = hiJ[patch]-1;
            logExts[2] = 0;
            logExts[5] = 0;

            dn->SetNestingForDomain((int)patchNo, my_level, childPatches[patch], logExts);
        }
    }

    //
    // Register this structure with the generic database so that it knows
    // to ghost out the right cells.  Ownership of dn passes to the cache
    // via the void_ref_ptr.
    //
    dn->SetNumDimensions(dimension  <= 3 ? dimension : 2);
    void_ref_ptr vr = void_ref_ptr(dn, avtStructuredDomainNesting::Destruct);
    cache->CacheVoidRef("any_mesh", AUXILIARY_DATA_DOMAIN_NESTING_INFORMATION,
                        timestep, -1, vr);
    visitTimer->StopTimer(t4, "Final step of Chombo nesting");
}


// ****************************************************************************
//  Method: avtChomboFileFormat::PopulateDatabaseMetaData
//
//  Purpose:
//      This database meta-data object is like a table of contents for the
//      file.  By populating it, you are telling the rest of VisIt what
//      information it can request from you.
//
//  Programmer: childs -- generated by xml2avt
//  Creation:   Thu Jan 19 11:17:14 PDT 2006
//
//  Modifications:
//
//    Hank Childs, Mon Jun 19 14:30:12 PDT 2006
//    Fix problem with grouping vectors in 3D.  Also add support for U,V,W
//    vectors.
//
//    Gunther H. Weber, Tue Aug  7 16:00:22 PDT 2007
//    If material information was found in the file, add corresponding
//    meta data
//
//    Gunther H. Weber, Thu Oct 11 15:49:41 PDT 2007
//    Add expressions from Chombo files.
//
//    Hank Childs, Sun Oct 28 09:42:50 PST 2007
//    Set meta-data saying whether or not we have ghosts on the exterior
//    boundary.
//
//    Gunther H. Weber, Mon Mar 24 20:46:04 PDT 2008
//    Added support for node centered Chombo data.
//
//    Gunther H. Weber, Mon Mar 24 21:14:59 PDT 2008
//    Fixed bug for files containing only one level when "default to only root
//    level was selected" (for these files an empty selection was the result).
//
//    Gunther H. Weber, Tue Apr 15 17:43:30 PDT 2008
//    Add support to automatically import a coordinate mapping file via
//    conn_cmfe.
//
//    Gunther H. Weber, Thu Apr 17 14:47:44 PDT 2008
//    Do not use a unique but unreadable name for displacement expression
//    since database-defined expressions are local to database.
//
//    Gunther H. Weber, Thu May  8 19:41:52 PDT 2008
//    Set spatial extents.
//
//    Hank Childs, Sun Jan 25 15:55:17 PST 2009
//    Change test for whether or not we are using ghost data.
//
//    Gunther H. Weber, Wed Jun 10 18:28:24 PDT 2009
//    Added ability to handle particle data in Chombo files.
//
//    Gunther H. Weber, Wed Jul 22 15:42:27 PDT 2009
//    Only set cycle in metadata if it can be determined. Otherwise announce
//    that it is not accurate.
//
//    Gunther H. Weber, Tue Sep 15 11:26:12 PDT 2009
//    Added support for 3D mappings for 2D files.
//
//    Tom Fogal, Thu Aug  5 20:11:04 MDT 2010
//    Add support for resolution selection contract.
//
//    Gunther H. Weber, Thu Aug 15 11:37:51 PDT 2013
//    Initial bare-bones support for 4D Chombo files (fairly limited and 
//    "hackish")
//
// ****************************************************************************

void
avtChomboFileFormat::PopulateDatabaseMetaData(avtDatabaseMetaData *md)
{
    // Make sure the level/patch tables read from the file are available.
    if (!initializedReader)
        InitializeReader();

    // Total number of patches across all refinement levels.
    int totalPatches = 0;
    for (int level = 0 ; level < num_levels ; level++)
    {
        totalPatches += patchesPerLevel[level];
    }

    // Prevent VisIt from sorting the variables.
    md->SetMustAlphabetizeVariables(false);

    //
    // Set up the mesh.  Use the groups construct to represent refinement
    // levels.  Also take care to name each patch in an appropriate way.
    //
    char mesh_name[32] = "Mesh";
    avtMeshMetaData *mesh = new avtMeshMetaData;
    mesh->name = mesh_name;
    mesh->meshType = AVT_AMR_MESH;
    // For 4D files only a representative subset of boxes is exposed as blocks.
    mesh->numBlocks = dimension < 4 ? totalPatches : (int)listOfRepresentativeBoxes.size();
    mesh->blockOrigin = 0;
    mesh->spatialDimension = dimension;
    mesh->topologicalDimension = dimension;
    // A 2D file with a 3D coordinate mapping file is presented as a 3D mesh.
    if (dimension == 2 && checkForMappingFile && mappingFileExists && mappingIs3D)
    {
        mesh->spatialDimension = 3;
    }
    // 4D files are presented as a 2D mesh; the two extra dimensions are
    // exposed below as components of array variables.
    if (dimension == 4)
    {
        mesh->spatialDimension = 2;
        mesh->topologicalDimension = 2;
    }
    // Spatial extents of the level-0 problem domain (index range scaled by
    // the level-0 cell size and aspect ratio).
    mesh->hasSpatialExtents = true;
    mesh->minSpatialExtents[0] = probLo[0] + lowProbI[0] * dx[0][0] * aspectRatio[0];
    mesh->maxSpatialExtents[0] = probLo[0] + (hiProbI[0] + 1) * dx[0][0] * aspectRatio[0];
    mesh->minSpatialExtents[1] = probLo[1] + lowProbJ[0] * dx[0][1] * aspectRatio[1];
    mesh->maxSpatialExtents[1] = probLo[1] + (hiProbJ[0] + 1) * dx[0][1] * aspectRatio[1];
    if (dimension == 3)
    {
        mesh->minSpatialExtents[2] = probLo[2] + lowProbK[0] * dx[0][2] * aspectRatio[2];
        mesh->maxSpatialExtents[2] = probLo[2] + (hiProbK[0] + 1) * dx[0][2] * aspectRatio[2];
    }
    mesh->blockTitle = "patches";
    mesh->blockPieceName = "patch";
    mesh->numGroups = num_levels;
    mesh->groupTitle = "levels";
    mesh->groupPieceName = "level";
    mesh->containsExteriorBoundaryGhosts = allowedToUseGhosts && fileContainsGhosts;
    if (dimension < 4)
    {
        // One block per patch; group blocks by refinement level.
        std::vector<int> groupIds(totalPatches);
        std::vector<std::string> blockPieceNames(totalPatches);
        int levels_of_detail = 0;
        for (int i = 0 ; i < totalPatches ; ++i)
        {
            char tmpName[128];
            int level, local_patch;
            GetLevelAndLocalPatchNumber(i, level, local_patch);
            groupIds[i] = level;
            snprintf(tmpName, sizeof(tmpName), "level%d,patch%d", level, local_patch);
            blockPieceNames[i] = tmpName;
            levels_of_detail = std::max(levels_of_detail, level);
        }
        mesh->blockNames = blockPieceNames;
        mesh->LODs = levels_of_detail;
        this->resolution = levels_of_detail; // current acceptable res = max res.
        md->Add(mesh);
        md->AddGroupInformation(num_levels,totalPatches,groupIds);
    }
    else
    {
        // 4D case: one block per representative box.
        std::vector<int> groupIds(listOfRepresentativeBoxes.size());
        std::vector<std::string> blockPieceNames(listOfRepresentativeBoxes.size());
        int levels_of_detail = 0;
        for (size_t i = 0; i < listOfRepresentativeBoxes.size(); ++i)
        {
            char tmpName[128];
            int level, local_patch;
            GetLevelAndLocalPatchNumber(listOfRepresentativeBoxes[i], level, local_patch);
            groupIds[i] = level;
            snprintf(tmpName, sizeof(tmpName), "level%d,patch%d", level, local_patch);
            blockPieceNames[i] = tmpName;
            levels_of_detail = std::max(levels_of_detail, level);
        }
        mesh->blockNames = blockPieceNames;
        mesh->LODs = levels_of_detail;
        this->resolution = levels_of_detail; // current acceptable res = max res.
        md->Add(mesh);
        md->AddGroupInformation(num_levels,(int)listOfRepresentativeBoxes.size(), groupIds);
    }

    //
    // Add each scalar variable.
    //
    // Names of expressions added here are remembered so the particle vector
    // expressions below can avoid name collisions.
    std::list<std::string> addedExpressionNames;
    int nVars = (int)varnames.size();
    for (int i = 0; i < nVars; i++)
    {
        if (dimension == 4)
        {
            // In 4D, each variable becomes an array variable whose components
            // span the (k,l) index range; node centering adds one extra layer
            // of values in each direction.
            int nArrayComps = nodeCentered ?
                (hiProbK[0] - lowProbK[0] + 2) * (hiProbL[0] - lowProbL[0] + 2) :
                (hiProbK[0] - lowProbK[0] + 1) * (hiProbL[0] - lowProbL[0] + 1);
            AddArrayVarToMetaData(md, varnames[i], nArrayComps, mesh_name, nodeCentered ? AVT_NODECENT : AVT_ZONECENT);

            // Also expose a convenience expression that sums the array.
            int buff_size = 4096;
            char sum_expr_buffer[4096];

            Expression sum_expr;
            snprintf(sum_expr_buffer, buff_size, "%s_sum", varnames[i].c_str());
            sum_expr.SetName(sum_expr_buffer);
            addedExpressionNames.push_back(sum_expr_buffer);
            snprintf(sum_expr_buffer, buff_size, "array_sum(%s)", varnames[i].c_str());
            sum_expr.SetDefinition(sum_expr_buffer);
            sum_expr.SetType(Expression::ScalarMeshVar);
            md->AddExpression(&sum_expr);
        }
        else
            AddScalarVarToMetaData(md, varnames[i], mesh_name, nodeCentered ? AVT_NODECENT : AVT_ZONECENT);
    }

    // Add vars _vpar and _mu generated by reader
    if (dimension == 4)
    {
        int nArrayComps = nodeCentered ?
            (hiProbK[0] - lowProbK[0] + 2) * (hiProbL[0] - lowProbL[0] + 2) :
            (hiProbK[0] - lowProbK[0] + 1) * (hiProbL[0] - lowProbL[0] + 1);
        AddArrayVarToMetaData(md, "_vpar", nArrayComps, mesh_name, nodeCentered ? AVT_NODECENT : AVT_ZONECENT);
        AddArrayVarToMetaData(md, "_mu", nArrayComps, mesh_name, nodeCentered ? AVT_NODECENT : AVT_ZONECENT);
    }

    //
    // Chombo has no vector variables.  But it clearly has some scalar
    // variables that should be combined into vectors.  Identify these and
    // make expressions for the vectors.
    //
    for (int i = 0; i < nVars; i++)
    {
        // A variable starting with x/X/u/U may have matching y/z (or v/w)
        // component variables; if so, define a vector expression from them.
        if (varnames[i][0] == 'x' || varnames[i][0] == 'X' ||
            varnames[i][0] == 'u' || varnames[i][0] == 'U')
        {
            char yChar = '\0', zChar = '\0';
            if (varnames[i][0] == 'x')
            {
                yChar = 'y';
                zChar = 'z';
            }
            else if (varnames[i][0] == 'X')
            {
                yChar = 'Y';
                zChar = 'Z';
            }
            else if (varnames[i][0] == 'u')
            {
                yChar = 'v';
                zChar = 'w';
            }
            else if (varnames[i][0] == 'U')
            {
                yChar = 'V';
                zChar = 'W';
            }

            // Look for the matching y (or v) component variable.
            char yName[1024];
            snprintf(yName, 1024, "%c%s", yChar, varnames[i].c_str()+1);
            int matchY = -1;
            for (int j = 0 ; j < nVars ; j++)
            {
                if (varnames[j] == yName)
                {
                    matchY = j;
                    break;
                }
            }
            if (matchY < 0)
                continue;
            if (dimension == 2)
            {
                Expression vec;
                // Name the vector after the common suffix, skipping any
                // leading non-alphabetic separator characters.
                // Cast to unsigned char: isalpha() on a negative char is UB.
                const char *str = varnames[i].c_str()+1;
                while (*str != '\0' && !isalpha((unsigned char)*str))
                    str++;
                vec.SetName(str);
                char defn[1024];
                snprintf(defn, 1024, "{<%s>, <%s>}", varnames[i].c_str(), yName);
                vec.SetDefinition(defn);
                vec.SetType(Expression::VectorMeshVar);
                md->AddExpression(&vec);
                addedExpressionNames.push_back(str);
            }
            else
            {
                // 3D: also require the matching z (or w) component.
                char zName[1024];
                snprintf(zName, 1024, "%c%s", zChar, varnames[i].c_str()+1);
                int matchZ = -1;
                for (int j = 0 ; j < nVars ; j++)
                {
                    if (varnames[j] == zName)
                    {
                        matchZ = j;
                        break;
                    }
                }
                if (matchZ < 0)
                    continue;
                Expression vec;
                // Cast to unsigned char: isalpha() on a negative char is UB.
                const char *str = varnames[i].c_str()+1;
                while (*str != '\0' && !isalpha((unsigned char)*str))
                    str++;
                vec.SetName(str);
                char defn[1024];
                snprintf(defn, 1024, "{<%s>, <%s>, <%s>}", varnames[i].c_str(),
                                                     yName,zName);
                vec.SetDefinition(defn);
                vec.SetType(Expression::VectorMeshVar);
                md->AddExpression(&vec);
                addedExpressionNames.push_back(str);
            }
        }
    }

    // Add particle mesh and vars if available
    if (hasParticles)
    {
        // Add particle mesh
        AddMeshToMetaData(md, std::string("particles"), AVT_POINT_MESH, 0, 1, 0, dimension, 0);

        for (std::vector<std::string>::iterator it = particleVarnames.begin();
                it != particleVarnames.end(); ++it)
        {
            AddScalarVarToMetaData(md, *it, std::string("particles"), AVT_NODECENT);
        }

        //
        // Chombo has no vector variables.  But it clearly has some scalar
        // variables that should be combined into vectors.  Identify these and
        // make expressions for the vectors.
        //
        for (size_t i = 0; i < particleVarnames.size(); i++)
        {
            size_t len = particleVarnames[i].size();
            // Guard len >= 2: indexing [len-2] on a 0- or 1-character name
            // would be out of bounds.
            if (len >= 2 && particleVarnames[i][len-2] == '_' &&
                    (particleVarnames[i][len-1] == 'x' || particleVarnames[i][len-1] == 'X' ||
                     particleVarnames[i][len-1] == 'u' || particleVarnames[i][len-1] == 'U'))
            {
                char yChar = '\0', zChar = '\0';
                if (particleVarnames[i][len-1] == 'x')
                {
                    yChar = 'y';
                    zChar = 'z';
                }
                else if (particleVarnames[i][len-1] == 'X')
                {
                    yChar = 'Y';
                    zChar = 'Z';
                }
                else if (particleVarnames[i][len-1] == 'u')
                {
                    yChar = 'v';
                    zChar = 'w';
                }
                else if (particleVarnames[i][len-1] == 'U')
                {
                    yChar = 'V';
                    zChar = 'W';
                }

                // Look for the matching y (or v) component variable.
                std::string yName = particleVarnames[i];
                yName[len-1] = yChar;
                int matchY = -1;
                for (size_t j = 0 ; j < particleVarnames.size() ; j++)
                {
                    if (particleVarnames[j] == yName)
                    {
                        matchY = (int)j;
                        break;
                    }
                }
                if (matchY < 0)
                    continue;
                if (dimension == 2)
                {
                    Expression vec;
                    // Strip the "_x" suffix; prefix with "particle_" if a
                    // mesh vector expression of the same name already exists.
                    std::string expressionName(particleVarnames[i], 0, len-2);
                    if (std::find(addedExpressionNames.begin(), addedExpressionNames.end(),
                                expressionName) != addedExpressionNames.end())
                        expressionName.insert(0, std::string("particle_"));
                    vec.SetName(expressionName);
                    char defn[1024];
                    snprintf(defn, 1024, "{<%s>, <%s>}", particleVarnames[i].c_str(), yName.c_str());
                    vec.SetDefinition(defn);
                    vec.SetType(Expression::VectorMeshVar);
                    md->AddExpression(&vec);
                }
                else
                {
                    // 3D: also require the matching z (or w) component.
                    std::string zName = particleVarnames[i];
                    zName[len-1] = zChar;
                    int matchZ = -1;
                    for (size_t j = 0 ; j < particleVarnames.size() ; j++)
                    {
                        if (particleVarnames[j] == zName)
                        {
                            matchZ = (int)j;
                            break;
                        }
                    }
                    if (matchZ < 0)
                        continue;
                    Expression vec;
                    std::string expressionName(particleVarnames[i], 0, len-2);
                    if (std::find(addedExpressionNames.begin(), addedExpressionNames.end(),
                                expressionName) != addedExpressionNames.end())
                        expressionName.insert(0, std::string("particle_"));
                    vec.SetName(expressionName);
                    char defn[1024];
                    snprintf(defn, 1024, "{<%s>, <%s>, <%s>}", particleVarnames[i].c_str(),
                            yName.c_str(), zName.c_str());
                    vec.SetDefinition(defn);
                    vec.SetType(Expression::VectorMeshVar);
                    md->AddExpression(&vec);
                }
            }
        }
    }
    //
    // If any materials were found, add them here
    //
    if (nMaterials)
    {
        std::vector<std::string> mnames(nMaterials);

        std::string matname;
        matname = "materials";

        // Materials are named mat1..matN.
        char str[32];
        for (int m = 0; m < nMaterials; ++m)
        {
            snprintf(str, sizeof(str), "mat%d", m+1);
            mnames[m] = str;
        }
        AddMaterialToMetaData(md, matname, mesh_name, nMaterials, mnames);
    }

    //
    // If any expressions where found, add them here
    //
    for (std::list<Expression*>::iterator it = expressions.begin(); it != expressions.end(); ++it)
    {
        md->AddExpression(*it);
    }

    //
    // Check for existence of mapping file
    //
    if (checkForMappingFile)
    {
        // The mapping file name is derived from the data file name by
        // inserting ".map" before the ".hdf5" / ".h5" extension.
        std::string mappingFilename(filenames[0]);
        size_t extPos = mappingFilename.find(".hdf5");
        if  (extPos == std::string::npos)
        {
            extPos = mappingFilename.find(".h5");
        }

        if (extPos != std::string::npos)
        {
            mappingFilename.insert(extPos, ".map");

            if (mappingFileExists)
            {
                debug5 << "Found mapping file " << mappingFilename << ". ";
                debug5 << "Adding cmfe expression!" << std::endl;

#ifdef _WIN32
                // Escape ":" and "\" in Windows filenames
                int numCharactersToEscape = 0;
                for (std::string::iterator it = mappingFilename.begin();
                        it != mappingFilename.end(); ++it)
                    if (*it == ':' || *it == '\\')
                        ++numCharactersToEscape;

                if (numCharactersToEscape)
                {
                    std::string escapedFilename;
                    for (std::string::iterator it = mappingFilename.begin();
                            it != mappingFilename.end(); ++it)
                    {
                        if (*it == ':' || *it == '\\')
                            escapedFilename.push_back('\\');
                        escapedFilename.push_back(*it);
                    }
                    mappingFilename = escapedFilename;
                }
#endif

                // Displacement expression that maps the rectilinear index
                // space onto the mapped coordinates via conn_cmfe.
                Expression *mappingExpression = new Expression;
                mappingExpression->SetName("_mapping_displacement");
                mappingExpression->SetType(Expression::VectorMeshVar);
                mappingExpression->SetHidden(false);

                if (dimension == 2 && !mappingIs3D)
                    mappingExpression->SetDefinition(
                            "{conn_cmfe(<"+mappingFilename+":x>,Mesh)," +
                            " conn_cmfe(<"+mappingFilename+":y>,Mesh)} - coords(Mesh)");
                else if (dimension == 4)
                    mappingExpression->SetDefinition(
                            "{array_decompose(conn_cmfe(<"+mappingFilename+":x>,Mesh), 0)," +
                            " array_decompose(conn_cmfe(<"+mappingFilename+":y>,Mesh), 1)} - coords(Mesh)");
                else
                    mappingExpression->SetDefinition(
                            "{conn_cmfe(<"+mappingFilename+":x>,Mesh)," +
                            " conn_cmfe(<"+mappingFilename+":y>,Mesh)," +
                            " conn_cmfe(<"+mappingFilename+":z>,Mesh)} - coords(Mesh)");

                md->AddExpression(mappingExpression);
            }
            else
            {
                debug5 << "No mapping file " << mappingFilename << " exists. ";
                debug5 << "No need to add cmfe expressions." << std::endl;
            }
        }
        else
        {
            debug1 << "Warning: Chombo file does not have .h5 or .hdf5 extension. ";
            debug1 << "Cannot figure out possible filename for coordinate mapping file. ";
            debug1 << "Ignoring any mapping files." << endl;
        }
    }

    if (dimension == 4)
    {
        // Expose the level-0 cell sizes of the two extra dimensions as
        // constant expressions.
        Expression *dv_parExpression = new Expression;
        dv_parExpression->SetName("_dv_par");
        char buffer[256];
        snprintf(buffer, 256, "cell_constant(Mesh, %lf)", dx[0][2]);
        dv_parExpression->SetDefinition(buffer);
        md->AddExpression(dv_parExpression);
        Expression *dmuExpression = new Expression;
        dmuExpression->SetName("_dmu");
        snprintf(buffer, 256, "cell_constant(Mesh, %lf)", dx[0][3]);
        dmuExpression->SetDefinition(buffer);
        md->AddExpression(dmuExpression);
    }

    //
    // Add information about SIL restrictions
    //
    if (enableOnlyRootLevel || enableOnlyExplicitMaterials)
    {
        if (enableOnlyRootLevel && num_levels > 1)
        {
            md->AddDefaultSILRestrictionDescription(std::string("!TurnOffAll"));
            md->AddDefaultSILRestrictionDescription(std::string("+level0"));
        }
        else
        {
            md->AddDefaultSILRestrictionDescription(std::string("!TurnOnAll"));
        }
        if (enableOnlyExplicitMaterials && nMaterials > 1)
        {
            char str[32];
            snprintf(str, sizeof(str), "-mat%d", nMaterials);
            md->AddDefaultSILRestrictionDescription(std::string(str));
        }
    }

    // Only advertise the cycle when it could actually be determined from
    // the file; otherwise mark it as inaccurate.
    md->SetTime(timestep, dtime);
    if (cycle != avtFileFormat::INVALID_CYCLE)
    {
        md->SetCycle(timestep, cycle);
        md->SetCycleIsAccurate(true, timestep);
    }
    else
    {
        md->SetCycleIsAccurate(false, timestep);
    }
}


// ****************************************************************************
//  Method: avtChomboFileFormat::GetLevelAndLocalPatchNumber
//
//  Purpose:
//      Translates our global patch identifier to a refinement level and patch
//      number local to that refinement level.
//
//  Programmer: Hank Childs
//  Creation:   January 22, 2006
//
// ****************************************************************************

void
avtChomboFileFormat::GetLevelAndLocalPatchNumber(int global_patch,
                                            int &level, int &local_patch) const
{
    // Walk the levels, subtracting each level's patch count from the global
    // index until the remainder falls within the current level.  If
    // global_patch exceeds the total patch count, level ends up at
    // num_levels and local_patch carries the leftover (callers validate).
    int remaining = global_patch;
    level = 0;
    while (level < num_levels)
    {
        if (remaining < patchesPerLevel[level])
        {
            break;
        }
        remaining -= patchesPerLevel[level];
        level++;
    }
    local_patch = remaining;
}


// ****************************************************************************
//  Method: avtChomboFileFormat::GetMesh
//
//  Purpose:
//      Gets the mesh associated with this file.  The mesh is returned as a
//      derived type of vtkDataSet (ie vtkRectilinearGrid, vtkStructuredGrid,
//      vtkUnstructuredGrid, etc).
//
//  Arguments:
//      domain      The index of the domain.  If there are NDomains, this
//                  value is guaranteed to be between 0 and NDomains-1,
//                  regardless of block origin.
//      meshname    The name of the mesh of interest.  This can be ignored if
//                  there is only one mesh.
//
//  Programmer: childs -- generated by xml2avt
//  Creation:   Thu Jan 19 11:17:14 PDT 2006
//
//  Modifications:
//
//    Hank Childs, Thu Sep 27 17:14:58 PDT 2007
//    Avoid floating point accumulation errors.
//
//    Gunther H. Wever, Wed Oct  3 17:34:56 PDT 2007
//    Do not strip ghost zones from file and instead add appropriate
//    ghost zone field data.
//
//    Hank Childs, Mon Oct  8 17:22:26 PDT 2007
//    Make Gunther's code go live using DB options.  Also correct bug with
//    setting coordinate positions with ghost zones.
//
//    Gunther H. Weber, Mon Oct 22 11:50:41 PDT 2007
//    Distinguish between ghost zones internal and exterior to problem.
//
//    Hank Childs, Sun Oct 28 13:36:43 PST 2007
//    Fix bug with avtRealDims.
//
//    Hank Childs, Sun Jan 25 15:55:31 PST 2009
//    Change test for whether or not we are allowed to use ghost data.
//
//    Gunther H. Weber, Wed Jun 10 18:28:24 PDT 2009
//    Added ability to handle particle data in Chombo files.
//
//    Gunther H. Weber, Thu Jun 17 10:10:17 PDT 2010
//    Added ability to connect particle mesh based on polymer_id and
//    particle_nid
//
//    Gunther H. Weber, Fri Mar 25 13:20:48 PDT 2011
//    Only add avtRealDims if file contains ghosts.
//
//    Gunther H. Weber, Thu Aug 15 11:37:51 PDT 2013
//    Initial bare-bones support for 4D Chombo files (fairly limited and 
//    "hackish")
//
//    Gunther H. Weber, Wed Nov 20 15:49:21 PST 2013
//    Return coordinates as double instead of float (consistent with data
//    values).
//
// ****************************************************************************

// Comparator class used to sort an array of integers so that the permutation
// of integers gives an order in which all particles belonging to a single
// polymer are next to each other and in correct order along the polymer.
class LookUpOrderCmp
{
    public:
        // d1: primary sort key per particle; d2: secondary sort key.
        // Both arrays are borrowed and must outlive the comparator.
        LookUpOrderCmp(const int *d1, const int *d2) : order1Var(d1), order2Var(d2) {}

        // Lexicographic "less than" on (order1Var[.], order2Var[.]).
        // const so the comparator can be used through a const reference
        // (e.g. when passed to standard sort algorithms).
        bool operator()(vtkIdType a, vtkIdType b) const
        {
            return order1Var[a] < order1Var[b] ||
                (order1Var[a] == order1Var[b] && order2Var[a] < order2Var[b]);
        }

    private:
        const int *order1Var;
        const int *order2Var;
};

//
//  Modifications
//    Mark C. Miller, Wed Feb  9 13:38:49 PST 2022
//    Use new method, OpenHDF5File, to open file.
//
vtkDataSet *
avtChomboFileFormat::GetMesh(int patch, const char *meshname)
{
    if (dimension == 4) patch = listOfRepresentativeBoxes[patch];
    int   i;

    if (strcmp(meshname, "Mesh") == 0)
    {
        if (!initializedReader)
            InitializeReader();

        int level, local_patch;
        GetLevelAndLocalPatchNumber(patch, level, local_patch);

        if (level >= num_levels)
        {
            EXCEPTION1(InvalidVariableException, meshname);
        }

        if (local_patch >= patchesPerLevel[level])
        {
            EXCEPTION2(BadDomainException, local_patch, patchesPerLevel[level]);
        }

        int dims[3];
        int numGhostI = 0, numGhostJ = 0, numGhostK = 0;
        if (!allowedToUseGhosts)
        {
            dims[0] = hiI[patch]-lowI[patch]+1;
            dims[1] = hiJ[patch]-lowJ[patch]+1;
            dims[2] = (dimension == 3 ? hiK[patch]-lowK[patch]+1 : 1);
        }
        else
        {
            numGhostI = numGhosts[4*level];
            numGhostJ = numGhosts[4*level+1];
            numGhostK = numGhosts[4*level+2];

            dims[0] = hiI[patch]-lowI[patch]+1+2*numGhostI;
            dims[1] = hiJ[patch]-lowJ[patch]+1+2*numGhostJ;
            dims[2] = (dimension == 3 ? hiK[patch]-lowK[patch]+1+2*numGhostK : 1);
        }

        vtkRectilinearGrid *rg = vtkRectilinearGrid::New();
        rg->SetDimensions(dims);

        vtkDoubleArray *xcoord = vtkDoubleArray::New();
        vtkDoubleArray *ycoord = vtkDoubleArray::New();
        vtkDoubleArray *zcoord = vtkDoubleArray::New();

        xcoord->SetNumberOfTuples(dims[0]);
        ycoord->SetNumberOfTuples(dims[1]);
        zcoord->SetNumberOfTuples(dims[2]);

        double *ptr = xcoord->GetPointer(0);
        if (!allowedToUseGhosts)
            ptr[0] = probLo[0] + lowI[patch]*dx[level][0]*aspectRatio[0];
        else
            ptr[0] = probLo[0] + (lowI[patch]-numGhostI)*dx[level][0]*aspectRatio[0];

        for (i = 1; i < dims[0]; i++)
            ptr[i] = ptr[0] + i*dx[level][0]*aspectRatio[0];

        ptr = ycoord->GetPointer(0);
        if (!allowedToUseGhosts)
            ptr[0] = probLo[1] + lowJ[patch]*dx[level][1]*aspectRatio[1];
        else
            ptr[0] = probLo[1] + (lowJ[patch]-numGhostJ)*dx[level][1]*aspectRatio[1];

        for (i = 1; i < dims[1]; i++)
            ptr[i] = ptr[0] + i*dx[level][1]*aspectRatio[1];

        if (dimension == 3)
        {
            ptr = zcoord->GetPointer(0);
            if (!allowedToUseGhosts)
                ptr[0] = probLo[2] + lowK[patch]*dx[level][2]*aspectRatio[2];
            else
                ptr[0] = probLo[2] + (lowK[patch]-numGhostK)*dx[level][2]*aspectRatio[2];

            for (i = 1; i < dims[2]; i++)
                ptr[i] = ptr[0] + i*dx[level][2]*aspectRatio[2];
        }
        else
            zcoord->SetTuple1(0, 0.);

        rg->SetXCoordinates(xcoord);
        rg->SetYCoordinates(ycoord);
        rg->SetZCoordinates(zcoord);

        xcoord->Delete();
        ycoord->Delete();
        zcoord->Delete();

        //
        // Determine the indices of the mesh within its group.  Add that to the
        // VTK dataset as field data.
        //
        vtkIntArray *arr = vtkIntArray::New();
        arr->SetNumberOfTuples(3);
        arr->SetValue(0, lowI[patch]);
        arr->SetValue(1, lowJ[patch]);
        arr->SetValue(2, (dimension == 3 ? lowK[patch] : 0));
        arr->SetName("base_index");
        rg->GetFieldData()->AddArray(arr);
        arr->Delete();

        if (dimension >= 4)
        {
            if (dx[level].size() >= 4)
            {
                vtkDoubleArray *dx_arr = vtkDoubleArray::New();
                dx_arr->SetNumberOfTuples(2);
                dx_arr->SetValue(0, dx[level][2]);
                dx_arr->SetValue(1, dx[level][3]);
                dx_arr->SetName("dx_array");
                rg->GetFieldData()->AddArray(dx_arr);
                dx_arr->Delete();
            }
            else
                debug1 << "Warning: Dimension > 3 but dx[level].size() <= 3." << std::endl;

            vtkIntArray *v_base_index = vtkIntArray::New();
            v_base_index->SetNumberOfTuples(2);
            v_base_index->SetValue(0, lowProbK[level]);
            v_base_index->SetValue(1, lowProbL[level]);
            v_base_index->SetName("v_base_index");
            rg->GetFieldData()->AddArray(v_base_index);
            v_base_index->Delete();

            const int outputNK = nodeCentered ? (hiProbK[0] - lowProbK[0] + 2) : (hiProbK[0] - lowProbK[0] + 1);
            const int outputNL = nodeCentered ? (hiProbL[0] - lowProbL[0] + 2) : (hiProbL[0] - lowProbL[0] + 1);
            vtkIntArray *v_dims = vtkIntArray::New();
            v_dims->SetNumberOfTuples(2);
            v_dims->SetValue(0, outputNK);
            v_dims->SetValue(1, outputNL);
            v_dims->SetName("v_dims");
            rg->GetFieldData()->AddArray(v_dims);
            v_dims->Delete();
        }

        if (allowedToUseGhosts && (numGhostI > 0 || numGhostJ > 0 || numGhostK > 0))
        {
            //
            // Store real dims so that pick reports correct indices
            //
            // If: G G R R R R G G G  (G = ghost, R = real)
            //     0 1 2 3 4 5 6 7 9
            // then dims = 9
            // avtRealDims[0] = IDX of node that borders first real zone
            // -> node #2
            // avtRealDims[1] = IDX of node that borders last real zone
            // -> node #6
            //
            arr = vtkIntArray::New();
            arr->SetNumberOfTuples(6);
            arr->SetValue(0, numGhostI);
            arr->SetValue(1, dims[0]-numGhostI-1);
            arr->SetValue(2, numGhostJ);
            arr->SetValue(3, dims[1]-numGhostJ-1);
            arr->SetValue(4, numGhostK);
            arr->SetValue(5, dims[2]-numGhostK-1);
            arr->SetName("avtRealDims");
            rg->GetFieldData()->AddArray(arr);
            arr->Delete();

            //
            // Calculate the problem domian in the current level
            //
            //
            // Generate ghost zone information
            //
            unsigned char realVal = 0, ghostInternal = 0, ghostExternal = 0;
            avtGhostData::AddGhostZoneType(ghostInternal,
                    DUPLICATED_ZONE_INTERNAL_TO_PROBLEM);
            avtGhostData::AddGhostZoneType(ghostExternal,
                    ZONE_EXTERIOR_TO_PROBLEM);

            vtkUnsignedCharArray *ghostCells = vtkUnsignedCharArray::New();
            ghostCells->SetName("avtGhostZones");

            ghostCells->Allocate(rg->GetNumberOfCells());

            if (dimension == 3)
            {
                for (int k=lowK[patch] - numGhostK; k<hiK[patch] + numGhostK; ++k)
                    for (int j=lowJ[patch] - numGhostJ; j<hiJ[patch] + numGhostJ; ++j)
                        for (int i=lowI[patch] - numGhostI; i<hiI[patch] + numGhostI; ++i)
                        {
                            if (i>=lowI[patch] && i<hiI[patch] &&
                                    j>=lowJ[patch] && j<hiJ[patch] &&
                                    k>=lowK[patch] && k<hiK[patch])
                            {
                                ghostCells->InsertNextValue(realVal);
                            }
                            else
                            {
                                if (i>=lowProbI[level] && i<=hiProbI[level] &&
                                        j>=lowProbJ[level] && j<=hiProbJ[level] &&
                                        k>=lowProbK[level] && k<=hiProbK[level])
                                {
                                    ghostCells->InsertNextValue(ghostInternal);
                                }
                                else
                                {
                                    ghostCells->InsertNextValue(ghostExternal);
                                }
                            }
                        }
            }
            else
            {
                for (int j=lowJ[patch] - numGhostJ; j<hiJ[patch] + numGhostJ; ++j)
                    for (int i=lowI[patch] - numGhostI; i<hiI[patch] + numGhostI; ++i)
                    {
                        if (i>=lowI[patch] && i<hiI[patch] &&
                                j>=lowJ[patch] && j<hiJ[patch])
                        {
                            ghostCells->InsertNextValue(realVal);
                        }
                        else
                        {
                            if (i>=lowProbI[level] && i<=hiProbI[level] &&
                                    j>=lowProbJ[level] && j<=hiProbJ[level])
                            {
                                ghostCells->InsertNextValue(ghostInternal);
                            }
                            else
                            {
                                ghostCells->InsertNextValue(ghostExternal);
                            }
                        }
                    }
            }

            rg->GetCellData()->AddArray(ghostCells);
            rg->GetInformation()->Set(
                vtkStreamingDemandDrivenPipeline::UPDATE_NUMBER_OF_GHOST_LEVELS(), 0);
            ghostCells->Delete();
        }

        return rg;
    }
    else if (strcmp(meshname, "particles") == 0)
    {
        if (!initializedReader)
            InitializeReader();

        if (file_handle < 0)
        {
            file_handle = OpenHDF5File(filenames[0]);
            if (file_handle < 0)
            {
                EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, since "
                        "it is not even an HDF5 file.");
            }
        }

        const char particlesGroupName[] = "/particles/";
        const char posVarname[] = "position_x";
        char *datasetname = new char[strlen(particlesGroupName)+strlen(posVarname)+1];
        std::strcpy(datasetname, particlesGroupName);
        std::strcat(datasetname, posVarname);

        hsize_t nParticles = 0;
        double *xPos = 0;
        double *yPos = 0;
        double *zPos = 0;

        hid_t dataSet = H5Dopen(file_handle, datasetname);
        if ( dataSet > 0)
        {
            hid_t dataSpace = H5Dget_space(dataSet);
            if (H5Sis_simple(dataSpace))
            {
                if (H5Sget_simple_extent_ndims(dataSpace) == 1)
                {
                    H5Sget_simple_extent_dims(dataSpace, &nParticles, NULL);
                    xPos = new double[nParticles];
                    H5Dread(dataSet, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, xPos);
                }
                H5Dclose(dataSet);
            }
            else
            {
                H5Dclose(dataSet);
                H5Fclose(file_handle);
                file_handle = -1;
                EXCEPTION1(InvalidDBTypeException, "x coordinate data set "
                        "does not have rank two.");
            }
        }
        else
        {
            EXCEPTION1(InvalidDBTypeException, "Cannot open x coordinate data set!");
        }

        datasetname[strlen(datasetname)-1] = 'y';
        dataSet = H5Dopen(file_handle, datasetname);
        if ( dataSet > 0)
        {
            hid_t dataSpace = H5Dget_space(dataSet);
            if (H5Sis_simple(dataSpace))
            {
                hsize_t nParticlesCheck;
                if (H5Sget_simple_extent_ndims(dataSpace) == 1)
                {
                    H5Sget_simple_extent_dims(dataSpace, &nParticlesCheck, NULL);
                    if (nParticles == nParticlesCheck)
                    {
                        yPos = new double[nParticles];
                        H5Dread(dataSet, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, yPos);
                    }
                    else
                    {
                        delete[] xPos;
                        delete[] datasetname;
                        H5Dclose(dataSet);
                        H5Fclose(file_handle);
                        file_handle = -1;
                        EXCEPTION1(InvalidDBTypeException, "y coordinate data set "
                        "has different size from x coordinate data set.");
                    }
                }
                H5Dclose(dataSet);
            }
            else
            {
                delete[] xPos;
                delete[] datasetname;
                H5Dclose(dataSet);
                H5Fclose(file_handle);
                file_handle = -1;
                EXCEPTION1(InvalidDBTypeException, "y coordinate data set "
                        "does not have rank two.");
            }
        }
        else
        {
            delete[] xPos;
            delete[] datasetname;
            EXCEPTION1(InvalidDBTypeException, "Cannot open y coordinate data set!");
        }

        if (dimension > 2)
        {
            datasetname[strlen(datasetname)-1] = 'z';
            dataSet = H5Dopen(file_handle, datasetname);
            if ( dataSet > 0)
            {
                hid_t dataSpace = H5Dget_space(dataSet);
                if (H5Sis_simple(dataSpace))
                {
                    hsize_t nParticlesCheck;
                    if (H5Sget_simple_extent_ndims(dataSpace) == 1)
                    {
                        H5Sget_simple_extent_dims(dataSpace, &nParticlesCheck, NULL);
                        if (nParticles == nParticlesCheck)
                        {
                            zPos = new double[nParticles];
                            H5Dread(dataSet, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, zPos);
                        }
                        else
                        {
                            delete[] xPos;
                            delete[] yPos;
                            delete[] datasetname;
                            H5Dclose(dataSet);
                            H5Fclose(file_handle);
                            file_handle = -1;
                            EXCEPTION1(InvalidDBTypeException, "z coordinate data set "
                                    "has different size from x coordinate data set.");
                        }
                    }
                    H5Dclose(dataSet);
                }
                else
                {
                    delete[] xPos;
                    delete[] yPos;
                    delete[] datasetname;
                    H5Dclose(dataSet);
                    H5Fclose(file_handle);
                    file_handle = -1;
                    EXCEPTION1(InvalidDBTypeException, "z coordinate data set "
                            "does not have rank two.");
                }
            }
            else
            {
                delete[] xPos;
                delete[] yPos;
                delete[] datasetname;
                EXCEPTION1(InvalidDBTypeException, "Cannot open z coordinate data set!");
            }
        }

        int *particleOrder = 0;
        int *polymerNo = 0;

        if (connectParticles &&
            std::find(
                particleVarnames.begin(), particleVarnames.end(), "particle_nid"
                     ) != particleVarnames.end() &&
            std::find(
                particleVarnames.begin(), particleVarnames.end(), "polymer_id"
                     ) != particleVarnames.end())
        {
            dataSet = H5Dopen(file_handle, "/particles/particle_nid");

            if ( dataSet > 0)
            {
                hid_t dataSpace = H5Dget_space(dataSet);
                if (H5Sis_simple(dataSpace))
                {
                    hsize_t nParticlesCheck;
                    if (H5Sget_simple_extent_ndims(dataSpace) == 1)
                    {
                        H5Sget_simple_extent_dims(dataSpace, &nParticlesCheck, NULL);
                        if (nParticles == nParticlesCheck)
                        {
                            particleOrder = new int[nParticles];
                            H5Dread(dataSet, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, particleOrder);
                        }
                    }
                }
                H5Dclose(dataSet);
            }

            dataSet = H5Dopen(file_handle, "/particles/polymer_id");
            if ( dataSet > 0)
            {
                hid_t dataSpace = H5Dget_space(dataSet);
                if (H5Sis_simple(dataSpace))
                {
                    hsize_t nParticlesCheck;
                    if (H5Sget_simple_extent_ndims(dataSpace) == 1)
                    {
                        H5Sget_simple_extent_dims(dataSpace, &nParticlesCheck, NULL);
                        if (nParticles == nParticlesCheck)
                        {
                            polymerNo = new int[nParticles];
                            H5Dread(dataSet, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, polymerNo);
                        }
                    }
                }
                H5Dclose(dataSet);
            }

            // Check if both data sets for connecting particles were loaded
            if (!(particleOrder && polymerNo))
            {
                // No, delete any that may be loaded and set pointers to zero
                delete[] particleOrder;
                particleOrder = 0;
                delete[] polymerNo;
                polymerNo = 0;
            }
        }

        H5Fclose(file_handle);
        file_handle = -1;

        vtkPoints *points = vtkPoints::New();
        points->SetNumberOfPoints(nParticles);
        if (dimension > 2)
            for (size_t i=0; i <nParticles; ++i) points->SetPoint(i, xPos[i], yPos[i], zPos[i]);
        else
            for (size_t i=0; i <nParticles; ++i) points->SetPoint(i, xPos[i], yPos[i], 0);

        delete[] xPos;
        delete[] yPos;
        delete[] zPos;
        delete[] datasetname;

        if (particleOrder)
        {
            // Create an integer array with proper permutation of particles such that
            // all particles on same polymer are "next" to each other and that particles
            // within a polymer are ordered according to particle_nid
            vtkIdType *orderPermutation = new vtkIdType[nParticles];
            for (vtkIdType i=0; i<static_cast<vtkIdType>(nParticles); ++i) orderPermutation[i]=i;

            LookUpOrderCmp cmp(polymerNo, particleOrder);
            std::sort(orderPermutation, orderPermutation+nParticles, cmp);

            //
            // Create poly data
            //
            vtkPolyData *pd = vtkPolyData::New();
            pd->SetPoints(points);
            points->Delete();
            vtkCellArray *verts = vtkCellArray::New();
            pd->SetVerts(verts);
            verts->Delete();
            for (vtkIdType i= 0; i<static_cast<vtkIdType>(nParticles); ++i)
            {
                verts->InsertNextCell(1);
                verts->InsertCellPoint(i);
            }
            vtkCellArray *lines = vtkCellArray::New();
            pd->SetLines(lines);
            lines->Delete();
            for (vtkIdType i=0; i<static_cast<vtkIdType>(nParticles)-1; ++i)
            {
                if (polymerNo[i] == polymerNo[i+1])
                {
                    lines->InsertNextCell(2);
                    lines->InsertCellPoint(orderPermutation[i]);
                    lines->InsertCellPoint(orderPermutation[i+1]);
                }
            }

            delete[] particleOrder;
            delete[] polymerNo;

            return pd;
        }
        else {
            //
            // Create a vtkUnstructuredGrid to contain the point cells.
            //
            vtkUnstructuredGrid* ugrid = vtkUnstructuredGrid::New();
            ugrid->SetPoints(points);
            points->Delete();
            ugrid->Allocate(nParticles);
            vtkIdType onevertex;
            for(size_t i = 0; i < nParticles; ++i)
            {
                onevertex = i;
                ugrid->InsertNextCell(VTK_VERTEX, 1, &onevertex);
            }

            return ugrid;
        }
    }
    else
        EXCEPTION1(InvalidVariableException, meshname);
}


// ****************************************************************************
//  Method: avtChomboFileFormat::GetVar
//
//  Purpose:
//      Gets a scalar variable associated with this file.  Although VTK has
//      support for many different types, the best bet is vtkFloatArray, since
//      that is supported everywhere through VisIt.
//
//  Arguments:
//      domain     The index of the domain.  If there are NDomains, this
//                 value is guaranteed to be between 0 and NDomains-1,
//                 regardless of block origin.
//      varname    The name of the variable requested.
//
//  Programmer: childs -- generated by xml2avt
//  Creation:   Thu Jan 19 11:17:14 PDT 2006
//
//  Modifications:
//
//    Mark C. Miller, Thu Apr  6 17:06:33 PDT 2006
//    Added conditional compilation for hssize_t type
//
//    Hank Childs, Tue Jun 20 08:25:45 PDT 2006
//    Add support for ghost zones.
//
//    Gunther H. Weber, Wed Oct  3 17:34:56 PDT 2007
//    Do not strip ghost zones from file and instead add appropriate
//    ghost zone field data (in GetMesh()).
//
//    Hank Childs, Mon Oct  8 17:22:26 PDT 2007
//    Make Gunther's code go live using DB options.
//
//    Gunther H. Weber, Mon Nov  5 17:07:26 PST 2007
//    Use 64-bit arithmetic for offset calculations.
//
//    Gunther H. Weber, Mon Feb  4 14:37:49 PST 2008
//    Read doubles instead of floats from Chombo files.
//
//    Gunther H. Weber, Mon Mar 24 20:46:04 PDT 2008
//    Added support for node centered Chombo data.
//
//    Hank Childs, Sun Jan 25 15:55:58 PST 2009
//    Change test for whether or not we are allowed to use ghost data.
//
//    Gunther H. Weber, Wed Mar 25 13:31:56 PDT 2009
//    Open and close file to prevent file handle depletion
//
//    Gunther H. Weber, Wed Jun 10 18:28:24 PDT 2009
//    Added ability to handle particle data in Chombo files.
//
//    Tom Fogal, Fri Aug  6 16:29:16 MDT 2010
//    Add support for resolution selection contract.
//
//    Gunther H. Weber, Thu Aug 15 11:37:51 PDT 2013
//    Initial bare-bones support for 4D Chombo files (fairly limited and 
//    "hackish")
//
//    Mark C. Miller, Wed Feb  9 13:39:31 PST 2022
//    Use new method, OpenHDF5File, to open the file.
// ****************************************************************************

vtkDataArray *
avtChomboFileFormat::GetVar(int patch, const char *varname)
{
    // 4D Chombo data is routed exclusively through GetVectorVar(); reaching
    // this method with a 4D file indicates a logic error elsewhere.
    if (dimension == 4)
        EXCEPTION1(InvalidVariableException,
                "Internal error: Trying to use GetVar() on a 4D Chombo file. Please contact a VisIt developer.");

    if (!initializedReader)
        InitializeReader();

    //
    // Map the requested variable name to its index in the file's variable
    // list.  varIdx stays -1 for names not stored per-patch (e.g. particle
    // variables, which are handled in the else branch below).
    //
    int varIdx = -1;
    int nVars = (int)varnames.size();
    for (int i = 0 ; i < nVars ; i++)
    {
        if (varnames[i] == varname)
        {
            varIdx = i;
            break;
        }
    }
    if (varIdx >= 0)
    {
        int level, local_patch;
        GetLevelAndLocalPatchNumber(patch, level, local_patch);

        if (level >= num_levels)
        {
            EXCEPTION1(InvalidVariableException, varname);
        }
        if (level > static_cast<int>(this->resolution))
        {
            std::ostringstream err;
            err << "Level '" << level << "' exceeds current resolution, '"
                << this->resolution << "'.";
            EXCEPTION1(ImproperUseException, err.str());
        }

        if (local_patch >= patchesPerLevel[level])
        {
            EXCEPTION2(BadDomainException, local_patch, patchesPerLevel[level]);
        }

        // Ghost zone counts for this level.  numGhosts stores four entries
        // per level (I/J/K/L); the L entry is unused for non-4D files.
        hsize_t numGhostI = numGhosts[4*level];
        hsize_t numGhostJ = numGhosts[4*level+1];
        hsize_t numGhostK = numGhosts[4*level+2];

        //
        // Figure out how much data to read and what it's offset is into the
        // bigger array.  This will be needed so we can read a hyperslab from
        // the HDF file.
        //
        int patchStart = patch-local_patch;
        hsize_t nvals = 0;
        for (int i = patchStart ; i < patch ; i++)
        {
            // Node centered data has one more value per dimension than
            // zone centered data; ghost layers pad both sides.
            hsize_t numZones = nodeCentered ?
                (hsize_t(hiI[i]-lowI[i]+1)+2*numGhostI)
                * (hsize_t(hiJ[i]-lowJ[i]+1)+2*numGhostJ) :
                (hsize_t(hiI[i]-lowI[i])+2*numGhostI)
                * (hsize_t(hiJ[i]-lowJ[i])+2*numGhostJ);
            if (dimension == 3)
            {
                if (nodeCentered)
                    numZones *= hsize_t(hiK[i]-lowK[i]+1)+2*numGhostK;
                else
                    numZones *= hsize_t(hiK[i]-lowK[i])+2*numGhostK;
            }
            // All variables for a patch are stored contiguously, so skip
            // over every variable of every preceding patch on this level.
            nvals += numZones*nVars;
        }

#if HDF5_VERSION_GE(1,6,4)
        hsize_t start = nvals;
#else
        hssize_t start = nvals;
#endif
        // Number of values in this patch (including ghost layers).
        hsize_t amt = nodeCentered ?
            (hsize_t(hiI[patch]-lowI[patch]+1)+2*numGhostI)
            * (hsize_t(hiJ[patch]-lowJ[patch]+1)+2*numGhostJ) :
            (hsize_t(hiI[patch]-lowI[patch])+2*numGhostI)
            * (hsize_t(hiJ[patch]-lowJ[patch])+2*numGhostJ);
        if (dimension == 3)
        {
            if (nodeCentered)
                amt *= hsize_t(hiK[patch]-lowK[patch]+1)+2*numGhostK;
            else
                amt *= hsize_t(hiK[patch]-lowK[patch])+2*numGhostK;
        }

        // Skip over this patch's preceding variables to land on varIdx.
        start += amt*varIdx;

        if (amt > static_cast<hsize_t>(std::numeric_limits<vtkIdType>::max()))
        {
            EXCEPTION1(InvalidFilesException, "Grid contains more cells than installed "
                    "VTK can handle. Installing a VTK version with 64-bit indices "
                    "enabled may help.");
        }

        vtkDoubleArray *farr = vtkDoubleArray::New();
        farr->SetNumberOfComponents(1);
        farr->SetNumberOfTuples(amt);
        double *ptr = farr->GetPointer(0);

        //
        // Now do the HDF magic.  Disclosure: this code was cobbled together
        // from examples I found on the internet.  If you think that there is a
        // more efficient way to do this (in lines of code or in performance), you
        // are probably right...
        //
        char name[1024];
        snprintf(name, 1024, "level_%d", level);
        if (file_handle < 0)
        {
            file_handle = OpenHDF5File(filenames[0]);
            if (file_handle < 0)
            {
                farr->Delete();
                EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, since "
                        "it is not even an HDF5 file.");
            }
        }
        hid_t level_id = H5Gopen(file_handle, name);
        if (level_id < 0)
        {
            farr->Delete();
            EXCEPTION1(InvalidFilesException, "Chombo file does not contain group for requested level.");
        }

        hid_t data = H5Dopen(level_id, "data:datatype=0");
        if (data < 0)
        {
            H5Gclose(level_id);
            farr->Delete();
            EXCEPTION1(InvalidFilesException, "Level does not contain data.");
        }

        hid_t space_id = H5Dget_space(data);
        hid_t rank     = H5Sget_simple_extent_ndims(space_id);
        if (rank != 1)
        {
            H5Sclose(space_id);
            H5Dclose(data);
            H5Gclose(level_id);
            farr->Delete();
            EXCEPTION1(InvalidFilesException, "Rank of dataspace differs from one.");
        }

        // dims is only read to validate the dataspace; the extent itself is
        // not needed because start/amt were computed from the box metadata.
        hsize_t dims[1];
        int status_n   = H5Sget_simple_extent_dims(space_id, dims, NULL);
        if (status_n < 0)
        {
            H5Sclose(space_id);
            H5Dclose(data);
            H5Gclose(level_id);
            farr->Delete();
            EXCEPTION1(InvalidFilesException, "Cannot get dataspace dimensions.");
        }

        // Read only this patch's values for this variable.
        H5Sselect_hyperslab(space_id, H5S_SELECT_SET, &start, NULL, &amt, NULL);

        hid_t memdataspace = H5Screate_simple(1, &amt, NULL);

        H5Dread(data, H5T_NATIVE_DOUBLE, memdataspace, space_id, H5P_DEFAULT, ptr);

        H5Sclose(memdataspace);
        H5Sclose(space_id);
        H5Dclose(data);
        H5Gclose(level_id);

        if (!allowedToUseGhosts)
        {
            //
            // Strip out the ghost information.  Note: this is probably an inefficient
            // path.  We would probably be better served leaving the ghost information
            // and not creating ghost zones later in the process.  But that requires
            // handling for external boundaries to the problem, which are not in place
            // yet.
            //
            if (numGhostI > 0 || numGhostJ > 0 || numGhostK > 0)
            {
                vtkDoubleArray *new_farr = vtkDoubleArray::New();
                size_t new_amt = nodeCentered ?
                    size_t(hiI[patch]-lowI[patch]+1)
                    * size_t(hiJ[patch]-lowJ[patch]+1) :
                    size_t(hiI[patch]-lowI[patch])
                    * size_t(hiJ[patch]-lowJ[patch]);
                if (dimension == 3)
                {
                    if (nodeCentered)
                        new_amt *= (hiK[patch]-lowK[patch]+1);
                    else
                        new_amt *= (hiK[patch]-lowK[patch]);
                }
                new_farr->SetNumberOfTuples(new_amt);

                // Interior (ghost-free) extents ...
                size_t nJ = nodeCentered ? hiJ[patch] - lowJ[patch] + 1 : hiJ[patch] - lowJ[patch];
                size_t nI = nodeCentered ? hiI[patch] - lowI[patch] + 1 : hiI[patch] - lowI[patch];

                // ... and the extents of the data as stored (with ghosts).
                size_t nJ2 = nJ + 2*numGhostJ;
                size_t nI2 = nI + 2*numGhostI;

                double *new_ptr = new_farr->GetPointer(0);
                double *old_ptr = farr->GetPointer(0);
                if (dimension == 3)
                {
                    size_t nK = nodeCentered ? hiK[patch] - lowK[patch] + 1 : hiK[patch] - lowK[patch];
                    for (size_t k = 0 ; k < nK ; k++)
                        for (size_t j = 0 ; j < nJ ; j++)
                            for (size_t i = 0 ; i < nI ; i++)
                            {
                                size_t idx_new = hsize_t(k)*nJ*nI + j*nI + i;
                                size_t idx_old = (k+numGhostK)*nJ2*nI2 + (j+numGhostJ)*nI2
                                    + (i+numGhostI);
                                new_ptr[idx_new] = old_ptr[idx_old];
                            }
                }
                else
                {
                    for (size_t j = 0 ; j < nJ ; j++)
                        for (size_t i = 0 ; i < nI ; i++)
                        {
                            size_t idx_new = j*nI + i;
                            size_t idx_old = (j+numGhostJ)*nI2 + (i+numGhostI);
                            new_ptr[idx_new] = old_ptr[idx_old];
                        }
                }
                farr->Delete();
                farr = new_farr;
            }
        }

        return farr;
    }
    else
    {
        if (hasParticles)
        {
            if (file_handle < 0)
            {
                file_handle = OpenHDF5File(filenames[0]);
                if (file_handle < 0)
                {
                    EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, since "
                            "it is not even an HDF5 file.");
                }
            }

            // Particle variables are stored as 1D datasets named
            // "/particles/<varname>".
            const char particlesGroupName[] = "/particles/";
            std::string datasetname = std::string(particlesGroupName) + varname;
            hsize_t nParticles = 0;
            hid_t dataSet = H5Dopen(file_handle, datasetname.c_str());
            if (dataSet >= 0)
            {
                hid_t dataSpace = H5Dget_space(dataSet);
                if (H5Sis_simple(dataSpace))
                {
                    if (H5Sget_simple_extent_ndims(dataSpace) == 1)
                    {
                        H5Sget_simple_extent_dims(dataSpace, &nParticles, NULL);
                        vtkDoubleArray *array = vtkDoubleArray::New();
                        array->SetNumberOfTuples(nParticles);
                        double *vals = (double *) array->GetVoidPointer(0);
                        H5Dread(dataSet, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, vals);
                        H5Sclose(dataSpace);
                        H5Dclose(dataSet);
                        H5Fclose(file_handle);
                        file_handle = -1;
                        return array;
                    }
                    // Unexpected rank: release all handles and fall through
                    // to the NULL return below.
                    H5Sclose(dataSpace);
                    H5Dclose(dataSet);
                    H5Fclose(file_handle);
                    file_handle = -1;
                }
                else
                {
                    H5Sclose(dataSpace);
                    H5Dclose(dataSet);
                    H5Fclose(file_handle);
                    file_handle = -1;

                    EXCEPTION1(InvalidVariableException, "Variable data set "
                            "does not have a simple dataspace.");
                }
            }
            else
            {
                H5Fclose(file_handle);
                file_handle = -1;
                EXCEPTION1(InvalidVariableException, "Cannot open variable data set!");
            }
        }
        else
        {
            EXCEPTION1(InvalidVariableException, varname);
        }
    }
    return NULL;
}

// ****************************************************************************
//  Method: avtChomboFileFormat::GetVectorVar
//
//  Purpose:
//      Gets a vector variable associated with this file.  Although VTK has
//      support for many different types, the best bet is vtkFloatArray, since
//      that is supported everywhere through VisIt.
//
//  Arguments:
//      domain     The index of the domain.  If there are NDomains, this
//                 value is guaranteed to be between 0 and NDomains-1,
//                 regardless of block origin.
//      varname    The name of the variable requested.
//
//  Programmer: childs -- generated by xml2avt
//  Creation:   Thu Jan 19 11:17:14 PDT 2006
//
//  Modifications:
//    Gunther H. Weber, Thu Aug 15 11:37:51 PDT 2013
//    Initial bare-bones support for 4D Chombo files (fairly limited and 
//    "hackish")
//
//    Mark C. Miller, Wed Feb  9 13:40:08 PST 2022
//    Use new method, OpenHDF5File, to open file.
// ****************************************************************************

vtkDataArray *
avtChomboFileFormat::GetVectorVar(int patch, const char *varname)
{
    if (dimension == 4) patch = listOfRepresentativeBoxes[patch];

    if (!initializedReader)
        InitializeReader();

    int varIdx = -1;
    int nVars = (int)varnames.size();
    for (int i = 0; i < nVars; i++)
    {
        if (varnames[i] == varname)
        {
            varIdx = i;
            break;
        }
    }
    if (varIdx >= 0)
    {
        int level, local_patch;
        GetLevelAndLocalPatchNumber(patch, level, local_patch);

        hsize_t numGhostI = numGhosts[4*level];
        hsize_t numGhostJ = numGhosts[4*level+1];
        hsize_t numGhostK = numGhosts[4*level+2];
        hsize_t numGhostL = numGhosts[4*level+3];

        if (dimension == 4 && (numGhostK != 0 || numGhostL != 0))
            EXCEPTION1(ImproperUseException, "Ghost zones in K and L dimension not yet supoorted.");

        hsize_t num_tuples = nodeCentered ?
            (hsize_t(hiI[patch]-lowI[patch]+1)+2*numGhostI) * (hsize_t(hiJ[patch]-lowJ[patch]+1)+2*numGhostJ) :
            (hsize_t(hiI[patch]-lowI[patch])+2*numGhostI) * (hsize_t(hiJ[patch]-lowJ[patch])+2*numGhostJ);
        if (dimension == 3)
        {
            if (nodeCentered)
                num_tuples *= hsize_t(hiK[patch]-lowK[patch]+1)+2*numGhostK;
            else
                num_tuples *= hsize_t(hiK[patch]-lowK[patch])+2*numGhostK;
        }

        if (num_tuples > static_cast<hsize_t>(std::numeric_limits<vtkIdType>::max()))
        {
            EXCEPTION1(InvalidFilesException, "Grid contains more cells than installed "
                    "VTK can handle. Installing a VTK version with 64-bit indices "
                    "enabled may help.");
        }

        const int outputNK = nodeCentered ? (hiProbK[0] - lowProbK[0] + 2) : (hiProbK[0] - lowProbK[0] + 1);
        const int outputNL = nodeCentered ? (hiProbL[0] - lowProbL[0] + 2) : (hiProbL[0] - lowProbL[0] + 1);
        const int num_array_comps = outputNK * outputNL;
        vtkDoubleArray *farr = vtkDoubleArray::New();
        farr->SetNumberOfComponents(num_array_comps);
        farr->SetNumberOfTuples(num_tuples);
        double *ptr = farr->GetPointer(0);
        size_t sz = farr->GetNumberOfComponents() * farr->GetNumberOfTuples();
        for (size_t it = 0; it < sz; ++it) ptr[it] = std::numeric_limits<double>::quiet_NaN();

        for (std::vector<int>::const_iterator it = representedBoxes[patch].begin(); it != representedBoxes[patch].end(); ++it)
        {
            int patch = *it;
            int level, local_patch;
            GetLevelAndLocalPatchNumber(patch, level, local_patch);

            if (level >= num_levels)
            {
                EXCEPTION1(InvalidVariableException, varname);
            }
            if (level > static_cast<int>(this->resolution))
            {
                std::ostringstream err;
                err << "Level '" << level << "' exceeds current resolution, '"
                    << this->resolution << "'.";
                EXCEPTION1(ImproperUseException, err.str());
            }

            if (local_patch >= patchesPerLevel[level])
            {
                EXCEPTION2(BadDomainException, local_patch, patchesPerLevel[level]);
            }

            //
            // Figure out how much data to read and what it's offset is into the
            // bigger array.  This will be needed so we can read a hyperslab from
            // the HDF file.
            //
            int patchStart = patch-local_patch;
            hsize_t nvals = 0;
            for (int i = patchStart ; i < patch ; i++)
            {
                hsize_t numZones = nodeCentered ?
                    (hsize_t(hiI[i]-lowI[i]+1)+2*numGhostI)
                    * (hsize_t(hiJ[i]-lowJ[i]+1)+2*numGhostJ) :
                    (hsize_t(hiI[i]-lowI[i])+2*numGhostI)
                    * (hsize_t(hiJ[i]-lowJ[i])+2*numGhostJ);
                if (dimension >= 3)
                {
                    if (nodeCentered)
                        numZones *= hsize_t(hiK[i]-lowK[i]+1)+2*numGhostK;
                    else
                        numZones *= hsize_t(hiK[i]-lowK[i])+2*numGhostK;
                }
                if (dimension == 4)
                {
                    if (nodeCentered)
                        numZones *= hsize_t(hiL[i]-lowL[i]+1)+2*numGhostL;
                    else
                        numZones *= hsize_t(hiL[i]-lowL[i])+2*numGhostL;
                }
                nvals += numZones*nVars;
            }

#if HDF5_VERSION_GE(1,6,4)
            hsize_t start = nvals;
#else
            hssize_t start = nvals;
#endif
            hsize_t amt = nodeCentered ?
                (hsize_t(hiI[patch]-lowI[patch]+1)+2*numGhostI)
                * (hsize_t(hiJ[patch]-lowJ[patch]+1)+2*numGhostJ) :
                (hsize_t(hiI[patch]-lowI[patch])+2*numGhostI)
                * (hsize_t(hiJ[patch]-lowJ[patch])+2*numGhostJ);
            if (dimension >= 3)
            {
                if (nodeCentered)
                    amt *= hsize_t(hiK[patch]-lowK[patch]+1)+2*numGhostK;
                else
                    amt *= hsize_t(hiK[patch]-lowK[patch])+2*numGhostK;
            }
            if (dimension == 4)
            {
                if (nodeCentered)
                    amt *= hsize_t(hiL[patch]-lowL[patch]+1)+2*numGhostL;
                else
                    amt *= hsize_t(hiL[patch]-lowL[patch])+2*numGhostL;
            }

            start += amt*varIdx;

            //
            // Now do the HDF magic.  Disclosure: this code was cobbled together
            // from examples I found on the internet.  If you think that there is a
            // more efficient way to do this (in lines of code or in performance), you
            // are probably right...
            //
            char name[1024];
            snprintf(name, 1024, "level_%d", level);
            if (file_handle < 0)
            {
                file_handle = OpenHDF5File(filenames[0]);
                if (file_handle < 0)
                {
                    EXCEPTION1(InvalidDBTypeException, "Cannot be a Chombo file, since "
                            "it is not even an HDF5 file.");
                }
            }
            hid_t level_id = H5Gopen(file_handle, name);
            if (level_id < 0)
            {
                EXCEPTION1(InvalidFilesException, "Chombo file does not contain group for requested level.");
            }

            hid_t data = H5Dopen(level_id, "data:datatype=0");
            if (data < 0)
            {
                EXCEPTION1(InvalidFilesException, "Level does not contain data.");
            }

            hid_t space_id = H5Dget_space(data);
            hid_t rank     = H5Sget_simple_extent_ndims(space_id);
            if (rank != 1)
            {
                EXCEPTION1(InvalidFilesException, "Rank of dataspace differs from one.");
            }

            hsize_t dims[1];
            int status_n   = H5Sget_simple_extent_dims(space_id, dims, NULL);
            if (status_n < 0)
            {
                EXCEPTION1(InvalidFilesException, "Cannot get dataspace dimensions.");
            }

            H5Sselect_hyperslab(space_id, H5S_SELECT_SET, &start, NULL, &amt, NULL);

            hid_t memdataspace = H5Screate_simple(1, &amt, NULL);

            double *tmp = new double[amt];
            H5Dread(data, H5T_NATIVE_DOUBLE, memdataspace, space_id, H5P_DEFAULT, tmp);
            H5Sclose(memdataspace);
            H5Sclose(space_id);
            H5Dclose(data);
            H5Gclose(level_id);

            int nI = nodeCentered ? (hsize_t(hiI[patch]-lowI[patch]+1)+2*numGhostI) :
                (hsize_t(hiI[patch]-lowI[patch])+2*numGhostI);
            int nJ = nodeCentered ? (hsize_t(hiJ[patch]-lowJ[patch]+1)+2*numGhostJ) :
                (hsize_t(hiJ[patch]-lowJ[patch])+2*numGhostJ);
            int nK = nodeCentered ? (hsize_t(hiK[patch]-lowK[patch]+1)+2*numGhostK) :
                (hsize_t(hiK[patch]-lowK[patch])+2*numGhostK);
            int nL = nodeCentered ? (hsize_t(hiL[patch]-lowL[patch]+1)+2*numGhostL) :
                (hsize_t(hiL[patch]-lowL[patch])+2*numGhostL);

            for (int i = 0; i < nI; ++i)
                for (int j = 0; j < nJ; ++j)
                    for (int k = 0; k < nK; ++k)
                        for (int l = 0; l < nL; ++l)
                        {
                            size_t from_idx = (((l*nK)+k)*nJ+j)*nI+i;
                            size_t to_idx = (j*nI+i)*farr->GetNumberOfComponents()+(k+lowK[local_patch]-lowProbK[level]-numGhostK)*outputNL+l+lowL[local_patch]-lowProbL[level]-numGhostL;
#if 0
                            if (from_idx >= amt)
                            {
                                std::cerr << "Invalid read: "  << i << " " << j << " " << k << " " << l << " -> " << from_idx << "/" << amt << std::endl;
                                continue;
                            }
                            if (to_idx >= sz)
                            {
                                std::cerr << "Invalid write: " << i << " " << j << " " << k << " " << l << " -> " << to_idx << "/" << sz << std::endl;
                                continue;
                            }
#endif
                            ptr[to_idx] = tmp[from_idx];
                        }
            delete[] tmp;

            if (!allowedToUseGhosts)
            {
                //
                // Strip out the ghost information.  Note: this is probably an inefficient
                // path.  We would probably be better served leaving the ghost information
                // and not creating ghost zones later in the process.  But that requires
                // handling for external boundaries to the problem, which are not in place
                // yet.
                //
                if (numGhostI > 0 || numGhostJ > 0 || numGhostK > 0)
                {
                    EXCEPTION1(ImproperUseException, "Stripping ghost zones not implemented for array variables.");
                }
            }
        }
        return farr;
    }
    else
    {
        enum {none, vpar, mu} generatedVar = none;
        if (strcmp(varname, "_vpar") == 0)
        {
            generatedVar = vpar;
        }
        else if (strcmp(varname, "_mu") == 0)
        {
            generatedVar = mu;
        }
        else
            EXCEPTION1(InvalidVariableException, varname);

        if (nodeCentered)
            EXCEPTION1(InvalidVariableException, varname);

        int level, local_patch;
        GetLevelAndLocalPatchNumber(patch, level, local_patch);

        hsize_t numGhostI = numGhosts[4*level];
        hsize_t numGhostJ = numGhosts[4*level+1];
        hsize_t numGhostK = numGhosts[4*level+2];
        hsize_t numGhostL = numGhosts[4*level+3];

        if (dimension == 4 && (numGhostK != 0 || numGhostL != 0))
            EXCEPTION1(ImproperUseException, "Ghost zones in K and L dimension not yet supoorted.");

        hsize_t num_tuples = nodeCentered ?
            (hsize_t(hiI[patch]-lowI[patch]+1)+2*numGhostI) * (hsize_t(hiJ[patch]-lowJ[patch]+1)+2*numGhostJ) :
            (hsize_t(hiI[patch]-lowI[patch])+2*numGhostI) * (hsize_t(hiJ[patch]-lowJ[patch])+2*numGhostJ);
        if (dimension == 3)
        {
            if (nodeCentered)
                num_tuples *= hsize_t(hiK[patch]-lowK[patch]+1)+2*numGhostK;
            else
                num_tuples *= hsize_t(hiK[patch]-lowK[patch])+2*numGhostK;
        }

        if (num_tuples > static_cast<hsize_t>(std::numeric_limits<vtkIdType>::max()))
        {
            EXCEPTION1(InvalidFilesException, "Grid contains more cells than installed "
                    "VTK can handle. Installing a VTK version with 64-bit indices "
                    "enabled may help.");
        }

        const int outputNK = nodeCentered ? (hiProbK[0] - lowProbK[0] + 2) : (hiProbK[0] - lowProbK[0] + 1);
        const int outputNL = nodeCentered ? (hiProbL[0] - lowProbL[0] + 2) : (hiProbL[0] - lowProbL[0] + 1);
        const int num_array_comps = outputNK * outputNL;
        vtkDoubleArray *farr = vtkDoubleArray::New();
        farr->SetNumberOfComponents(num_array_comps);
        farr->SetNumberOfTuples(num_tuples);
        vtkIdType nTuples = (vtkIdType)num_tuples;

        if (generatedVar == vpar)
            for (vtkIdType tuple = 0; tuple < nTuples; ++tuple)
                for (int k =0; k < outputNK; ++k)
                    for (int l = 0; l < outputNL; ++l)
                        farr->SetComponent(tuple, k*outputNL+l, (lowProbK[0] + k + 0.5) * dx[0][2]);
        else if (generatedVar == mu)
            for (vtkIdType tuple = 0; tuple < nTuples; ++tuple)
                for (int k =0; k < outputNK; ++k)
                    for (int l = 0; l < outputNL; ++l)
                        farr->SetComponent(tuple, k*outputNL+l, (lowProbL[0] + l + 0.5) * dx[0][3]);
        else
            EXCEPTION1(ImproperUseException, "Internal error.");

        return farr;
    }

    return NULL;
}


// ****************************************************************************
//  Method: avtChomboFileFormat::GetAuxiliaryData
//
//  Purpose:
//      Gets the auxiliary data specified.
//
//  Arguments:
//      var        The variable of interest.
//      dom        The domain of interest.
//      type       The type of auxiliary data.
//      <unnamed>  The arguments for that type -- not used.
//      df         Destructor function.
//
//  Returns:    The auxiliary data.
//
//  Programmer: Hank Childs
//  Creation:   January 22, 2006
//
//  Modifications:
//    Kathleen Bonnell, Mon Aug 14 16:40:30 PDT 2006
//    API change for avtIntervalTree.
//
//    Gunther H. Weber, Tue Aug  7 16:01:28 PDT 2007
//    Return material information
//
//    Gunther H. Weber, Thu Aug 15 11:37:51 PDT 2013
//    Initial bare-bones support for 4D Chombo files (fairly limited and 
//    "hackish")
//
// ****************************************************************************

void *
avtChomboFileFormat::GetAuxiliaryData(const char *var, int dom,
                                      const char * type, void *,
                                      DestructorFunction &df)
{
    if (strcmp(type, AUXILIARY_DATA_MATERIAL) == 0)
    {
        return GetMaterial(var, dom, type, df);
    }
    else if (strcmp(type, AUXILIARY_DATA_SPATIAL_EXTENTS) == 0)
    {
        int totalPatches = 0;
        for (int level = 0 ; level < num_levels ; level++)
            totalPatches += patchesPerLevel[level];

        avtIntervalTree *itree;
        if (dimension < 4)
        {
            itree = new avtIntervalTree(totalPatches, dimension);

            for (int patch = 0 ; patch < totalPatches ; patch++)
            {
                double bounds[6];
                int level, local_patch;

                GetLevelAndLocalPatchNumber(patch, level, local_patch);

                // Low corner of the patch in physical coordinates: the
                // problem origin plus the patch's index offset times the
                // cell width at this level.
                bounds[0] = probLo[0] + lowI[patch]*dx[level][0]*aspectRatio[0];
                // High corner = low corner plus the patch extent.
                // BUGFIX: previously probLo[0] was added a second time here
                // (bounds[0] already contains it), yielding incorrect
                // extents whenever the problem origin is nonzero.
                bounds[1] = bounds[0] + (hiI[patch]-lowI[patch])*dx[level][0]*aspectRatio[0];
                bounds[2] = probLo[1] + lowJ[patch]*dx[level][1]*aspectRatio[1];
                bounds[3] = bounds[2] + (hiJ[patch]-lowJ[patch])*dx[level][1]*aspectRatio[1];
                bounds[4] = 0;
                bounds[5] = 0;
                if (dimension == 3)
                {
                    bounds[4] = probLo[2] + lowK[patch]*dx[level][2]*aspectRatio[2];
                    bounds[5] = bounds[4] + (hiK[patch]-lowK[patch])*dx[level][2]*aspectRatio[2];
                }
                itree->AddElement(patch, bounds);
            }
            itree->Calculate(true);
        }
        else
        {
            // 4D files: only the representative boxes are indexed, and only
            // their first two (spatial) dimensions enter the interval tree.
            itree = new avtIntervalTree((int)listOfRepresentativeBoxes.size(), 2);
            for (std::vector<int>::iterator it = listOfRepresentativeBoxes.begin(); it != listOfRepresentativeBoxes.end(); ++it)
            {
                double bounds[6];
                int level, local_patch;

                GetLevelAndLocalPatchNumber(*it, level, local_patch);

                bounds[0] = probLo[0] + lowI[*it]*dx[level][0]*aspectRatio[0];
                // BUGFIX: same double-counting of probLo as in the <4D
                // branch above; the high bound is low bound + extent.
                bounds[1] = bounds[0] + (hiI[*it]-lowI[*it])*dx[level][0]*aspectRatio[0];
                bounds[2] = probLo[1] + lowJ[*it]*dx[level][1]*aspectRatio[1];
                bounds[3] = bounds[2] + (hiJ[*it]-lowJ[*it])*dx[level][1]*aspectRatio[1];
                bounds[4] = 0;
                bounds[5] = 0;
                itree->AddElement(*it, bounds);
            }
            itree->Calculate(true);
        }

        df = avtIntervalTree::Destruct;

        return ((void *) itree);
    }

    // Unsupported auxiliary data type.
    return NULL;
}

// ****************************************************************************
//  Method: avtChomboFileFormat::GetMaterial
//
//  Purpose:
//      Gets an avtMaterial object for the specified patch
//
//  Notes:      This routine was largely taken from GetMaterial() of the
//              BoxLib database written by Hank Childs and Akira Haddox.
//
//  Programmer: Gunther H. Weber
//  Creation:   August 8, 2007
//
//  Modifications:
//
//    Hank Childs, Tue Feb  5 16:28:31 PST 2008
//    Fix problem with GetVar returning doubles.  Also fix memory leak.
//
//    Kathleen Bonnell, Fri Apr 23 10:33:17 MST 2010
//    Fix crash on windows -- cast std::vectors to pointers before passing
//    as args to avtMaterial, so can catch the case where the vector is empty
//    and therefore pass NULL, because attempting to  dereference an empty
//    vector's 0'th item crashes on windows.
//
// ****************************************************************************

void *
avtChomboFileFormat::GetMaterial(const char *var, int patch,
                                   const char *type, DestructorFunction &df)
{
    if (!initializedReader)
        InitializeReader();

    // Generate material names "mat1" ... "matN".
    std::vector<std::string> mnames(nMaterials);
    char str[32];
    for (int i = 0; i < nMaterials; ++i)
    {
        // snprintf guards against buffer overrun (sprintf had no bound).
        snprintf(str, sizeof(str), "mat%d", i+1);
        mnames[i] = str;
    }

    // Get the volume fractions for the first nMaterials-1 materials; they
    // are stored in the file as variables "fraction-0", "fraction-1", ...
    std::vector<double *> mats(nMaterials);
    std::vector<vtkDoubleArray *> deleteList;
    int nCells = 0;
    for (int i = 0; i < nMaterials - 1; ++i)
    {
        snprintf(str, sizeof(str), "fraction-%d", i);
        vtkDoubleArray *doubleArray = (vtkDoubleArray *)(GetVar(patch, str));
        nCells = doubleArray->GetNumberOfTuples();
        mats[i] = doubleArray->GetPointer(0);
        deleteList.push_back(doubleArray);
    }

    // The last material is not stored in the file; its fraction is whatever
    // remains after the stored fractions are accounted for.
    double *addMatPtr =  new double[nCells];

    for(int cellNo = 0; cellNo < nCells; ++cellNo)
    {
        double frac = 1.0;
        for (int matNo = 0; matNo < nMaterials - 1; ++matNo)
            frac -= mats[matNo][cellNo];
        addMatPtr[cellNo] = frac;
    }

    mats[nMaterials - 1] = addMatPtr;

    // Build the material list / mixed-material tables expected by
    // avtMaterial.  material_list[i] >= 0 means cell i is pure material;
    // a negative value -(k+1) means the mixed entries for cell i start at
    // index k of the mix_* arrays.
    std::vector<int> material_list(nCells);
    std::vector<int> mix_mat;
    std::vector<int> mix_next;
    std::vector<int> mix_zone;
    std::vector<float> mix_vf;

    for (int i = 0; i < nCells; ++i)
    {
        int j;

        // First look for pure materials (exactly one positive fraction).
        int nmats = 0;
        int lastMat = -1;
        for (j = 0; j < nMaterials; ++j)
        {
            if (mats[j][i] > 0)
            {
                nmats++;
                lastMat = j;
            }
        }

        if (nmats == 1)
        {
            material_list[i] = lastMat;
            continue;
        }

        // For unpure materials, we need to add entries to the tables.
        // (nmats >= 2 here: the residual fraction is positive whenever all
        // stored fractions are non-positive, so nmats == 0 cannot occur.)
        material_list[i] = -1 * (1 + (int)mix_zone.size());
        for (j = 0; j < nMaterials; ++j)
        {
            if (mats[j][i] <= 0)
                continue;
            // For each material that's present, add to the tables.
            mix_zone.push_back(i);
            mix_mat.push_back(j);
            mix_vf.push_back(mats[j][i]);
            mix_next.push_back((int)mix_zone.size() + 1);
        }

        // When we're done, the last entry is a '0' in the mix_next,
        // terminating this cell's chain of mixed entries.
        mix_next[mix_next.size() - 1] = 0;
    }

    int mixed_size =(int) mix_zone.size();
    // Get pointers to pass to avtMaterial.  Windows crashes if an empty
    // std::vector's zeroth item is dereferenced, so pass NULL for empty
    // vectors instead of &(vec[0]).
    int *ml = NULL, *mixm = NULL, *mixn = NULL, *mixz = NULL;
    float *mixv = NULL;
    if (material_list.size() > 0)
        ml = &(material_list[0]);
    if (mix_mat.size() > 0)
        mixm = &(mix_mat[0]);
    if (mix_next.size() > 0)
        mixn = &(mix_next[0]);
    if (mix_zone.size() > 0)
        mixz = &(mix_zone[0]);
    if (mix_vf.size() > 0)
        mixv = &(mix_vf[0]);

    avtMaterial * mat = new avtMaterial(nMaterials, mnames, nCells, ml,
                                        mixed_size, mixm, mixn, mixz, mixv);

    df = avtMaterial::Destruct;

    // Free the residual-fraction buffer and the arrays handed back by
    // GetVar; avtMaterial copies what it needs.
    delete[] addMatPtr;
    for (size_t i = 0 ; i < deleteList.size() ; i++)
        deleteList[i]->Delete();

    return (void*) mat;
}

// ****************************************************************************
//  Method: avtChomboFileFormat::RegisterDataSelections
//
//  Purpose:
//      Tries to read requests for specific resolutions.
//
//  Programmer: Tom Fogal
//  Creation:   August 5, 2010
//
//  Modifications:
//
// ****************************************************************************
void avtChomboFileFormat::RegisterDataSelections(
       const std::vector<avtDataSelection_p>& sels,
       std::vector<bool>* applied)
{
    for(size_t i=0; i < sels.size(); ++i)
    {
        if(strcmp(sels[i]->GetType(), "avtResolutionSelection") == 0)
        {
            const avtResolutionSelection* sel =
                static_cast<const avtResolutionSelection*>(*sels[i]);
            this->resolution = sel->resolution();
            (*applied)[i] = true;
        }
    }
}
