import h5py
import time
from inspect import getdoc, getsource
from numpy import ndarray, recarray

class HDF5(object):
    """
    Helper methods for inspecting HDF5 files.
    """
    def ls_hdf5(self, f_h5py, description=False, keyword=None):
        """
        List the contents of an HDF5 file as an indented tree.

        f_h5py : open h5py.File (or Group) object to walk
        description : if True, pair each tree entry with that item's
            attribute list
        keyword : accepted for backward compatibility; currently unused
            (keyword filtering is not implemented)

        Returns a list of indented strings, one per group/dataset; when
        ``description`` is True, a list of (entry, attributes) pairs
        instead.  Also prints the root attributes as a side effect.
        """
        tree = []
        attributes = []

        # Print the root attributes.  str() both sides so non-string
        # attribute values (bytes, numbers, arrays) do not raise.
        for k, v in f_h5py.attrs.items():
            print(str(k) + ': ' + str(v))

        def build_tree(k, v):
            """Visitor for visititems: append one indented entry per item."""
            k = str(k)
            level = k.count('/')
            # list() so the stored attributes survive the file being closed
            # by the caller and are plain Python data.
            attributes.append(list(f_h5py[k].attrs.items()))
            if isinstance(v, h5py.Dataset):
                # Use .shape (metadata only) rather than .value, which loads
                # the whole dataset into memory and was removed in h5py 3.
                if not v.shape:
                    # Scalar dataset: show the value itself.
                    tree.append(4 * level * ' ' + k.split('/')[-1]
                                + ': ' + str(v[()]))
                else:
                    # Non-scalar: show the repr's shape/dtype summary
                    # (the text after the first ':' in the h5py repr).
                    tree.append(4 * level * ' ' + k.split('/')[-1]
                                + ': ' + str(v).split(':')[1])
            else:
                # Group: name only.
                tree.append(4 * level * ' ' + k.split('/')[-1])

        f_h5py.visititems(build_tree)

        if description:
            # list() because zip() returns a one-shot iterator on Python 3.
            tree = list(zip(tree, attributes))

        return tree

class HDFDecorator(object):
    """
    Wrap a function so its arguments can be replaced by dataset paths in
    an HDF5 file.

    Calling the wrapped function WITHOUT the keyword argument "HDF_fname"
    invokes the original function unchanged.  With HDF_fname="file.hdf5",
    every remaining positional and keyword argument is interpreted as a
    dataset path inside that file, and the stored values are passed to
    the original function instead.

    If the keyword argument "save_path" is also given, the result is
    written to that path in the same file along with documentation
    attributes:
        "Function Name" : name and arguments of the generating call
        "Function Doc"  : the function's docstring (if any)
        "Function Code" : the function's source code
        "Created Date"  : timestamp of the write
        "dtype"         : dtype string, for ndarray/recarray results

    The goal is to allow programming "normal" functions during scouting
    work, then - once a function is deemed finished - convert it to
    manipulating data within an HDF file just by applying this decorator.

    Example:
        @HDFDecorator
        def f(x): ...

        y = f(x)                                  # normal call
        y = f('grp/x', HDF_fname='data.hdf5')     # values read from HDF5
        f('grp/x', HDF_fname='data.hdf5', save_path='grp/y')  # and saved
    """

    def __init__(self, func):
        """
        func : function to wrap
        """
        self.funct = func

    def __call__(self, *args, **kwargs):
        """
        Call the wrapped function.

        Without the keyword "HDF_fname", pass args/kwargs through
        unchanged.  With it, substitute each argument with the value of
        the dataset at that path, and - if "save_path" is given - store
        the result back into the file with metadata attributes.

        Returns the wrapped function's result in all cases.
        Raises KeyError if a named dataset path is missing from the file.
        """
        if 'HDF_fname' not in kwargs:
            # Plain call: no HDF5 involvement at all.
            return self.funct(*args, **kwargs)

        fname = kwargs['HDF_fname']
        with h5py.File(fname, mode='r') as f_h5py:
            try:
                # [()] reads a dataset's value (replacement for the
                # .value property removed in h5py 3).
                hdf5_args = [f_h5py[a][()] for a in args]
                hdf5_kwargs = {k: f_h5py[v][()] for k, v in kwargs.items()
                               if k != 'HDF_fname' and k != 'save_path'}
            except KeyError as e:
                # e.args[0] names the missing dataset path; the old code
                # referenced the comprehension variable, which is a
                # NameError on Python 3.
                print("key: %s not found" % e.args[0])
                raise

        if 'save_path' not in kwargs:
            # Execute with the HDF argument values and hand back the result.
            return self.funct(*hdf5_args, **hdf5_kwargs)

        save_path = kwargs['save_path']

        # Check that a dataset can be created at save_path BEFORE running
        # a potentially long calculation whose result could not be saved.
        with h5py.File(fname, mode='r+') as f_h5py:
            f_h5py.create_dataset(save_path, data="test")
            del f_h5py[save_path]

        # Execute the function with the HDF argument values.
        results = self.funct(*hdf5_args, **hdf5_kwargs)

        # Save the result and add the metadata descriptors.
        with h5py.File(fname, mode='r+') as f_h5py:
            dset = f_h5py.create_dataset(save_path, data=results)
            dset.attrs["Created Date"] = time.strftime("%c")

            # Record the call as it was written: positional paths quoted,
            # keyword arguments as key="value".
            arglist = ['"' + str(a) + '"' for a in args] \
                + ['%s="%s"' % (k, v) for k, v in kwargs.items()]
            dset.attrs["Function Name"] = self.funct.__name__ \
                + "(" + ", ".join(arglist) + ")"

            doc = getdoc(self.funct)
            if doc is not None:
                dset.attrs["Function Doc"] = doc
            dset.attrs["Function Code"] = getsource(self.funct)
            if isinstance(results, (ndarray, recarray)):
                dset.attrs["dtype"] = str(results.dtype)
            f_h5py.flush()
        return results
