import hdf5plugin
import h5py
from matplotlib.pyplot import cm
from matplotlib import pyplot as plt
import numpy as np

from astropy.units import Quantity
from astropy.time import Time
from astropy.table import QTable

########################
## Helper functions
########################
class VerificationError(Exception):
    """ Error raised if Verification fails """
    def __init__(self, msg):
        # BUGFIX: was super().__init__(self, msg), which stores the exception
        # instance itself in .args and garbles str(err) output.
        super().__init__(msg)


def get_slice(my_array, dimension, index):
    """ Helper method to extract a slice along a given dimension

    Args:
        my_array (array-like): array supporting .ndim and tuple indexing
        dimension (int): axis along which to take the slice
        index (int): index to select along that axis

    Returns:
        array-like: slice of my_array at `index` along `dimension`
    """
    # Select everything on every axis, except the requested index on `dimension`
    selector = [slice(None)] * my_array.ndim
    selector[dimension] = index
    return my_array[tuple(selector)]

def get_dataset_keys(f):
    """ Get all dataset keys from a h5py file

    Args:
        f (h5py.File): open HDF5 file handle

    Returns:
        list of str: paths of every h5py.Dataset in the file
    """
    dataset_keys = []

    def _collect(key):
        # f.visit walks every object path; keep only Dataset entries
        if isinstance(f[key], h5py.Dataset):
            dataset_keys.append(key)

    f.visit(_collect)
    return dataset_keys

def assert_sdhdf_class(sdhdf_group, desired_class):
    """ Confirm HDF5 object has correct SDHDF_CLASS

    Args:
        sdhdf_group (Group/Dataset): HDF5 group/dataset object
        desired_class (str): Name of SDHDF_CLASS, e.g. sdhdf_meta

    Returns:
        name (str): Name of HDF5 object

    Raises:
        RuntimeError: if the object's SDHDF_CLASS attribute does not match.
    """
    # Map h5py internal types to human-readable names for error messages
    h5_types = {
        h5py._hl.dataset.Dataset: 'dataset',
        h5py._hl.group.Group: 'group',
        h5py._hl.attrs.AttributeManager: 'attrs',
    }

    objtype = h5_types.get(type(sdhdf_group), 'unknown_type')
    # BUGFIX: explicit check instead of assert -- asserts are stripped under
    # `python -O`, which would silently skip this validation. Also fixes the
    # "is does not have" grammar slip in the error message.
    if sdhdf_group.attrs.get('SDHDF_CLASS') != desired_class:
        raise RuntimeError(f"{objtype} does not have SDHDF_CLASS={desired_class}")

    return sdhdf_group.attrs.get('NAME')


def sdhdf_setattr(parent, obj):
    """ Set an attribute on an SDHDF class object

    Args:
        parent (SDHDF class): parent to add attribute to
        obj (h5py Group/Dataset): object to load into an SDHDF class and attach to parent

    Returns:
        The wrapped SDHDF object, or None if obj has no known SDHDF_CLASS
        (or is a beam, which is attached to parent.data instead).
    """
    sdhdf_classes = {
        'sdhdf_config': SDHDFConfig,
        'sdhdf_meta': SDHDFMeta,
        'sdhdf_beam': SDHDFBeam,
        'sdhdf_data': SDHDFData,
        'sdhdf_band': SDHDFBand,
        'sdhdf_waterfall': SDHDFWaterfall,
        'sdhdf_frequency': SDHDFFrequency
    }
    loader = sdhdf_classes.get(obj.attrs.get('SDHDF_CLASS'))
    if loader is None:
        # Unknown / untagged object: nothing to attach
        return None
    # Special case: we want beams in sdhdf.data.beam_0 etc
    if loader is SDHDFBeam:
        parent.data._add_beam(obj)
        return None
    sd_obj = loader(obj)
    setattr(parent, obj.attrs['NAME'], sd_obj)
    return sd_obj

########################
## SDHDF_CLASS - Groups
########################
class SDHDFConfig(object):
    """ Class for sdhdf_config """
    def __init__(self, sdhdf_group):
        # Validate class tag and copy attributes off the HDF5 object
        self.name = assert_sdhdf_class(sdhdf_group, 'sdhdf_config')
        self.attrs = dict(sdhdf_group.attrs)

        # Attach each child table as an attribute named after its NAME attr
        for _, child in sdhdf_group.items():
            if not isinstance(child, h5py._hl.dataset.Dataset):
                continue
            if child.attrs.get('SDHDF_CLASS') == 'sdhdf_table':
                setattr(self, child.attrs['NAME'], SDHDFTable(child))

class SDHDFMeta(object):
    """ Class for sdhdf_meta """
    def __init__(self, sdhdf_group):
        # Validate class tag and copy attributes off the HDF5 object
        self.name = assert_sdhdf_class(sdhdf_group, 'sdhdf_meta')
        self.attrs = dict(sdhdf_group.attrs)

        # Attach each child table as an attribute named after its NAME attr
        for _, child in sdhdf_group.items():
            if not isinstance(child, h5py._hl.dataset.Dataset):
                continue
            if child.attrs.get('SDHDF_CLASS') == 'sdhdf_table':
                setattr(self, child.attrs['NAME'], SDHDFTable(child))

class SDHDFBand(object):
    """ Class for sdhdf_band

    Children typically include astronomy_data, calibrator_data, metadata.
    """
    def __init__(self, sdhdf_group):
        self.name = assert_sdhdf_class(sdhdf_group, 'sdhdf_band')
        self.attrs = dict(sdhdf_group.attrs)
        # Attach every recognised child (tables, waterfalls, sub-groups)
        for _, child in sdhdf_group.items():
            sdhdf_setattr(self, child)
        
class SDHDFBeam(object):
    """ Class for sdhdf_beam """
    def __init__(self, sdhdf_group):
        self.name = assert_sdhdf_class(sdhdf_group, 'sdhdf_beam')
        self.attrs = dict(sdhdf_group.attrs)
        # Flat list of sub-bands, kept alongside the named attributes
        self._subbands = []

        for _, child in sdhdf_group.items():
            loaded = sdhdf_setattr(self, child)
            if isinstance(loaded, SDHDFBand):
                self._subbands.append(loaded)

class SDHDFData(object):
    """ Class for sdhdf_data """
    def __init__(self, sdhdf_group):
        self.name = assert_sdhdf_class(sdhdf_group, 'sdhdf_data')
        self.attrs = dict(sdhdf_group.attrs)
        # Attach every recognised child object to this instance
        for _, child in sdhdf_group.items():
            sdhdf_setattr(self, child)


########################
## SDHDF_CLASS - Datasets
########################

class SDHDFFrequency(object):
    """ Class for sdhdf_frequency """
    def __init__(self, sdhdf_dataset):
        # Validate class tag, keep attrs as a plain dict, and hold a
        # reference to the underlying (lazily-read) h5py dataset.
        self.name = assert_sdhdf_class(sdhdf_dataset, 'sdhdf_frequency')
        self.attrs = dict(sdhdf_dataset.attrs)
        self._data = sdhdf_dataset

class SDHDFTable(QTable):
    """ Class for sdhdf_table

    An astropy QTable loaded from an HDF5 table dataset, with units
    applied from the dataset's <COLNAME>_UNIT attributes.
    """
    def __init__(self, sdhdf_dataset, *args, **kwargs):
        self.name = assert_sdhdf_class(sdhdf_dataset, 'sdhdf_table')
        self.attrs = dict(sdhdf_dataset.attrs)
        # Read the full dataset into memory and build the QTable from it
        QTable.__init__(self, sdhdf_dataset[:], *args, **kwargs)
        self._apply_units()

    def _apply_units(self):
        """ Read attributes and convert columns into astropy Quantities where possible

        Raises:
            VerificationError: if a time-like column cannot be converted
                into an astropy Time.
        """
        for col in self.colnames:
            if col + '_UNIT' not in self.attrs:
                continue
            try:
                if col == 'MJD':
                    # NOTE: MJD currently expressed as Quantity (days), should be Time (mjd)
                    # TODO: Fix in subsequent SDHDF version
                    self[col] = Time(self[col], format='mjd')
                else:
                    self[col].unit = self.attrs[col + '_UNIT']
                    self[col] = self[col].astype('float64')
            # BUGFIX: narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed by the fallback path.
            except Exception:
                # Handling of time. This is a bit messy.
                # First, if something like ELAPSED_TIME has a unit,
                # e.g. seconds, then we treat it as a quantity
                # Second, we currently have to guess based on column names
                # TODO: Fix in subsequent versions of SDHDF!
                try:
                    if 'TIME' in col or 'DATE' in col:
                        self[col] = Time(self[col], scale='utc')
                    elif 'UTC' in col or 'AEST' in col:
                        pass  # This is a HH:MM:SS, can't convert into astropy Time()
                    if 'MJD' in col:
                        self[col] = Time(self[col], format='mjd')
                except Exception:
                    raise VerificationError(f"Cannot convert {self.name} {col} into astropy time")
                    
class SDHDFWaterfall(object):
    """ Class for sdhdf_waterfall

    Wraps a multi-dimensional spectral dataset with labelled dimensions
    (time, beam, polarization, frequency, bin) and provides plotting helpers.
    """
    def __repr__(self):
        rstr = f"<SDHDF Waterfall: {self.dims} | {self._data.shape}>"
        return rstr

    def _verify(self):
        """ Check that the dataset declares all required dimension labels.

        Raises:
            VerificationError: if a required dimension label is missing.
        """
        required_dims = ['time', 'beam', 'polarization', 'frequency', 'bin']
        # BUGFIX: the original looped over self.dims and tested membership in
        # self.dims (a tautology), and instantiated the exception without
        # raising it -- so verification could never fail.
        for d in required_dims:
            if d not in self.dims:
                raise VerificationError(f"SDHDF Class Waterfall must have dimension {d}")

    def __init__(self, sdhdf_dataset):
        """ Wrap an HDF5 dataset tagged SDHDF_CLASS='sdhdf_waterfall'

        Args:
            sdhdf_dataset (h5py.Dataset): dataset to wrap
        """
        self.name = assert_sdhdf_class(sdhdf_dataset, 'sdhdf_waterfall')
        self._data = sdhdf_dataset
        # Extract dimensions and attributes from underlying dataset
        self.dims = [d.decode('ascii') for d in self._data.attrs['DIMENSION_LABELS']]
        self.attrs = dict(self._data.attrs)
        # Verify required dimension labels are present
        self._verify()

        self.freq_axis = self.dims.index('frequency')
        self.pol_axis  = self.dims.index('polarization')
        self.time_axis = self.dims.index('time')
        self.beam_axis = self.dims.index('beam')
        self.bin_axis  = self.dims.index('bin')

        self.pol_scale = None   # Needs to be set manually

        # Use DIMENSION_LIST reference to return frequency dataset
        # This is 'proper' way to use DIMENSION_LIST, but we could
        # Have equally just used the known location.
        self._frequency_data = self._data.file[self.attrs['DIMENSION_LIST'][self.freq_axis][0]]
        self.freq_scale = Quantity(self._frequency_data, unit=self._frequency_data.attrs['UNIT'])

    def _attach_dimension_scale(self, dim_name, dim_scale):
        """ Attach a missing dimension scale as self.<dim_name>_scale """
        setattr(self, dim_name+'_scale', dim_scale)

    def plot_spectrum(self, pol='all', dB=False, *args, **kwargs):
        """ Plot time-averaged spectrum using MPL

        Args:
            pol (str or int): 'all' to plot every polarization, or a pol index
            dB (bool): if True, plot 10*log10 of the data
        """
        plt.figure(figsize=(10, 8))
        npol = self._data.shape[self.pol_axis]
        color = cm.viridis(np.linspace(0, 1, npol))
        u = f"dB({self.attrs['UNIT']})" if dB else self.attrs['UNIT']

        # TODO: Pol types currently stored in s.data.beam_0.metadata.band_params['POLTYPE']
        # TODO: Make this accessible from Waterfall via dimensions. Need a sdhdf_polarization class
        # TODO: pol = ['AA', 'BB', 'CR', 'CI'] and then attach like frequency.
        if pol == 'all':
            for sp in range(npol):
                plt.subplot(npol, 1, sp+1)
                d = get_slice(self._data, self.pol_axis, sp)
                d = np.mean(d, axis=(self.time_axis)).squeeze()
                d = 10*np.log10(d) if dB else d
                plt.plot(self._frequency_data, d, color=color[sp], label=f'Pol {self.pol_scale[sp]}')
                plt.ylabel(f"Flux [{u}]")
                plt.legend()
        else:
            d = get_slice(self._data, self.pol_axis, pol)
            d = np.mean(d, axis=(self.time_axis)).squeeze()
            # BUGFIX: dB conversion was silently skipped in the single-pol branch
            d = 10*np.log10(d) if dB else d
            # BUGFIX: label previously referenced `sp`, which is undefined in
            # this branch (NameError); use the requested pol index instead.
            plt.plot(self._frequency_data, d, color=color[0], label=f'Pol {self.pol_scale[pol]}')
            plt.ylabel(f"Flux [{u}]")
            plt.legend()

        plt.tight_layout()
        plt.xlabel(f"Frequency [{self._frequency_data.attrs['UNIT']}]")

    def plot_waterfall(self, pol='all', dB=False, *args, **kwargs):
        """ Plot waterfall (time vs frequency) using MPL

        Args:
            pol (str or int): 'all' to plot every polarization, or a pol index
            dB (bool): if True, plot 10*log10 of the data

        Notes:
            Requires self.time_scale to have been attached beforehand
            via _attach_dimension_scale('time', ...).
        """
        plt.figure(figsize=(10, 8))
        npol = self._data.shape[self.pol_axis]
        u = f"dB({self.attrs['UNIT']})" if dB else self.attrs['UNIT']
        # extent maps image pixels onto (freq, time) axes; time runs downward
        extent=(self.freq_scale[0].value, self.freq_scale[-1].value,
                self.time_scale[-1].value, self.time_scale[0].value)
        if pol == 'all':
            for sp in range(npol):
                if npol == 2:
                    plt.subplot(2, 1, sp+1)
                else:
                    plt.subplot(2, 2, sp+1)
                d = get_slice(self._data, self.pol_axis, sp).squeeze()
                d = 10*np.log10(d) if dB else d

                plt.imshow(d, aspect='auto', extent=extent)
                plt.ylabel(self.time_scale.format)
                plt.xlabel(f"Frequency [{self._frequency_data.attrs['UNIT']}]")
                plt.colorbar(label=f'Power[{u}]')
        else:
            d = get_slice(self._data, self.pol_axis, pol).squeeze()
            d = 10*np.log10(d) if dB else d
            plt.imshow(d, aspect='auto', extent=extent)
            plt.ylabel(self.time_scale.format)
            plt.xlabel(f"Frequency [{self._frequency_data.attrs['UNIT']}]")
            plt.colorbar(label=f'Power[{u}]')


########################
## Main SDHDF Class
########################

class SDHDFDataContainer(object):
    """ Container class for holding multiple beams """

    def __init__(self):
        # Beams are stored both in an ordered list and as named attributes
        self._beams = []

    def _add_beam(self, beam_dataset):
        """ Wrap an HDF5 beam group in SDHDFBeam and attach it by NAME """
        new_beam = SDHDFBeam(beam_dataset)
        self._beams.append(new_beam)
        setattr(self, beam_dataset.attrs['NAME'], new_beam)

class SDHDF(object):
    """ Top-level class for reading SDHDF files """

    def _verify(self):
        """ Put verification logic here """
        # TODO: e.g. What is mandatory?
        # This should catch anything that if missing will cause this class to explode
        # E.g. mandatory tables that are missing
        pass

    def __init__(self, filename):
        """ Class for handling SDHDF files

        Args:
            filename (str): Name of file to open

        Raises:
            RuntimeError: if the file root is not tagged SDHDF_CLASS='sdhdf_file'

        Notes:
            Generates a SDHDF object, reading the file hierarchy and
            creating nested children (each assigned a SDHDF class).
                ```
                 s = SDHDF('myfile.sdhdf')          # Open file
                 s.config.attrs                     # Access attribute dict
                 sb7 = s.data.beam_0.band_SB7       # Beam 0, subband 7
                 sb7.calibrator_data.cal_data_on    # Access calibrator data
                ```
        """
        self._h5 = h5py.File(filename, mode='r')

        # BUGFIX: explicit check instead of assert -- asserts are stripped
        # under `python -O`, which would let non-SDHDF files through.
        if self._h5.attrs.get('SDHDF_CLASS') != 'sdhdf_file':
            raise RuntimeError("File does not appear to be SDHDF")

        self.attrs = dict(self._h5.attrs)
        self.name = self.attrs.get('NAME')
        self.data = SDHDFDataContainer()

        # Load top-level groups (config, metadata, beams) as SDHDF classes
        for objname, obj in self._h5.items():
            if isinstance(obj, h5py._hl.group.Group):
                sdhdf_setattr(self, obj)

        # TODO: Currently need to manually set dimension scales on Waterfall objects.
        # TODO: This shouldn't be the case (at least ideally!)
        for idx, beam in enumerate(self.data._beams):
            # Example locations of metadata tables
            # s.data.beam_0.band_SB7.metadata.obs_params
            # s.data.beam_0.band_SB7.astronomy_data.data
            # s.data.beam_0.metadata.band_params
            # NOTE(review): indexing band_params by the beam's enumeration
            # index assumes one POL_TYPE row per beam -- confirm against spec.
            poltype  = beam.metadata.band_params['POL_TYPE'][idx]
            # Split e.g. 'AABBCRCI' into two-character codes ['AA','BB','CR','CI']
            pol_dim = [poltype[2*i:2*i+2] for i in range(len(poltype)//2)]
            for band in beam._subbands:
                time_dim = band.metadata.obs_params['MJD']
                freq_dim = band.astronomy_data.frequency
                band.astronomy_data.data._attach_dimension_scale('time', time_dim)
                band.astronomy_data.data._attach_dimension_scale('pol', pol_dim)
                #band.astronomy_data.data._attach_dimension_scale('freq', freq_dim) #NB: OVERRIDES EXISTING!

        # Find all datasets and record their locations by SDHDF class
        self._tables = []
        self._wfs    = []
        dataset_keys = get_dataset_keys(self._h5)
        for dset in dataset_keys:
            sd_class = self._h5[dset].attrs.get('SDHDF_CLASS')
            if sd_class == 'sdhdf_table':
                self._tables.append(dset)
            elif sd_class == 'sdhdf_waterfall':
                self._wfs.append(dset)

        self._verify()

    def __repr__(self):
        rstr = f"<SDHDF file: {self.name}>"
        return rstr

    def info(self):
        """ Display basic info from primary header """
        for colname in self.metadata.primary_header.colnames:
            print(f"{colname:24s}: {str(self.metadata.primary_header[colname][0]):20s}")

    def list_tables(self):
        """ List all tables in the file """
        return self._tables

    def list_datasets(self):
        """ List all SDHDF Waterfall datasets """
        return self._wfs
        

    
