"""
beetlespectra.py is a python class for creating and manipulating 
astronomical spectra objects.

Created on Sep 14, 2009

@author: aamn
@note: pybeetle is a port of the hammer (v1_1) spectral typing code from IDL to
python.  hammer.pro was written by K. Covey.
@acknowledgements:
    Extensive instruction from the work of Bressert/Robitaille (aplpy/atpy)

@code:  ['author','url','source','version','revision date','access date']
"""

from copy import deepcopy
from numpy import ma
import numpy as np
import os, sys
import pyfits
from atpy import Table, TableSet

from beetletools import vac2air, specsum, parse_wtype
from datamodels.obsini import Obs
from datamodels.ssap import Data

Axes = ['SpectralAxis', 'FluxAxis']

class Spectra(object):
    """ Spectral Object 
        s = Spectra('/path/to/file.fits', model='hecto', format='fits')
    
        datamodel = Obs
        dataformat = 'fits' : pyfits; 'text' : readcol, 'ssap' : ??
        where path to fits file is then a ssap uri.
    
        has these convenience attributes:
    
        s.wave = masked wave array
        s.energy = masked energy array
        s.waven = masked wave number array
        s.flux = masked flux array
        s.mask = the mask
        s.errors
    
        and these methods:
    
        s.mask_badflux()
        s.mask_badwave(wave=[wave,energy,waven])
        s.resample() # regrid/sample wave
        s.normalize() # returns flux continuum normalized by some function.
        s.clip() # returns a new instance with clipped arrays by wave.
        s.snr() # snr in some band. see Fit.
    
        s.set_units.flux(,calib=)
        s.set_units.wave(wave=[wave,energy])
    
        s.shift(rv=, z=, to/from)
    
        r = Spectra.merge(s,t) where s/t are existing instances
    """

    def __init__(self, *args, **kwargs):
        """ initialize.

            Positional args are kept verbatim as the data source(s) (e.g. a
            path to a file).  Keywords used here: 'model' (Obs metadata model
            name, default 'generic') and 'format' (overrides the model's
            format); everything else is passed through to _meta/_base.

            Notes:
                - all attribute assignment is disabled at the object level
                  (see __setattr__), so every internal attribute is installed
                  with object.__setattr__.
        """
        # Data holds one TableSet per axis; each table in a set is one named
        # "version" (subset) of the spectrum.
        object.__setattr__(self, 'Data',
                           Data())
        for axis in Axes: setattr(self.Data, axis, getattr(Data, axis)())
        for axis in Axes: setattr(self.Data.__dict__[axis], 'data', TableSet())

        # source is the whole args tuple, or None when nothing was given.
        object.__setattr__(self, 'source',
                           len(args) > 0 and args or None)

        # parse observation metadata model.
        object.__setattr__(self, 'model',
                           kwargs.get('model', 'generic'))
        try:
            self._meta(**kwargs)
        except:
            raise Exception('Bad Observation Metadata Model')

        # format from __init__ should override model.
        object.__setattr__(self, 'format',
                           kwargs.get('format')  or \
                           self.Obs.General.format or \
                           'none')

        # save all the args. many args could be many arrays, or multiple spectra.
        # _version is a set-with-pointer (see the version class below) that
        # tracks the named spectral subsets and which one is current.
        object.__setattr__(self, '_version',
                           version())
        try:
            self._base(**kwargs)
        except:
            raise #Exception('Bad Data')

    def __getattr__(self, attribute):
        """ Intercepter for special functions/attributes
            1. pop axis into existence on request, pointing at current version.
            2. convenience functions to access axis spectral, flux, masks
            3. convenience functions for versioning

            Only names not found through normal lookup arrive here.
        """
        #convenience = [foo[3:] for foo in dir(self) if foo[:3] == '_c_']
        convenience = ['wave', 'flux', 'mask', 'version', 'versions']

        if attribute in Axes:
            # e.g. s.SpectralAxis -> the axis table for the current version.
            return getattr(self.Data.__dict__[attribute].data, self.version)
        elif attribute in self._version:
            # a known version name: switch the pointer to it.
            # NOTE(review): this branch falls through and returns None,
            # unlike the others -- confirm that is intended.
            self.set_version(attribute)
        elif attribute in convenience:
            # s.wave / s.flux / s.mask / s.version(s) -> _c_* helpers below.
            convenience_func = getattr(self, "_c_%s" % attribute)
            return convenience_func()
        else:
            raise AttributeError(attribute)

    def __setattr__(self, n, v):
        """ Swallow every attribute assignment; internal state is written
            with object.__setattr__ instead (see __init__).
        """
        pass

    #-------------------------------------------------------- Private Functions
    def _base(self, **kwargs):
        """ register the 'base' version and load the data source into it.

            Any parse failure propagates unchanged to the caller.
        """
        self._version.add('base')
        self._from(self.source, self.format, 'base', **kwargs)
        return

    def _from(self, source, format, version, **kwargs):
        """ dispatch a data source to the reader matching its format.

            Required:
                    source:    string full path to data source file
                    format:    format of data source file
                    version:   named spectral subset to hold data source.

            Unknown formats and a missing source both fall back to from_none.
        """
        if source is None:
            return self.from_none(version)
        reader = getattr(self, "from_%s" % format, self.from_none)
        return reader(source, version, **kwargs)

    def _meta(self, **kwargs):
        """ build the Obs metadata model; the 'model' kwarg overrides
            self.model when given.
        """
        name = kwargs.get('model', self.model)
        object.__setattr__(self, 'Obs', Obs(name))
        return

    #-------------------------------------------- Private Convenience Functions
    def _c_flux(self):
        """ the Flux Axis values as a masked array (mask from return_mask) """
        values = self.FluxAxis.Value
        return ma.masked_array(values, mask=self.mask)

    def _c_mask(self):
        """ return the combined bad-data mask for the current version.

            Falls back to False (mask nothing) when mask building fails,
            e.g. for an empty axis table.
        """
        try:
            return self.return_mask()
        except:
            print 'Mask building failed for set %s. Returning False' % self.version
            return False

    def _c_version(self):
        """ name of the currently selected spectral subset """
        current = self._version()
        return current

    def _c_versions(self):
        """ all known spectral subset names, as a list """
        return [name for name in self._version]

    def _c_wave(self):
        """ the Spectral Axis values as a masked array (mask from return_mask) """
        values = self.SpectralAxis.Value
        return ma.masked_array(values, mask=self.mask)

    #---------------------------------------------- Pretty or Utility Functions
    def comments(self, target=[], all=False, axis=None):
        """ print comments on specified spectral subset(s).
            self.comments('base')
            self.comments(all=True)
        """
        if all:
            args = self.versions
        elif len(target) is 0:
            args = [self.version]
        elif isinstance(target, str):
            args = [target]
        else:
            args = target

        if axis is None:
            show = Axes
        else:
            show = [foo for foo in Axes if foo in axis]

        for arg in args:
            try:
                for axis in show:
                    comments = getattr(self.Data.__dict__[axis].data,
                                       arg).comments
                    for comment in comments:
                        print "%-12s  %-12s  %-s" % (arg, axis, comment)
            except AttributeError:
                print "%-12s  is not a spectral version" % arg
            print '\n'
        return

    def describe(self, version=None, verbose=0):
        """ Print out some useful information about the Spectra. Return dict.

            Optional:
                    version: switch to this spectral subset first.
                    verbose: 0 = summary only; 1 = also per-axis description;
                             2 = also per-axis comments.

            NOTE(review): despite the summary line above, this method returns
            None; the info/warn dicts from tests() are only printed.
        """
        if version is not None: self.set_version(version)
        info, warn = self.tests()

        # banner helpers: br repeats a character to the width of s,
        # bf builds a width-matched format string.
        br = lambda s, l: len(s) * l
        bf = lambda s: "%" + str(len(s)) + "s"
        bif = "%-20s : %s"

        hd = "Current Table"
        print bf(hd) % br(hd, '*')
        print bf(hd) % hd
        print bf(hd) % br(hd, '*')
        attrs = ['version', 'source', 'model', 'format']
        for attr in attrs:
            print bif % (attr, getattr(self, attr))
        print '\n'

        hd = "*Basic Information*"
        print bf(hd) % br(hd, '*')
        print bf(hd) % hd
        print bf(hd) % br(hd, '*')

        # each self-check result, with its warning (if any) underneath
        for k, v in info.items():
            print bif % (k, v)
            if warn.has_key(k):
                print bif % ('', warn[k])
        print '\n'

        if verbose >= 1:
            for axis in Axes:
                hd = "*%s Description*" % axis
                print bf(hd) % br(hd, '*')
                print bf(hd) % hd
                print bf(hd) % br(hd, '*')
                getattr(getattr(self, axis), 'describe')()
                print '\n'
                if verbose >= 2:
                    self.comments(axis=axis)

    def print_versions(self, *args, **kwargs):
        for foo in self._version: print foo

    def set_version(self, *args, **kwargs):
        """ informative change spectral subset
        """
        fromto = (self.version, args[0])
        if fromto[1] in self._version:
            self._version(fromto[1])
            print 'version changed from %s to %s' % fromto
        else:
            print 'version unchanged from %s as %s is not known' % fromto
            self.print_versions()
        return

    #-------------------------------------------------------- Spectra Functions
    def normalize(self, target=None,
                        power=1,
                        version='normalize1'):
        """ normalize the flux continuum with a power law
            Required:
                    None
    
            Optional:
                    target:  specify that spectra subset for normalization. 
                             if not prescribed then use the current pointer
                    power:   set index of power law continuum fit (default=1)
                    version: string name for newly created data subset.

            Creates a new version holding copies of the target's axis tables
            with the flux divided by the fitted polynomial, then switches the
            pointer to the new version.
        """
        target = target or self.version
        # triple() returns '' for a None verb, so only one axis gets a comment.
        # NOTE(review): zip pairs 'normalized' with the SpectralAxis and the
        # empty comment with the FluxAxis -- possibly swapped; confirm.
        verbs = ['normalized', None]
        comment = '(Power= %s)' % power
        comments = dict(zip(Axes, [triple(target, verb, comment) for verb in verbs]))

        self._version(target)

        # masked arrays, so ma.polyfit honors the Obs badvalue rules
        wave = self.wave
        flux = self.flux
        ffit = ma.polyfit(wave, flux, power, full=True)
        print 'fit: ', ffit[0]
        yfit = np.polyval(ffit[0], wave)

        for axis in Axes:
            tmp = deepcopy(getattr(self, axis)) #deepcopy *current* table
            tmp.table_name = version #change its name
            tmp.add_comment(comments[axis])
            if axis == 'FluxAxis':
                tmp.Value = tmp.Value / yfit
            self.Data.__dict__[axis].data.append(tmp)

        self._version.add(version)
        self._version(version)
        return

    def resample(self, newrez, target=None,
                               units='Angstroms',
                               version='resample1'):
        """ resample the spectral resolution and interpolate flux axis
    
            Required:
                    newrez: a new spectral resolution in units 
                        (optionally given or defaults to Angstroms), or an
                        explicit wavelength array to resample onto.
            Optional:
                    units:   unit of the new resolution, default is Angstroms.
                    target:  name of spectral subset to resample. if not given
                            the current pointer is used.
                    version: name for the new spectral subset as resampled.
        """
        target = target or self.version
        verbs = ['resampled to', 'interpolated to']
        comment = '%s %s' % (newrez, units)
        comments = dict(zip(Axes, [triple(target, verb, comment) for verb in verbs]))

        self._version(target)

        # copy the target SpectralAxis table, then strip the copy's columns
        # so it can be refilled on the new grid under the new version name.
        wavet = deepcopy(getattr(self, 'SpectralAxis'))
        wavev = deepcopy(wavet)
        # NOTE(review): 'wavet.columns.keys' is not called -- if columns is a
        # plain dict-like, this passes the bound method rather than the key
        # list; verify against the atpy columns API.
        wavev.remove_columns(wavet.columns.keys)
        wavev.table_name = version

        wave = wavet.data['Value']
        wavec = wavet.columns['Value']
        if not isinstance(newrez, np.recarray) and \
           not isinstance(newrez, np.ndarray):
            # scalar resolution: build a uniform grid across the wave range.
            # NOTE(review): np.floor yields a float; newer numpy requires an
            # integer length for np.zeros/range -- confirm the numpy in use.
            elem = np.floor(wave.ptp() / newrez) - 1
            newwave = np.zeros(elem, dtype=wave.dtype)
            for foo in range(0, elem):
                newwave[foo] = wave.min() + (foo + 1.) * newrez
        else:
            # an explicit grid was handed in; use it directly.
            elem = len(newrez)
            newwave = newrez

        # rebuild every spectral column: 'Value' gets the new grid itself,
        # everything else is interpolated onto it.
        for foo in wavet.columns:
            if foo != 'Value':
                data = wavet.data[foo]
                datac = wavet.columns[foo]
                newdata = np.interp(newwave, wave, data)
                wavev.add_column(foo,
                    newdata,
                    unit=datac.unit,
                    format=datac.format,
                    null=datac.null,
                    dtype=datac.dtype,
                    description=datac.description)
            else:
                wavev.add_column('Value',
                    newwave,
                    unit=wavec.unit,
                    format=wavec.format,
                    null=wavec.null,
                    dtype=wavec.dtype,
                    description=wavec.description)
        wavev.add_comment(comments['SpectralAxis'])

        # same treatment for the flux table: every column is interpolated.
        fluxt = deepcopy(getattr(self, 'FluxAxis'))
        fluxv = deepcopy(fluxt)
        fluxv.remove_columns(fluxt.columns.keys)
        fluxv.table_name = version

        for foo in fluxt.columns:
            data = fluxt.data[foo]
            datac = fluxt.columns[foo]
            newdata = np.interp(newwave, wave, data)
            fluxv.add_column(foo,
                newdata,
                unit=datac.unit,
                format=datac.format,
                null=datac.null,
                dtype=datac.dtype,
                description=datac.description)
        fluxv.add_comment(comments['FluxAxis'])

        self.Data.__dict__['SpectralAxis'].data.append(wavev)
        self.Data.__dict__['FluxAxis'].data.append(fluxv)

        self._version.add(version)
        self._version(version)
        return

    def window(self, window, target=None,
                               version='window1'):
        """ Window the current target version to a new spectral range.
    
            Required:
                    window: a (min, max) spectral range in the units of the
                        target's SpectralAxis.
            Optional:
                    target:  name of spectral subset to window. if not given
                            the current pointer is used.
                    version: name for the new spectral subset as windowed.
                    
            I've debated using masks instead of creating a new spectra subset. 
            Currently the masks are built on the fly from the rules in the 
            observation metadata model. If I end up creating functions that
            modify these rules for other purposes, then I might use masks
            again.  
        """
        window = tuple(window)
        target = target or self.version
        verbs = ['windowed to', 'windowed to']
        comment = '%s %s' % window
        comments = dict(zip(Axes, [triple(target, verb, comment) for verb in verbs]))

        # switch to the requested target before reading self.wave, matching
        # normalize()/resample(); previously the current version was windowed
        # regardless of the 'target' argument.
        self._version(target)

        # True inside the window: where() keeps exactly those rows.
        mask = ma.masked_inside(self.wave, *window).mask

        for axis in Axes:
            olddata = getattr(self, axis)
            newdata = olddata.where(mask)
            newdata.table_name = version
            newdata.add_comment(comments[axis])
            self.Data.__dict__[axis].data.append(newdata)

        self._version.add(version)
        self._version(version)
        return

#    def snip(self, s1, s2, version='snip'):
#        """
#        produce a snippet of the spectral sequence by masking the spectral
#        axis outside of a given range.
#        """
#        self.mask = ma.masked_outside(s.wave, s1, s2).mask
#        return

    #---------------------------------------------------- Data Access Functions
    def from_arrays(self, source=None, version=None, **kwargs):
        """ transform a set of 1d arrays into a spectra (not implemented).

            The signature now matches the from_* reader protocol used by
            _from (source, version, **kwargs); previously this accepted only
            **kwargs and raised TypeError when dispatched.
        """
        return


    def from_ascii(self, source=None, version=None, **kwargs):
        """ read text file, create data (not implemented).  Use np.genfromtxt?

            The signature now matches the from_* reader protocol used by
            _from (source, version, **kwargs); previously this accepted no
            arguments and raised TypeError when dispatched.
        """
        return

    def from_bintable(self, source, version, **kwargs):
        """ read binary FITS tables

            Required:
                    source:  tuple/list whose first element is the file path
                    version: named spectral subset to hold the data
            Optional (kwargs):
                    hdu: HDU index of the table (default: Obs FITS.hdu or 0)

            Raises IOError when atpy cannot parse the table.  Column-level
            problems are recorded as comments on the axis tables, not raised.
        """
        source = source[0]
        hdu = kwargs.get('hdu') or \
              self.Obs.get('FITS', 'hdu', int) or \
              0

        comment = 'bintable has column'
        FluxAxis = Table(name=version)
        SpectralAxis = Table(name=version)

        try:
            raw = Table(source, hdu=hdu)
        except IndexError, err:
            raise IOError('ATpy cannot parse table in %s: hdu %s %s' %
                          (source, hdu, err))

        # foo: our column name (per the data model); bar: the source column.
        for foo, bar in self.Obs.SpectralAxis().items():
            unit = raw.columns[bar].unit or \
                   self.Obs.SpectralAxis.units or \
                   'Angstroms'
            try:
                SpectralAxis.add_column(foo,
                     raw.data[bar],
                     unit=unit,
                     null=raw.columns[bar].null,
                     description=self.Data.SpectralAxis[foo]['description'],
                     format=raw.columns[bar].format,
                     dtype=raw.columns[bar].dtype)
                SpectralAxis.add_comment("%s %s" % (comment, foo))
            except:
                if foo not in self.Data.SpectralAxis.names:
                    # pass silently. This must be some other config item as
                    # Obs should already have been validated.
                    pass
                elif bar not in raw.names:
                    failcolcom = "Column %s not in FITS Bintable." % (bar)
                    SpectralAxis.add_comment(failcolcom)
                else:
                    failcolcom = "failed for unknown causes"
                    SpectralAxis.add_comment(failcolcom)
            else:
                # column added: note dtype mismatches and all-invalid data.
                # test dtype
                if self.Data.SpectralAxis[foo]['dtype'] != raw.columns[bar].dtype:
                    dtypemismatch = "%s: Dtype of source (%s) does not match spec (%s)" % \
                    (foo, raw.columns[bar].dtype, self.Data.SpectralAxis[foo]['dtype'])
                    SpectralAxis.add_comment(dtypemismatch)
                # test finite
                if not np.any(np.isfinite(raw.data[bar])):
                    novalidvalue = "%s: No finite value in column %s." % (foo, bar)
                    SpectralAxis.add_comment(novalidvalue)

        # same procedure for the flux columns (note: the unit precedence is
        # reversed relative to the spectral axis above).
        for foo, bar in self.Obs.FluxAxis().items():
            unit = self.Obs.FluxAxis.units or \
                   raw.columns[bar].unit
            try:
                FluxAxis.add_column(foo,
                     raw.data[bar],
                     unit=unit,
                     null=raw.columns[bar].null,
                     description=self.Data.FluxAxis[foo]['description'],
                     format=raw.columns[bar].format,
                     dtype=raw.columns[bar].dtype)
                FluxAxis.add_comment("%s %s" % (comment, foo))

            except:
                if foo not in self.Data.FluxAxis.names:
                    pass # pass silently. This must be some other config item as
                         # Obs should already have been validated.
                elif bar not in raw.names:
                    failcolcom = "Invalid FITS Table column: %s" % (bar)
                    FluxAxis.add_comment(failcolcom)
                else:
                    failcolcom = "failed for unknown causes"
                    FluxAxis.add_comment(failcolcom)
            else:
                # test dtype
                if self.Data.FluxAxis[foo]['dtype'] != raw.columns[bar].dtype:
                    dtypemismatch = "%s: Dtype of source (%s) does not match spec (%s)" % \
                    (foo, raw.columns[bar].dtype, self.Data.FluxAxis[foo]['dtype'])
                    FluxAxis.add_comment(dtypemismatch)
                # test finite
                if not np.any(np.isfinite(raw.data[bar])):
                    novalidvalue = "%s: No finite value in column %s." % (foo, bar)
                    FluxAxis.add_comment(novalidvalue)

        # replace any existing tables with this version name, then append.
        self.Data.FluxAxis.data.tables = [foo for foo in self.Data.FluxAxis.data.tables
                                    if foo.table_name != version]
        self.Data.FluxAxis.data.append(FluxAxis)

        self.Data.SpectralAxis.data.tables = [foo for foo in self.Data.SpectralAxis.data.tables
                                        if foo.table_name != version]
        self.Data.SpectralAxis.data.append(SpectralAxis)

        return

    def from_none(self, version):
        """ fallback reader: install empty axis tables under 'version'.

            Used when no source was given or the format has no from_* reader.
        """
        for axis in ('FluxAxis', 'SpectralAxis'):
            store = self.Data.__dict__[axis].data
            # drop any existing table with this name before appending
            store.tables = [t for t in store.tables
                              if t.table_name != version]
            store.append(Table(name=version))

        return

    def from_fits(self, source, version, **kwargs):
        """ read fits arrays and use header to build spectral axis
        """
        source = source[0]
        hdu = kwargs.get('hdu') or \
              self.Obs.get('FITS', 'hdu', int) or \
              0

        comment = 'fitsfile has array column'
        FluxAxis = Table(name=version)
        SpectralAxis = Table(name=version)

        try:
            HDUlist = pyfits.open(source)
        except:
            raise IOError('pyfits could not open data file')

        if len(HDUlist) > hdu and isinstance(HDUlist[hdu], pyfits.PrimaryHDU):
            raw = HDUlist[hdu]
            head = raw.header
            data = raw.data.squeeze()
            btpx = raw.data.dtype
        else:
            if len(HDUlist) < hdu:
                err = 'hdu %s exceeds file HDU list length %s' % (hdu, len(HDUlist))
            else:
                err = 'hdu %s is not a PrimaryHDU' % hdu
            raise IOError(err)

        # figure out columns in fits image
        fluxvecs = [foo for foo in self.Obs.FluxAxis().keys()
                        if foo in self.Data.FluxAxis.names]

        # get units
        bzero = head.get(self.Obs.FITS.BZERO) or np.double(0)
        bscale = head.get(self.Obs.FITS.BSCALE) or np.double(1)
        fluxunit = self.Obs.FluxAxis.units or \
               head.get(self.Obs.FITS.BUNIT) \
               or "Counts"

        if len(fluxvecs) != 0:

            for foo in fluxvecs:
                bar = self.Obs.get('FluxAxis', foo, int)
                if len(data.shape) == 1: values = data
                elif len(data.shape) == 2: values = data[bar, :]
                else:
                    raise Exception('i do not understand Naxis>2 values')
                try:
                    FluxAxis.add_column(foo,
                         (bscale * values) + bzero,
                         unit=fluxunit,
                         null='',
                         description=self.Data.FluxAxis.description[foo],
                         format='',
                         dtype=btpx)
                    FluxAxis.add_comment("%s %s" % (comment, foo))
                except:
                    print 'failed to add fluxaxis vector %s' % foo
                    failcolcom = "Failed to add data in column %s from input file %s to %s Table as column %s" % (bar, source, "FluxAxis", foo)
                    FluxAxis.add_comment(failcolcom)
        else:
            print 'Are you sure the model %s is correct?' % (self.model)
            print 'There are no valid column mappings for the FluxAxis'

        # see if file is using image for spectral units. I hope not both!
        specvecs = [foo for foo in self.Obs.SpectralAxis().keys()
                        if foo in self.Data.SpectralAxis.names]

        # get spectral units
        try:
            cunit1 = self.Obs.FITS.CUNIT1
            wtype = head.get(cunit1).upper()
            if cunit1[0:4] == "WAT1":
                wtypes = self.parse_wtype(wtype)
                specunit = wtypes['units']
            else:
                specunit = wtype
        except:
            specunit = self.Obs.SpectralAxis.units or 'Angstroms'
        else:
            specunit = unit.lower().title()

        if len(specvecs) != 0:

            for foo in specvecs:
                bar = self.Obs.get('SpectralAxis', foo, int)
                if len(data.shape) == 1: values = data
                elif len(data.shape) == 2: values = data[bar, :]
                else:
                    raise Exception('i do not understand Naxis>2 values')
                try:
                    SpectralAxis.add_column(foo,
                         values,
                         unit=specunit,
                         null='',
                         description=self.Data.SpectralAxis.description[foo],
                         format='',
                         dtype=btpx)
                    SpectralAxis.add_comment("%s %s" % (comment, foo))
                except:
                    print 'failed to add fluxaxis vector %s' % foo
                    failcolcom = "Failed to add data in column %s from input file %s to %s Table as column %s" % (bar, source, "FluxAxis", foo)
                    SpectralAxis.add_comment(failcolcom)
        else:
            # rebuild spectral axis
            ctype1 = head.get(self.Obs.FITS.CTYPE1).upper() or 'LINEAR'
            if ctype1 == 'LINEAR':
                vector = self.return_linear_wave_vector(raw)
            else:
                raise Exception('i dont have any other ctypes implemented.')
            try:
                foo = 'Value'
                SpectralAxis.add_column(foo,
                     vector,
                     unit=unit,
                     null='',
                     description=self.Data.SpectralAxis.description[foo],
                     format='',
                     dtype=btpx)
            except:
                print 'failed to add spectral axis column %s' % foo
                failcolcom = "Failed to add spectral vector from input file %s to %s Table as column %s" % (source, "SpectralAxis", foo)
                SpectralAxis.add_comment(failcolcom)


        self.Data.FluxAxis.data.tables = [foo for foo in self.Data.FluxAxis.data.tables
                                    if foo.table_name != version]
        self.Data.FluxAxis.data.append(FluxAxis)

        self.Data.SpectralAxis.data.tables = [foo for foo in self.Data.SpectralAxis.data.tables
                                        if foo.table_name != version]
        self.Data.SpectralAxis.data.append(SpectralAxis)

        return

    #--------------------------------------------------------- Return Factories
    def return_linear_wave_vector(self, raw):
        """ build linear wave vector from FITS keys

            raw: a pyfits HDU whose header carries the NAXIS1/CRVAL1/CRPIX1/
            CDELT1 keywords (through the Obs FITS keyword mapping).  Returns
            the wavelength array, converted out of log10 and/or vacuum
            according to the Obs SpectralAxis store/air flags.
        """
        head = raw.header
        bp = raw.data.dtype

        cpx = 1.0
        sax = ['NAXIS1', 'CRVAL1', 'CRPIX1', 'CDELT1']
        dax = [head.get(self.Obs.get('FITS', foo, '')) for foo in sax]
        # wave = CRVAL1 + CDELT1*(1 - CRPIX1) + CDELT1*pixel + (1 - CRPIX1)
        # NOTE(review): the trailing (cpx - dax[2]) term is not scaled by
        # CDELT1, unlike the usual linear-WCS formula -- confirm intended.
        lax = dax[1] + dax[3] * (1. - dax[2]) + \
              dax[3] * (np.arange(dax[0], dtype=bp)) + \
              (cpx - dax[2])

        if self.Obs.SpectralAxis.store == 'log10': lax = np.double(10.) ** lax
        if self.Obs.SpectralAxis.air == 'vac':  lax = vac2air(lax)
        return lax

    def _mask_badvalues(self, data, badvalues):
        """ apply one axis' comma-separated badvalue rules to a masked array.

            badvalues: a string such as '<0,999-1000' (see parsebadvalues),
            or None/'' for no rules.  Returns the (further) masked array.

            The truthiness test replaces the former identity comparisons
            ('is not None and is not ""'), which relied on string interning.
        """
        if badvalues:
            for bad in badvalues.split(','):
                p1, p2, p3 = parsebadvalues(bad)
                if p1 == 'equality':
                    if p2 == '=':
                        data = ma.masked_values(data, p3, copy=False)
                    elif p2 == '<':
                        data = ma.masked_less(data, p3, copy=False)
                    elif p2 == '>':
                        data = ma.masked_greater(data, p3, copy=False)
                elif p1 == 'range':
                    data = ma.masked_inside(data, p2, p3)
        return data

    def return_mask(self):
        """ build content masks based on content of Spectral/FluxAxis.

            Masks non-finite values on both axes, applies each axis' Obs
            badvalue rules, and returns the union of the two masks.
        """
        # ----- create masked flux array
        # Q1: do data have intrinsic masks, like sdss?
        # Q2: do the data have intrinsic bad values?
        # Q3: can one specify a priori that certain data ranges are bad?
        # Q4: re Q3 -- is that an Obs or SpectralDM dependency?
        # Q5: what can I implement now?
        # A5: badvalues that are NaN or Inf;
        # A5: badvalues that are known, like "0"
        # A5: bad could be a range or a logical operator. Setup for both.
        # A5: assume there is nothing negative.
        flux = ma.masked_array(self.FluxAxis.Value, copy=True)
        flux = ma.masked_invalid(flux, copy=False)
        flux = self._mask_badvalues(flux, self.Obs.FluxAxis.badvalues)

        # badwaves? Could be lambda1-lambda2, >=<lambda1
        wave = ma.masked_array(self.SpectralAxis.Value, copy=True)
        wave = ma.masked_invalid(wave, copy=False)
        wave = self._mask_badvalues(wave, self.Obs.SpectralAxis.badvalues)

        # union: wave carries its own mask; supplying flux.mask ORs them in.
        return ma.masked_array(wave, mask=flux.mask).mask

    #-------------------------------------------- Fix, Reset or Check Functions
    def reset(self, meta=False, all=False):
        """ quickly switch to the base spectra, with options to rebuild Obs or
            remove all changed versions.

            Optional:
                    meta: rebuild the Obs metadata model from self.model.
                    all:  drop every spectral subset except 'base'.
        """
        self._version('base')

        if meta:
            self._meta(model=self.model)

        if all:
            # keep only the base tables on each axis.  This compares with ==;
            # the previous identity test (table_name is 'base') depended on
            # string interning and could silently discard the base tables too.
            for axis in Axes:
                tables = getattr(self.Data.__dict__[axis].data, 'tables')
                setattr(self.Data.__dict__[axis].data, 'tables',
                        [foobject for foobject in tables
                                  if foobject.table_name == 'base'])
            self._version.intersection_update(['base'])
        return

    def tests(self, action='warn'):
        """ Run various self checks on the current data.

            action: 'warn' annotates failing checks; 'fail' raises
            AssertionError on the first failure.  Returns an (info, warn)
            pair of dicts keyed by check name.
        """
        def _warn():
            return 'Warning!'
        def _fail():
            raise AssertionError
        act = (action == 'fail') and _fail or _warn

        warnings = {'Axis Pixels':'Flux and Spectral Axis Pixels do not agree',
                    'Spectral Spacing':'Spectral Axis has non-uniform spacing',
                    'Dynamic Range':'Dynamic Range > 2.5 orders of magnitude'
                    }
        info = {}
        problems = {}

        wave = self.SpectralAxis.Value
        flux = self.FluxAxis.Value
        dwave = np.unique(np.diff(wave))
        shape = np.unique([wave.shape[0], flux.shape[0]])

        # each entry: (did the check pass?, detail value for the info dict)
        checks = {
            'Spectral Spacing': (dwave.ptp() == 0,
                          len(dwave) == 1 and list(dwave)[0] or list(dwave)),
            'Axis Pixels': (shape.ptp() == 0,
                          len(shape) == 1 and list(shape)[0] or list(shape)),
            'Dynamic Range': (np.log10(self.flux.ptp()) < 2.5,
                              2.5 * np.log10(self.flux.ptp())),
        }

        for name, outcome in checks.items():
            mesg = ''
            try:
                assert outcome[0]
            except:
                mesg = act()
                mesg = "%s %s" % (mesg, warnings[name])
                problems[name] = mesg
            finally:
                info[name] = outcome[1]

        return info, problems

def parsebadvalues(value):
    """ parse a string to figure out if it contains a logical or range operator.

        Returns a triple (kind, a, b):
            ('equality', op, number)  for '=x', '<x', '>x', or a bare number
            ('range', lo, hi)         for 'lo-hi' (either end may be negative)

        examples:
                value = '<333'
                value = '333-444'
                value = '<-333'
                value = '=0'
                value = '-333--111'
    """
    equals = ['=', '<', '>']
    if sum([value.count(foo) for foo in equals]) > 0:
        # operator form: first char is the operator, the rest is the number.
        # (builtin float replaces np.float, removed in numpy >= 1.20; they
        # were aliases, so behavior is unchanged.)
        p1, p2, p3 = 'equality', value[0], float(value[1:])
    elif value.count('-') > 0:
        # range form: split on '-'; an empty piece marks a negative endpoint
        # (an empty piece at position 0 negates the first value, any later
        # empty piece negates the second).
        v = [float(foo) for foo in value.split('-') if foo != '']
        nv = [(not z and [0] or [1])[0]
              for z, foo in enumerate(value.split('-')) if foo == '']
        vmin, vmax = [i in nv and foo * -1 or foo for i, foo in enumerate(v)]
        p1, p2, p3 = 'range', vmin, vmax
    else:
        # a bare number is treated as an equality rule.
        p1, p2, p3 = 'equality', '=', float(value)
    return p1, p2, p3

def triple(s, v, o):
    """ join subject, verb and object into one comment string; an absent
        (None) verb yields the empty string.
    """
    if v is None:
        return ''
    return "%s %s %s" % (s, v, o)

class version(set):
    """ subclass of set that has a pointer to a current version in that set.

        Uses:
             1. a = version(['foo','bar'])
                a.add('one')
             2. a() == 'foo' # True
             3. a('bar')  # returns 'bar' and moves the pointer
             4. a() == 'bar' # True

        Calling with an unknown name returns None and leaves the pointer
        alone.  Normal attribute assignment is ignored (see __setattr__).
    """
    def __init__(self, *args, **kwargs):
        """ optionally seed the set from the positional args and set the
            initial pointer from the 'version' kwarg (when it is a seeded
            name) or the first seeded name.
        """
        base = kwargs.get('version') or None
        if len(args) > 0:
            for arg in args:
                self.update(arg)
            if base is None or base not in args[0]:
                base = isinstance(args[0], list) and args[0][0] or args[0]
        object.__setattr__(self, 'version', base)

    def __call__(self, arg=None, parent=None):
        """ return the current pointer, first moving it to 'arg' when given
            (or to the first member when no pointer is set yet); unknown
            names return None without moving the pointer.

            BUGFIX: previously read self._version, an attribute that is never
            set (the pointer is stored as 'version'), so every no-arg call
            raised AttributeError.
        """
        call = arg or self.version or list(self)[0]
        if call in self:
            object.__setattr__(self, 'version', call)
            return call

    def __setattr__(self, n, v):
        """ ignore assignment except for the internal '_version' slot. """
        if n == '_version': object.__setattr__(self, n, v)
