#!/usr/bin/env python

import numpy as np
import constants 
import itertools

# Registry of derived fields: name -> (name, function, combine_function, kwargs).
# Populated by add_derived_field() and read by DerivedFieldCollection.
derived_field_info = {}


class OpenFile :
    '''
    Base class for reading whitespace-separated text data files
    (header lines are denoted by a hash '#').

    Subclasses set self.fields / self.types and call get_numpy_data()
    to load the file into a numpy structured array.
    '''
    def __init__( self, filename ) :
        self.filename = filename
        self.dictdata = None         # sorted DMhalo + AGN data; dict key is particle id
        self.importdata = None       # temp dict to store imported DMhalo data while it is sorted
        self._data = None            # numpy structured array loaded from self.filename
        self._header = None          # reserved for a parsed header (never assigned yet)
        self.fields = None           # native field names, set by subclasses
        self.derived_fields = None   # derived field names, set by subclasses
        self.imported_fields = None  # fields imported from DM halo
        self._imported_data = None   # imported data from DMhalo
        self._imported_header = None # imported header from DMhalo
        self.bsize = 128             # box size in code units (for periodic distances)

    def get_numpy_data( self, _dtype, _usecols=None ) :
        '''Load the file on first call; return (data, header).'''
        # Fix: 'is None' instead of '== None' -- once _data holds a numpy
        # array, '== None' compares elementwise and breaks this truth test
        # on every call after the first.
        if self._data is None :
            self._build_numpy_data( _dtype, _usecols )
        return self._data, self._header

    def _build_numpy_data( self, _dtype, _usecols ) :
        '''Read self.filename into self._data with np.loadtxt.'''
        print( 'Loading %s' % self.filename )
        self._data = np.loadtxt( self.filename, _dtype, usecols = _usecols )

    def return_field( self, name ) :
        '''
        Return the named column: a native field, a derived field, or a field
        imported from the DM-halo file. Prints an error and returns None for
        unknown names.
        '''
        # If the field is a regular field, just return it.
        if self.fields is not None and name in self.fields :
            return self._data[ name ]
        # If the field is a derived field, call _derived_field.
        if name in derived_field_info :
            return self._derived_field( name )
        # Fix: guard against imported_fields being None (classes without
        # imported data) -- 'name in None' raised a TypeError before.
        if self.imported_fields is not None and name in self.imported_fields :
            return self._imported_data[ name ]
        print( 'Error: The %s field is not output by this class' % name )
        return None

    # Returns (despite the name) the list of available fields.
    def print_fields( self ) :
        if self.fields is None :
            print( 'Error: No available fields in this class' )
            return None
        return self.fields

    # Returns the list of available imported fields.
    def print_imported_fields( self ) :
        if self.imported_fields is None :
            print( 'No available imported fields in this class' )
            return None
        return self.imported_fields

    # Returns the list of available derived fields.
    def print_derived_fields( self ) :
        if self.derived_fields is None :
            print( 'No available derived fields in this class' )
            return None
        return self.derived_fields

    # Makes a DerivedFieldCollection object to return the requested derived field.
    def _derived_field( self, _name ) :
        dfc = DerivedFieldCollection( self )
        return dfc.__getitem__( _name )



class DerivedField :
    '''
    Callable wrapper pairing a field-producing function with a combine
    function that post-processes the produced outputs.
    '''
    def __init__(self, inputobject, name, function, combine_function, units="", n_ret=0 ) :
        self.__doc__ = function.__doc__
        self.__name__ = name
        self._inputobject = inputobject
        self.func = function
        self.c_func = combine_function
        self.n_ret = n_ret  # number of values func returns (informational)

    # General 'call' entry point that makes the class callable. It simply
    # redirects to _call_function for now; this leaves room to dispatch to
    # different functions later depending on what's needed.
    def __call__( self, *args, **kwargs ) :
        return self._call_function( args, kwargs )

    # Runs the producing function, then returns the result of the combine
    # function applied to its output (e.g. _BHtime followed by division).
    def _call_function( self, args, kwargs ) :
        # Fix: forward the caller's arguments properly with *args/**kwargs --
        # the original passed the raw tuple and dict as two extra positional
        # arguments, so keyword arguments never reached the function.
        retval = self.func( self._inputobject, *args, **kwargs )
        return self.c_func( retval )


class DerivedFieldCollection(object) :
    '''
    Dict-like view over the derived-field registry; __getitem__ builds the
    DerivedField and evaluates it against the wrapped input object.
    '''
    functions = derived_field_info  # shared module-level registry

    def __init__( self, inputobject) :
        self.inputobject = inputobject

    # Look up a derived field by name, construct it, and return its value.
    def __getitem__( self, key ) :
        if key not in self.functions :
            # Fix: return instead of falling through to a KeyError right
            # after printing the error message.
            print( "Error: We haven't constructed the functions to derive %s" % key )
            return None
        args = self.functions[key][:3]  # (name, function, combine_function)
        kwargs = self.functions[key][3] # extra keyword arguments, e.g. n_ret

        df = DerivedField(self.inputobject, *args, **kwargs)
        # Fix: call with no arguments -- the old code re-passed the registry
        # entry (and this collection itself) as meaningless positional args.
        return df()

    def keys( self ) :
        return self.functions.keys()


#Adds a new derived field, specifying the name, function, combine_function, and any other keyword arguments such as n_ret
def add_derived_field( name, **kwargs ) :
    '''
    Register a new derived field in the module registry.

    Requires both 'function' and 'combine_function' keyword arguments; any
    remaining keyword arguments (such as n_ret) are stored alongside them.
    '''
    required = ( 'function', 'combine_function' )
    if any( key not in kwargs for key in required ) :
        print( "Error: Not adding derived field %s because both function and combine function necessary" % name )
        return
    producer = kwargs.pop( "function" )
    combiner = kwargs.pop( "combine_function" )
    derived_field_info[ name ] = ( name, producer, combiner, kwargs )

# The following two functions are needed to create another derived field.
def _BHtime(inputobject, *args, **kwargs ) :
    '''
    This function collects average BH mass and accretion rates to get a time scale
    '''
    Mbh = inputobject.return_field('avgMbh')
    Macc = inputobject.return_field('Macc')
    return Mbh, Macc
def _combDivision(  outputs ) :
    return outputs[0]/outputs[1]
def _combAddition(  outputs ) :
    return outputs[0]+outputs[1]
def _combMultiplication(  outputs ) :
    return outputs[0]*outputs[1]
def _combSubtraction(  outputs ) :
    return outputs[0]-outputs[1]



# Register the 'BHtime' derived field: avgMbh / Macc (the pair produced by
# _BHtime, divided by _combDivision) -- a black-hole growth time scale.
add_derived_field('BHtime', function=_BHtime, combine_function=_combDivision, n_ret=2 )
# We have created the derived field "BHtime"





class PlotInput( OpenFile ) :
    '''
    Reads a plotting-control text file listing, per row: run directory,
    epoch, object id (halo or black hole), line color, line style, and
    legend label. Each piece of information is then available as a field.

    Ex:
    input.txt:
    rundir 1.0 11111 r . legend_label

    input = PlotInput( 'input.txt' )
    input.return_field('ids') # Returns a list of ids
    Fields: rundirs, aexps, ids, linecolors, linestyles, legend_labels, png_folder
    '''
    def __init__( self, filename ) :
        OpenFile.__init__( self, filename )

        # Every column is read as a fixed-width byte string.
        names = [ 'rundirs', 'aexps', 'ids', 'linecolors', 'linestyles', 'legend_labels' ]
        formats = [ 'S100' ] * len( names )

        self.fields = names
        self.types = formats
        self.dtype = np.dtype( { 'names' : names, 'formats' : formats } )

        self.get_numpy_data( self.dtype )

class BlackHoleData( OpenFile ) :
    '''
    This class takes data from the print_agn_Macc_a?.????.txt files
    # Mbh[Msun/h] BHx BHy BHz [code] Macc[Msun/h/Myr] Eddratio avgMbh[Msun/h]
    '''
    def __init__( self, filename ) :

        OpenFile.__init__( self, filename )

        self.fields = [ 'Mbh', 'BHx', 'BHy', 'BHz', 'Macc', 'Eddratio', 'avgMbh' ]
        # Fix: builtin float instead of np.float -- the alias was deprecated
        # and removed in numpy 1.24, so np.float raises AttributeError there.
        self.types = [ float ] * len( self.fields )
        self.dtype = np.dtype({'names':self.fields,'formats':self.types})
        self.usecols = (0,1,2,3,4,5,6)
        self.get_numpy_data( self.dtype, self.usecols )

        self.derived_fields = [ 'Luminosity' ]

    # Fix: this def was indented with 3 spaces in the original, which is a
    # syntax error (inconsistent indentation inside the class body).
    def _calculate_derived_field( self, name ) :
        '''Validate a derived-field name; the actual calculation is not implemented yet.'''
        if name not in self.derived_fields :
            print( 'Error: %s is not an implemented derived field' % name )
            return None
        
 
class AGNData( OpenFile ) :
    '''
    This class takes data from the print_agn_a?.????.txt files
    # particle_id BHM BHx BHy BHz

    It also loads the matching print_DM_halo file and, via populate_dict()
    and sort(), attaches the nearest DM halo (periodic minimum-image
    distance) to each black hole. After sort(), self.dictdata maps
    particle_id -> (BHx, BHy, BHz, BHM, DMx, DMy, DMz, Mdm, Rvir).
    '''

    # Column index of each field inside a merged self.dictdata entry.
    _DICT_INDEX = { 'BHx' : 0, 'BHy' : 1, 'BHz' : 2, 'BHm' : 3,
                    'DMx' : 4, 'DMy' : 5, 'DMz' : 6, 'Mdm' : 7, 'Rvir' : 8 }

    def __init__( self, filename ) :

        OpenFile.__init__( self, filename )

        self.fields = ['particle_id','BHM', 'BHx', 'BHy', 'BHz']
        # Fix: builtin float instead of np.float (alias removed in numpy 1.24).
        self.types = [ float ] * len( self.fields )
        self.dtype = np.dtype({'names':self.fields,'formats':self.types})
        self.usecols = (0,1,2,3,4)
        self.get_numpy_data( self.dtype, self.usecols )

        # Imports the corresponding DMhalo data based on file name:
        # replace the file name, create a DMHalo object with the new name,
        # and pull in all of its fields and data.
        DMhaloname = filename.replace('agn/print_agn','agn/print_DM_halo')
        print( 'Replaced DMhaloname' )
        DMhalo = DarkMatterHaloData(DMhaloname)
        print( 'Made DarkMatterHaloData object' )
        self.imported_fields = DMhalo.fields
        print( 'Imported fields' )
        self._imported_data = DMhalo._data
        print( 'Imported data' )
        self._imported_header = DMhalo._header
        print( 'Imported header' )

        self.populate_dict()
        self.sort()

    def populate_dict( self ):
        '''
        Build self.dictdata (particle_id -> (BHx, BHy, BHz, BHM)) from the
        native data, and self.importdata (row number -> (DMx, DMy, DMz,
        Mdm, Rvir)) from the imported DM-halo data. The importdata rows are
        matched to particle ids later by sort().
        '''
        # Fix: builtin zip instead of itertools.izip -- the result is
        # consumed immediately and izip does not exist on Python 3.
        bh_rows = zip( self.return_field('particle_id'), self.return_field('BHx'),
                       self.return_field('BHy'), self.return_field('BHz'),
                       self.return_field('BHM') )
        self.dictdata = dict( (row[0], row[1:]) for row in bh_rows )

        halo_rows = zip( self.return_field('DMx'), self.return_field('DMy'),
                         self.return_field('DMz'), self.return_field('Mdm'),
                         self.return_field('Rvir') )
        self.importdata = dict( enumerate( halo_rows ) )

    def _periodic_delta( self, a, b ) :
        '''Minimum-image separation of coordinates a, b in a periodic box of size self.bsize.'''
        d = abs( a - b )
        if d > ( self.bsize / 2 ) :
            d = abs( self.bsize - d )
        return d

    def sort( self ) :
        '''
        For each black hole in dictdata, find the nearest DM halo in
        importdata (periodic squared distance) and append its row to the
        black hole's entry, giving (BHx, BHy, BHz, BHM, DMx, DMy, DMz,
        Mdm, Rvir) per particle id.
        '''
        for pid in self.dictdata :
            # Fix: use a None sentinel for "no distance yet". The original
            # detected the first iteration with 'mindist == self.bsize',
            # which collides with a genuine squared distance of exactly
            # bsize and then concatenated instead of replacing the halo.
            mindist = None
            nearest = None
            bh = self.dictdata[pid]
            for halo in self.importdata.values() :
                dx = self._periodic_delta( bh[0], halo[0] )
                dy = self._periodic_delta( bh[1], halo[1] )
                dz = self._periodic_delta( bh[2], halo[2] )
                dist = dx*dx + dy*dy + dz*dz
                if mindist is None or dist < mindist :
                    mindist = dist
                    nearest = halo
            if nearest is not None :
                # Keep the 4 native BH values, then tack on the halo row.
                self.dictdata[pid] = self.dictdata[pid][:4] + tuple( nearest )
        return None

    def dictdata_return( self, name ) :
        '''
        Return a list of the named column pulled from every merged
        dictdata entry (see _DICT_INDEX for the valid names).
        '''
        if name not in self._DICT_INDEX :
            print( 'no such field' )
            return None
        col = self._DICT_INDEX[ name ]
        return [ self.dictdata[ key ][ col ] for key in self.dictdata ]

    def _calculate_derived_field( self, name ) :
        '''Validate a derived-field name; the actual calculation is not implemented yet.'''
        # Fix: guard against derived_fields being None (AGNData never sets
        # it) -- 'name not in None' raised a TypeError before.
        if not self.derived_fields or name not in self.derived_fields :
            print( 'Error: %s is not an implemented derived field' % name )
            return None
        
    
class DarkMatterHaloData( OpenFile ) :
    '''
    This class takes in print_DM_halo_a?.????.dat
    # DMx DMy DMz [code] Mdm [Msun/h] Rvir [kpc/h]
    '''
    def __init__( self, filename ) :

        OpenFile.__init__( self, filename )

        self.fields = [ 'DMx', 'DMy', 'DMz', 'Mdm', 'Rvir' ]
        # Fix: builtin float instead of np.float -- the alias was deprecated
        # and removed in numpy 1.24, so np.float raises AttributeError there.
        self.types = [ float ] * len( self.fields )
        self.dtype = np.dtype({'names':self.fields,'formats':self.types})
        self.usecols = (0,1,2,3,4)
        self.get_numpy_data( self.dtype, self.usecols )

class SinkFileData( OpenFile ) :
    '''
    This class takes in a black hole logfile ??????.txt (only the key columns for now)
    # Particle_id aexp Old_Macc[Msun/Myr] New_Macc[Msun/Myr] T[K] cs_cell cs_sink[km/s] dv_cell dv_sink[km/s] rho_cell rho_sink[/cm^3] Efb[eV] BHx BHy BHz[code] BHvx vy vz[km/s] Estored[eV] Efbthreshold[eV] Msink[Msun] Weightedmass[Msun] Mbh[Msun] cs_cell/cs_sink dv_cell/dv_sink rho_cell/rho_sink Efb/delta_t[eV/Myr] New_Eddratio Old_Eddratio r_K[kpc] Mbondibetafact/Mbondifact cs_EOS[km/s] delta_t[Myr]
    '''
    def __init__( self, filename ) :

        OpenFile.__init__( self, filename )

        self.fields = [ 'aexp', 'Macc', 'T' ]
        # Fix: builtin float instead of np.float -- the alias was deprecated
        # and removed in numpy 1.24, so np.float raises AttributeError there.
        self.types = [ float ] * len( self.fields )
        self.dtype = np.dtype({'names':self.fields,'formats':self.types})

        # Columns 1, 3, 4: aexp, New_Macc, T.
        self.usecols = ( 1, 3, 4 )

        self.get_numpy_data( self.dtype, self.usecols )
        


    
#class MyLogFormatter(LogFormatter) :
    
#    def __call__(self,x,pos=None) :
#        if abs(log10(x)) < 2 :
#            return '$%g$' % (x,)
#        else :
#            return '$10^{%g}$' % (log10(x),)
