# Joao Lencart e Silva: j.lencart@ua.pt
import csv
import math
import os
import sys
import numpy
import netCDF4
from netCDF4 import Dataset
from datetime import datetime
from odcutils._etc import _parts
import fnmatch
from odcutils.ncgen.netCDFGen import ncgen
import loaders
import operator
from string import maketrans

# Set the default text encoding used by the netCDF4 library when it
# decodes/encodes attribute strings in the files handled below.
netCDF4.default_encoding = 'utf-8'

class netCDFLoader:
    def __init__(self):
        # Initioalize list of cdl files
        self.newcdl = []
        self.netcdf_name = []
        self.csv = csv
    
    def go(self, type, gfile, loadit = True):
#    def templateCDL(self, type, gfile):
        """Templates, creates and load data into neCDF file.
           Use one global attribute file for each template type (e.g.
           CTD, ADCP, Minibat).
                Usage:
                    go(type, gfile)
                    
                    type:
                        - 'CTD'   : SBE, RBR and IDRONAUT CTDs
                        - 'TSO'   : Star-Oddi milli-T and TD thermistors
                        - 'ADCP'  : ADCP from EPIC format
                        - 'AWS'   : Automatic weather station (Davis pro)
                        - 'GLD'   : Gliders such as the Guildline MiniBat
                    gfile: Global attributes file. Please see examples and
                          documentation for formats regarding each type.
                                                                
           This top level function can be called to spawn the templated
           cdl files for troubleshooting new templates to optionally
           generate and load netCDF files at a later time.
                Usage:
                    go(type, gfile, loadit = False)
            """ 
        self.type = type
        # Open definition input files
        f1 = open(gfile, 'rt')
        self.gfile = []
        # Read in global and dimension files into a list of dictionaries
        # one per row
        reader = csv.DictReader(f1, delimiter = ',' , \
                                quoting = csv.QUOTE_NONNUMERIC)
        for line in reader:
            self.gfile.append(line)
        self.nfiles = len(self.gfile)
        f1.close()
        # Choose templater
        if (type == "CTD") | (type == "TSO") | (type == "AWS") | \
           (type == "GDL"):
            self._template()
        elif type == "ADCP_moored_EPIC":
            self._preload_metadata_epic()
        # And then run the template
            self._template()
        elif type == "ADCP_hull":
            print "not yet implemented\n"
        else:
            print "Just choose from what you're offered!\n"

        # Run all loading functions if loadit is true
        if loadit:
            self.createNetCDF()
            self.load()


    def _preload_metadata_epic(self):
        """Private method to insert the correct dimensions in the template"""
        # Loop all lines in attribute table
        for line in self.gfile:
        # Open netCDF file
            nc = Dataset(line['source_file'], 'r')
        # Get all global attributes and values
            ncattrs = nc.ncattrs()
            ncattr_vals = map(nc.getncattr, ncattrs)
        # add the colon
            ncattrs = map(operator.add, ':' * len(ncattrs), ncattrs)
        # Build dictionary
            ncattr_dict = dict(zip(ncattrs, ncattr_vals))
        # Copy water level metadata to new keys according to FISUA
            ncattr_dict[':water_depth'] = nc.WATER_DEPTH
            ncattr_dict[':water_depth_datum'] = nc.WATER_DEPTH_datum
            ncattr_dict[':water_depth_source'] = nc.WATER_DEPTH_source
        # Update the ncattr_dict to add new keys and values  line from gfile,
        # retain the values on gfile and discard the duplicates on
        # the EPIC file
#            print 'line 1', line
#            print 'ncattr1', ncattr_dict
            ncattr_dict.update(line)
#            print 'ncattr2', ncattr_dict
        # Update line of gfile to add all new keys (all conflicting keys were
        # retained in previous line)
            line.update(ncattr_dict)
#            print 'line 2', line
#            print 'Beguin DEBUG1'
#            for k in line.keys():
#                print k, line[k]
#            print 'End DEBUG'
#            print 'Beguin DEBUG2'
#            for l in self.gfile:
#                for k in l:
#                    print k, l[k]
#            print 'End DEBUG'
        # Look for time and bin dimensions
            time_dim = len(nc.dimensions['time'])
            z_dim = len(nc.dimensions['depth'])
        # Find start date, water depth, water depth source and datum
            sdate = nc.start_time
            edate = nc.stop_time
            do = datetime.strptime(sdate,'%d-%b-%Y %H:%M:%S')
            sdate = do.strftime('%Y%m%d%H%M%S')
            do = datetime.strptime(edate,'%d-%b-%Y %H:%M:%S')
            edate = do.strftime('%Y%m%d%H%M%S')
        # write this to each line dictionary
            line['time']                = time_dim
            line['z']                   = z_dim
            line[':start_date']         = int(sdate)
            line[':end_date']           = int(edate)
# The following 3 lines were commented to force the value in gfile
#            line[':water_depth']        = float(wdepth)
#            line[':water_depth_datum']  = wdepth_d
#            line[':water_depth_source'] = wdepth_s
        # Find if _Fill value is equal for all variables
        #    fv = [];
        #    for k in nc.variables.keys():
        # Try if the variable has attribute _FillVaue
        #        try:
        #            fv.append(nc.variables[k]._FillValue)
        #        except AttributeError:
        #            continue
        # Convert to set which finds unique values
        # Non unique values of _FillValue are handled in the loader
        #    if len(set(fv)) == 1:
        #        val = fv.pop()
        #        line[':_FillValue'] = val
        #        line[':missing_value'] = val
            line[':_FillValue'] = nc.variables['u_1205']._FillValue
            line[':missing_value'] = nc.variables['u_1205']._FillValue
        # Close netCDF file
            nc.close()

    def _template(self):
#    def _ctdtemplate(self):
        """Private template function for all datastructures.
           Builds cdl files acording to the supplied global attributes file.
        """

        for i in range(self.nfiles):
        # Read in template
            incdl = self._choosetemplate(self.gfile[i])
        # Skip if station_id field exists in gfile and is not empty
            try:
                cond = len(self.gfile[i][':station_id']) != 0
            except KeyError:
        # If :station_id does not exist
                cond = False
        # if station_id is not defined in gfile
            if not(cond):
        # Create composite fields
                self.gfile[i][':station_id'] = \
                    self.gfile[i][':institution_code'] + \
                    self.gfile[i][':station_name']
        # Create output lsts
            acdl = incdl[:]
        # Run trough all the lines in cdl file and replace value of each key
            for line in acdl:
        # Loop keys in gfile
                for key in self.gfile[i]:
        # Retain conventions from the template
                    if key == ':Conventions': continue
        # Build attribute phrase
                    mkey = str(key) + ' ='
                    if line.startswith(mkey):
                        val = self.gfile[i][key]
        # Check if val is digit
                        if _parts.isDigit(val):
        # If so, convert it to str
                            val = str(val)
        # Strip all non nomerical characters and join with comma
        # Make translation table
                            tt = maketrans(',', ' ')
        # Translate comma to space and strip all kinds of brackets
                            val = val.translate(tt, '{}[]()')
                            val = val.split()
                            val = ', '.join(val)
                            aline = key + ' = ' + val + ';\n'
                            acdl[acdl.index(line)] = aline
                            break
                        else:
        # If it isnt digit incase val in double quotes
                            aline = key + ' = "' + str(val) +\
                                    '";\n'
                            acdl[acdl.index(line)] = aline
                            break
                    else:
                        continue
        # Change the name of the netcdf file to be created
            netcdf_name = self.gfile[i][':institution_code'] + '_' + \
                            self.gfile[i][':instrument_code'] + '_' + \
                            '%6.0f' % self.gfile[i][':start_date'] + '_' +\
                            self.gfile[i][':station_id'] + '_' + \
                            self.gfile[i][':instrument_sn'] + '_'\
                            + 'L' + \
                            '%i' % self.gfile[i][':process_level']
            self.netcdf_name.append(netcdf_name)
        # Find line with filename
            for line in acdl:
                if line.split()[0] == 'netcdf':
                    acdl[acdl.index(line)] = 'netcdf '+ netcdf_name + '{\n'
        # Build new templates
            nzero = math.floor(math.log(self.nfiles,10)) + 1
            fmt = '%0' + str(nzero) + 'd'
            self.newcdl.append(self.type + '_new' + fmt % i + '.cdl')
            f2 = open(self.newcdl[i],'w')
            f2.write(''.join(acdl))
            f2.close()

    def createNetCDF(self):
        """Creates the netcdf files templated with _template method.
        """
############################ USES EXTERNAL ncgen #######################
#        for cdl in self.newcdl:
#            cmd = 'ncgen -b ' + cdl
#            os.system(cmd)
########################################################################
        for cdl in self.newcdl:
        # Create temporary file
            [nc_name, dims, vars, atts] = ncgen(cdl, nc_name = 'nc.tmp')
        # Open temporary file
            nc = Dataset(nc_name)
        # Global _FillValue
            globfv = nc._FillValue
        # Check if _FillValues exist for all variables except dimensions
            fv = [];
            allhavefv = True
            dims = nc.dimensions.keys()
            for k in nc.variables.keys():
                if k in dims:
                    continue
        # Try if the variable has attribute _FillValue
                try:
                    fv.append(nc.variables[k]._FillValue)
                except AttributeError:
                    allhavefv = False
                    continue
        # Close temporary file
            nc.close()
        # Delete temporary file
            os.remove('nc.tmp')
        # If not every files has _FillValue
            if ~allhavefv:
        # Create final file with common _FillValue for all
                [nc_name, dims, vars, atts] = ncgen(cdl, fill_value = globfv)
            else:
        # Create final file as in the template
                [nc_name, dims, vars, atts] = ncgen(cdl)

    def load(self, loader = None):
        """Chooses loader from loaders module and loads the nc files created.
        Current supported formats for loader:
            - 'CTDSBE': all Seabird electronics CTD profilers
            - 'CTDRBR': RBR XR620 CTD single cast file from matlab processing
            - 'CTDIDR': IDR320 CTD single cast file from IHPT
            - 'TSO'   : Star-Oddi milli-T and TD thermistors
            - 'ADCP' : ADCP from EPIC format
            - 'GLD' : Guildline minibat ondulator
        """
        if loader == None:
        # Define instrument codes
            icodes = ['CTDSBE', 'CTDRBR', 'TSO' , 'ADCP', 'AWS', 'GLD', 'CTDIDR']
        # Find loader type from instrument code in 1st line of gfile
            for ic in icodes:
                if self.gfile[0][':instrument_code'].find(ic) > -1:
                    loader = ic
                    break
        # Loop all lines in input parameters looking for each data file
        for i in range(len(self.gfile)):
        # Except for ADCP and Weather stations
            if not(loader in  ['ADCP', 'AWS']):
        # Open data file
                f1 = open(self.gfile[i]['source_file'])
        # Dump all data into intermediate variable
                dump = f1.readlines()
                f1.close()
        # Define netcdf filename
            fname = self.netcdf_name[i] + '.nc'
        # Choose loader
            if loader == 'CTDSBE':
                print 'Now entering loader for source file',\
                         self.gfile[i]['source_file'],\
                      'and target file', fname
                loaders.loadSBE(dump, fname, self.gfile[i])
            elif loader == 'CTDRBR':
                loaders.loadRBR(dump, fname, self.gfile[i])
            elif loader == 'CTDIDR':
                loaders.loadIDR(dump, fname, self.gfile[i])
            elif loader == 'TSO':
                loaders.loadDST(dump, fname, self.gfile[i])
            elif loader == 'ADCP':
                loaders.loadADCPEPIC(fname, self.gfile[i])
            elif loader == 'GLD':
                loaders.loadMB(dump, fname, self.gfile[i])
            elif loader == 'AWS':
                print 'Working on it!'
            else:
                print 'Loader for ' + loader + ' format, not implemented.'
                print 'Nothing done'
                return


    def _choosetemplate(self, gline):
        """Returns the lines of the correct template file"""
        # Get path to local install of  netCDFLoader.py
        basedir = os.path.dirname(os.path.realpath(__file__))
        # Up one level
        basedir = os.path.dirname(basedir)
        # Build path to _template directory
        templdir = os.path.join(basedir,'templates')
        # Build template filename
        icode = str(gline[':instrument_code'])
        plevel = 'L' + str(int(gline[':process_level']))
        # Try to open file with instrument code and process level
        try:
            fname = icode + '_' + plevel + '.cdl'
            fname = os.path.join(templdir,fname)
            f = open(fname)
            lines = f.readlines()
            f.close()
        except IOError:
        # If unlucky try with just instrument code
            fname = icode + '.cdl'
            fname = os.path.join(templdir,fname)
            f = open(fname)
            lines = f.readlines()
            f.close()
        return lines
