import os
import numpy
from numpy import result_type as numpy_result_type
import netCDF4
import csv
import re
import textwrap
import time
from .                 import __version__, __Conventions__, __file__
from .space            import Space
from .transform        import Transform
from .field            import Field
from .fieldlist        import FieldList
from .cellmethods      import CellMethods
from .coordinate       import Coordinate
from .coordinatebounds import CoordinateBounds
from .utils            import (RTOL, ATOL, equals, 
                               parse_indices, subspace_array)
from .data             import Data
from .units            import Units

# --------------------------------------------------------------------
# Number of bytes per word in the input PP files
# --------------------------------------------------------------------
_word_size = 4

# PP missing-data indicator for real header values (the BMDI flag
# value meaning 'no missing data')
_pp_rmdi   = -1e+30

# --------------------------------------------------------------------
# Set default UM version from the environment. Only the first '.' is
# replaced, so e.g. UMVERSION=6.6 is stored as '606'.
# --------------------------------------------------------------------
if 'UMVERSION' in os.environ:
    _UMVERSION = os.environ['UMVERSION'].replace('.', '0', 1)
else:
    _UMVERSION = None

# --------------------------------------------------------------------
# Constants for converting creating unrotated lat/lon auxiliary
# coordinates
# --------------------------------------------------------------------
_PI          = numpy.pi
_PI_OVER_180 = _PI/180.0

# --------------------------------------------------------------------
# Number matching regular expression. A raw string, so that the '\d'
# escapes reach the re module verbatim rather than being (mis)treated
# as string escape sequences.
# --------------------------------------------------------------------
_number_regex = r'([-+]?\d*\.?\d+(e[-+]?\d+)?)'

# --------------------------------------------------------------------
# Datetime object that copes with non-standard calendars
# --------------------------------------------------------------------
datetime = netCDF4.netcdftime.datetime

# --------------------------------------------------------------------
# Cache for derived values
# --------------------------------------------------------------------
_cache = {'runid' : {},
          'latlon': {},
          'time'  : {},
          }

# --------------------------------------------------------------------
# Names of PP integer and real header items
# --------------------------------------------------------------------
_lhd_items = ('lbyr', 'lbmon', 'lbdat', 'lbhr', 'lbmin', 'lbday',
              'lbyrd', 'lbmond', 'lbdatd', 'lbhrd', 'lbmind',
              'lbdayd', 'lbtim', 'lbft', 'lblrec', 'lbcode', 'lbhem',
              'lbrow', 'lbnpt', 'lbext', 'lbpack', 'lbrel', 'lbfc',
              'lbcfc', 'lbproc', 'lbvc', 'lbrvc', 'lbexp', 'lbegin', 
              'lbnrec', 'lbproj', 'lbtyp', 'lblev', 'lbrsvd1',
              'lbrsvd2', 'lbrsvd3', 'lbrsvd4', 'lbsrce', 'lbuser1',
              'lbuser2', 'lbuser3', 'lbuser4', 'lbuser5', 'lbuser6',
              'lbuser7')

_bhd_items = ('brsvd1', 'brsvd2', 'brsvd3', 'brsvd4', 
              'bdatum', 'bacc', 'blev', 'brlev', 'bhlev', 'bhrlev',
              'bplat', 'bplon', 'bgor',
              'bzy', 'bdy', 'bzx', 'bdx', 'bmdi', 'bmks')

# --------------------------------------------------------------------
# Positions of PP header items in their arrays. These are defined in
# the same order as _lhd_items and _bhd_items, so e.g. lhd[lbyr] is
# the header item named by _lhd_items[lbyr].
# --------------------------------------------------------------------
(lbyr, lbmon, lbdat, lbhr, lbmin, lbday,
 lbyrd, lbmond, lbdatd, lbhrd, lbmind,
 lbdayd, lbtim, lbft, lblrec, lbcode, lbhem,
 lbrow, lbnpt, lbext, lbpack, lbrel, lbfc,
 lbcfc, lbproc, lbvc, lbrvc, lbexp, lbegin, 
 lbnrec, lbproj, lbtyp, lblev, lbrsvd1,
 lbrsvd2, lbrsvd3, lbrsvd4, lbsrce, lbuser1,
 lbuser2, lbuser3, lbuser4, lbuser5, lbuser6,
 lbuser7
 ) = range(45)

(brsvd1, brsvd2, brsvd3, brsvd4, 
 bdatum, bacc, blev, brlev, bhlev, bhrlev,
 bplat, bplon, bgor,
 bzy, bdy, bzx, bdx, bmdi, bmks
 ) = range(19)

# --------------------------------------------------------------------
# Assign CF standard name attributes to PP axis codes. (The full list
# of field code keys may be found at
# http://cms.ncas.ac.uk/html_umdocs/wave/@header.)
# --------------------------------------------------------------------
_coord_standard_name = {
    1   : 'air_pressure', 
    2   : 'altitude',
    3   : 'atmosphere_hybrid_sigma_pressure_coordinate',
    4   : 'depth',
    5   : 'model_level_number',        
    6   : 'air_potential_temperature',
    7   : 'atmosphere_sigma_coordinate',
    10  : 'latitude',
    11  : 'longitude',
    13  : 'region',
    14  : 'atmosphere_hybrid_height_coordinate',
    15  : 'height',
    20  : 'time',      # time (gregorian)
    23  : 'time',      # time (360_day)
    40  : 'pseudolevel',        # pseudolevel. THIS IS NOT A STANDARD NAME!!    #dch should this be None?
    }

# --------------------------------------------------------------------
# Assign CF long names attributes to PP axis codes. (Used for axis
# codes, such as 40, which have no CF standard name.)
# --------------------------------------------------------------------
_coord_long_name = {
    40 : 'pseudolevel',
    }

# --------------------------------------------------------------------
# Assign CF units attributes to PP axis codes.
# --------------------------------------------------------------------
_coord_units = {
    1   : 'hPa',           # air_pressure                      
    2   : 'm',             # altitude         
    3   : '1',             # atmosphere_hybrid_sigma_pressure_coordinate
    4   : 'm',             # depth                                  
    5   : '1',             # model_level_number                         
    6   : 'K',             # air_potential_temperature
    7   : '1',             # atmosphere_sigma_coordinate               
    10  : 'degrees_north', # latitude                               
    11  : 'degrees_east',  # longitude                                
    13  : '',              # region                                     
    14  : '1',             # atmosphere_hybrid_height_coordinate          
    15  : 'm',             # height                                      
    20  : 'days',          # time (gregorian)                    
    23  : 'days',          # time (360_day)
    40  : '1',             # pseudolevel
    }

# --------------------------------------------------------------------
# Assign CF axis attributes (T, Z, Y or X) to PP axis codes.
# --------------------------------------------------------------------
_coord_axis = {
    1   : 'Z',   # air_pressure                       
    2   : 'Z',   # altitude                                     
    3   : 'Z',   # atmosphere_hybrid_sigma_pressure_coordinate  
    4   : 'Z',   # depth                                        
    5   : 'Z',   # model_level_number                          
    6   : 'Z',   # air_potential_temperature
    7   : 'Z',   # atmosphere_sigma_coordinate                
    10  : 'Y',   # latitude                                     
    11  : 'X',   # longitude                                    
    13  : None,  # region                                       
    14  : 'Z',   # atmosphere_hybrid_height_coordinate          
    15  : 'Z',   # height                                       
    20  : 'T',   # time (gregorian)                                         
    23  : 'T',   # time (360_day)                                         
    40  : None,  # pseudolevel                                    
    }

# --------------------------------------------------------------------
# Assign CF positive attributes to PP axis codes.
# --------------------------------------------------------------------
_coord_positive = {
    1   : 'down',  # air_pressure                     
    2   : 'up',    # altitude                                  
    3   : 'down',  # atmosphere_hybrid_sigma_pressure_coordinate 
    4   : 'down',  # depth                                     
    5   : None,    # model_level_number                         
    6   : 'up',    # air_potential_temperature
    7   : 'down',  # atmosphere_sigma_coordinate               
    10  : None,    # latitude                                   
    11  : None,    # longitude                                   
    13  : None,    # region                                     
    14  : 'up',    # atmosphere_hybrid_height_coordinate         
    15  : 'up',    # height                                      
    20  : None,    # time (gregorian)                                          
    23  : None,    # time (360_day)                                        
    40  : None,    # pseudolevel                                    
    }

# --------------------------------------------------------------------
# Translate LBVC codes to PP axis codes. A value of None means that
# the LBVC code has no corresponding axis code. (The full list of
# field code keys may be found at
# http://cms.ncas.ac.uk/html_umdocs/wave/@fcodes.)
# --------------------------------------------------------------------
_lbvc_codes = {
    1   :  2,   # altitude (Height) 
    2   :  4,   # depth (Depth)
    3   : None, # (Geopotential (= g*height))
    4   : None, # (ICAO height)
    6   :  5,   # model_level_number  
    7   : None, # (Exner pressure)
    8   :  1,   # air_pressure  (Pressure)
    9   :  3,   # atmosphere_hybrid_sigma_pressure_coordinate (Hybrid pressure)
    10  :  7,   # atmosphere_sigma_coordinate (Sigma (= p/surface p))   ## dch check
    16  : None, # (Temperature T)
    19  :  6,   # air_potential_temperature (Potential temperature)
    27  : None, # (Atmospheric) density
    28  : None, # (d(p*)/dt .  p* = surface pressure)
    44  : None, # (Time in seconds)
    65  : 14,   # atmosphere_hybrid_height_coordinate (Hybrid height)
    129 : None, # Surface
    176 : 10,   # latitude    (Latitude)
    177 : 11,   # longitude   (Longitude)
    }

# --------------------------------------------------------------------
# Characters used in decoding LBEXP into a runid (base-36 digits:
# a-z then 0-9)
# --------------------------------------------------------------------
_characters = ('a','b','c','d','e','f','g','h','i','j','k','l','m',
               'n','o','p','q','r','s','t','u','v','w','x','y','z',               
               '0','1','2','3','4','5','6','7','8','9'
               )   
_n_characters = len(_characters)

# --------------------------------------------------------------------
# Names of PP extra data codes. Each extra data vector is preceded by
# an integer code 1000*IA + IB, where IA is the vector length and IB
# is the type code keyed here (UMDP F3).
# --------------------------------------------------------------------
_extra_data_name = {
    1  : 'x',
    2  : 'y',
    3  : 'y_domain_lower_bound',
    4  : 'x_domain_lower_bound',
    5  : 'y_domain_upper_bound',
    6  : 'x_domain_upper_bound',
    7  : 'z_domain_lower_bound',
    8  : 'z_domain_upper_bound',  # was 'x_domain_upper_bound', a
                                  # copy-paste duplicate of code 6
    10 : 'title',
    11 : 'domain_title',
    12 : 'x_lower_bound',
    13 : 'x_upper_bound',
    14 : 'y_lower_bound',
    15 : 'y_upper_bound',
    }

# ====================================================================
#
# PPFileArray object
#
# ====================================================================

class PPFileArray(object):
    ''' 
A PP field's array, stored as a pointer into its PP file.

The actual values are only read from disk (via a memory map) when the
object is indexed, so creating a PPFileArray is cheap.

'''
    __slots__ = ('_binary_mask',
                 '_file',
                 '_file_offset',
                 'add_offset',
                 'dtype',
                 '_FillValue',
                 'ndim',
                 'scale_factor',
                 'shape',
                 'size',
                 )


    def __init__(self, **kwargs):
        '''
        
**Initialization**

:Parameters:

    kwargs :
        Attributes are set from keywords and their arguments. Only
        the names listed in __slots__ may be given.

**Examples**

>>> ppfile
<open file 'file.pp', mode 'rb' at 0xc45e00>
>>> a = PPFileArray(_file=ppfile.name, _file_offset=ppfile.tell(),
                    dtype=numpy.dtype('float32'), shape=(73, 96), size=7008,
                    ndim=2)

'''
        # Use dict.items rather than the Python-2-only dict.iteritems
        # so that this also works under Python 3.
        for attr, value in kwargs.items():
            setattr(self, attr, value)
    #--- End: def
 
    def __getitem__(self, indices):
        '''
x.__getitem__(indices) <==> x[indices]

Returns a (masked, unpacked) numpy array of the requested subspace of
the field's data, read from the PP file on demand.

''' 
        # ------------------------------------------------------------
        # Read the array from the PP file. A read-only memory map
        # avoids loading the whole field when only a subspace is
        # wanted.
        # ------------------------------------------------------------
        mm_array = numpy.memmap(self._file, mode = 'r',
                                offset = self._file_offset,
                                dtype  = self.dtype,
                                shape  = self.shape)

        indices = parse_indices(mm_array, indices)

        array = subspace_array(mm_array, indices)

        # ------------------------------------------------------------
        # Convert to a masked array
        # ------------------------------------------------------------
        if hasattr(self, '_FillValue'):
            # _FillValue is set so mask any missing values
            fill_value = self._FillValue
            array = numpy.ma.array(array, mask=(array == fill_value),
                                   fill_value=fill_value, copy=True)
            array.shrink_mask()
        else:
            # _FillValue is not set, so there are no missing values.
            # Copy so that the result does not reference the memory
            # map.
            array = numpy.ma.array(array, copy=True)

        # Close the file by dropping the last reference to the memory
        # map
        del mm_array

        # ------------------------------------------------------------
        # Unpack the array using the scale_factor and add_offset, if
        # either is available
        # ------------------------------------------------------------
        if hasattr(self, 'scale_factor') and self.scale_factor != 1.0:
            array *= self.scale_factor

        if hasattr(self, 'add_offset') and self.add_offset != 0.0:
            array += self.add_offset

        # Convert to a 0/1 mask if requested (used for PP logical
        # fields)
        if hasattr(self, '_binary_mask') and self._binary_mask:
            array = numpy.ma.where(array!=0, 1, 0)

        # ------------------------------------------------------------
        # Return the array
        # ------------------------------------------------------------
        return array
    #--- End: def

    def __str__(self):
        '''
x.__str__() <==> str(x)

'''
        return '%s%s' % (self.__class__.__name__, self.shape)
    #--- End: def
   
#--- End: class


# ====================================================================
#
# PPFileArrayBounds object
#
# ====================================================================

class PPFileArrayBounds(object):
    '''  
Lazy coordinate-bounds array built from separate lower- and
upper-bound file arrays, presented with a trailing size-2 axis.

'''
    __slots__ = ('_lower'    ,
                 '_upper'    ,
                 'dtype'     ,
                 'shape'     ,
                 'size'      ,
                 'ndim'      ,
                 )

    def __init__(self, lower, upper):
        '''
Store the lower- and upper-bound arrays and derive the combined
array's dtype, shape, size and dimensionality.

'''
        self._lower = lower
        self._upper = upper

        # The combined array promotes the two dtypes and gains a
        # trailing (lower, upper) axis of size 2
        self.ndim  = lower.ndim + 1
        self.size  = 2 * lower.size
        self.shape = lower.shape + (2,)
        self.dtype = numpy_result_type(lower.dtype, upper.dtype)
    #--- End: def
   
    def __getitem__(self, indices):
        '''
x.__getitem__(indices) <==> x[indices]

'''
        # ------------------------------------------------------------
        # Read both bound arrays in full from the PP file and combine
        # them into a single array with a trailing (lower, upper) axis
        # ------------------------------------------------------------
        lower_values = self._lower[...]
        upper_values = self._upper[...]
        combined     = numpy.column_stack((lower_values, upper_values))

        return subspace_array(combined, parse_indices(combined, indices))
    #--- End: def

    def __str__(self):
        '''
x.__str__() <==> str(x)

'''
        return self.__class__.__name__ + str(self.shape)
    #--- End: def
   
#--- End: class


class PPMeta(object):
    '''
Metadata for a single PP field, decoded from the field's integer and
real headers.

Initialization consumes the field's headers, data and extra data from
the given open PP file, leaving the file positioned at the start of
the next field. A PPMeta instance is falsy if end-of-file was reached
before a header could be read.

'''

    def __init__(self, ppfile):
        '''

**Initialization**

:Parameters:

    ppfile : file
        An open PP file, positioned at the start of a field.

'''
        # Set attributes giving the default data type for reals and
        # integers. The basic assumption is that data are native
        # endian.
        self.float32      = numpy.dtype('<f4').newbyteorder('=')
        self.int32        = numpy.dtype('<i4').newbyteorder('=')
        self.str32        = numpy.dtype('<S4').newbyteorder('=')

        # The data types of floats, ints and strings in the PP
        # file. These start out native endian and are byte-swapped
        # below if the file turns out to be wrong endian.
        self.file_float32 = self.float32
        self.file_int32   = self.int32
        self.file_str32   = self.str32

        # ------------------------------------------------------------
        # Read 1st block control word
        # ------------------------------------------------------------
        bcw1 = numpy.fromfile(ppfile, dtype=self.file_int32, count=1)
   
        # Stop now if we have reached the end of the file. (Explicit
        # size/value tests rather than "if not bcw1", whose truth
        # value for an empty array is deprecated by numpy.)
        if bcw1.size == 0 or bcw1[0] == 0:
            self._nonzero = False
            return

        # Change data types if we have a wrong endian PP
        # file. ('Wrong' simply means different to that assumed by a
        # PPMeta instance.) 256 read with swapped bytes appears as
        # 65536.
        if bcw1[0] == 65536:        
            self.file_float32 = self.float32.newbyteorder('S')
            self.file_int32   = self.int32.newbyteorder('S')
            self.file_str32   = self.str32.newbyteorder('S')
        elif bcw1[0] != 256:
            raise RuntimeError("Not a PP field: Magic number = %s" % bcw1[0])
        
        # ------------------------------------------------------------
        # Read the integer and real headers (and the 2nd and 3rd block
        # control words, which account for the 2 words read into bhd
        # beyond the 19 named real header items).
        # ------------------------------------------------------------
        lhd = numpy.fromfile(ppfile, dtype=self.file_int32  , count=45)
        bhd = numpy.fromfile(ppfile, dtype=self.file_float32, count=21)
        
        # ------------------------------------------------------------
        # Set some derived metadata quantities: LBTIM = 100*IA +
        # 10*IB + IC
        # ------------------------------------------------------------
        self.lbtim_ia, ib            = divmod(lhd[lbtim], 100)
        self.lbtim_ib, self.lbtim_ic = divmod(ib, 10)
        
        time_units = 'days since %d-%d-%d' % (lhd[lbyr], lhd[lbmon], lhd[lbdat])
        if self.lbtim_ic == 1:
            calendar = 'gregorian'
        else:
            calendar = '360_day'
        
        self.reftime = Units(time_units, calendar)

        # Numeric validity (vtime) and data (dtime) times relative to
        # the reference time. (A cache keyed on the header date items
        # used to live here -- see _cache['time'] -- but was disabled.)
        v = (lhd[lbyr] , lhd[lbmon] , lhd[lbdat] , lhd[lbhr] , lhd[lbmin])
        d = (lhd[lbyrd], lhd[lbmond], lhd[lbdatd], lhd[lbhrd], lhd[lbmind])
        
        vdatetime = datetime(v[0], v[1], v[2], v[3], v[4])
        ddatetime = datetime(d[0], d[1], d[2], d[3], d[4])
        
        for attr, t in zip(('vtime', 'dtime'), (vdatetime, ddatetime)):
            setattr(self, attr, netCDF4.date2num(t, time_units, calendar))
        #--- End: for

        # Source model and version: LBSRCE = 10000*version + model
        version, model = divmod(lhd[lbsrce], 10000)
        if model > 0:
            self.model = 'UM'
        
        if version > 0:
            self.version = str(version)
                
        # ------------------------------------------------------------
        # Set the X and Y axis codes from LBCODE
        # ------------------------------------------------------------
        code = lhd[lbcode]
        if code in (1, 101, 2, 102):
            # Regular lat/lon grid
            self.ix, self.iy = 11, 10
        elif code >= 10000:
            # Cross section: LBCODE = 10000 + 100*ix + iy
            temp, ix = divmod(code, 10000)
            self.ix, self.iy = divmod(ix, 100)
        else:
            self.ix, self.iy = None, None
        
        # Determine if we have a site-time cross section and, if so,
        # if it is a timeseries (the LBUSER3 == LBROW test -- TODO
        # confirm this condition's meaning against UMDP F3)
        self.site_time_cross_section = False
        self.timeseries              = False
        if self.ix == 13 and self.iy == 23:
            self.site_time_cross_section = True
            self.timeseries = (lhd[lbuser3] == lhd[lbrow])
        #--- End: if
        
        # Set the vertical axis code from LBVC
        self.iv = _lbvc_codes.get(lhd[lbvc], None)
        
        # Set the time axis code
        if calendar == 'gregorian':
            self.it = 20
        else:
            self.it = 23
        
        self.lhd = lhd
        self.bhd = bhd  

        self.ppfile = ppfile

        # ------------------------------------------------------------
        # Set the data (note that this rebinds self.data from the
        # bound method to the field's data array, or to None)
        # ------------------------------------------------------------
        self.data()

        # ------------------------------------------------------------
        # Set the extra data
        # ------------------------------------------------------------
        self.extra_data()

        self._nonzero = True
    #--- End: def

    def __nonzero__(self):
        '''
x.__nonzero__() <==> bool(x)

False only when initialization reached end-of-file before a header
could be read.

'''
        return self._nonzero
    #--- End: if

    # Python 3 spelling of __nonzero__
    __bool__ = __nonzero__

    def __str__(self):
        '''
x.__str__() <==> str(x)

'''
        out = [self.printfdr()]        
        
        attrs = ('file_float32','file_int32', 'file_str32',
                 'lbtim_ia', 'lbtim_ib', 'lbtim_ic',
                 'reftime', 'vtime', 'dtime',
                 'version', 'model',
                 'it', 'iv', 'ix', 'iy', 
                 'site_time_cross_section', 'timeseries',
                 'ppfile')

        # Some attributes may legitimately be unset (e.g. 'version'
        # when LBSRCE does not record one), hence the None default
        out.extend('%s=%s' % (attr, getattr(self, attr, None))
                   for attr in attrs)
            
        out.append('')

        return '\n'.join(out)   
    #--- End: def

    def printfdr(self):
        '''
Return a multi-line string of the field's integer and real header
items, followed by any extra data.

'''
        # Format each header item as NAME:_value. The underscore glues
        # each name to its value so that textwrap.fill never splits
        # them across lines; it is swapped back for ': ' afterwards.
        # (zip truncates bhd to the 19 named real header items,
        # ignoring the 2 trailing block control words.)
        fdr = ['%s:_%s' % (name.upper(), value)
               for name, value in zip(_lhd_items, self.lhd)]
        fdr.extend('%s:_%s' % (name.upper(), value)
                   for name, value in zip(_bhd_items, self.bhd))

        fdr = textwrap.fill(' '.join(fdr), width=79)
        fdr = [fdr.replace(':_', ': ')]

        if self.extra:
            fdr.append('EXTRA DATA:')
            for key in self.extra:
                fdr.append('%s: %s' % (key, str(self.extra[key])))
        #--- End: if

        fdr.append('')

        return '\n'.join(fdr)
    #--- End: def

    def extra_data(self):
        '''
        
Read the extra data (if any) of the PP field at the current position
in the given PP file, decode it and store it in the field's metadata
object (as self.extra, a dict keyed by _extra_data_name values).

**Excerpt from UMDP F3 vn7.8**

The data record may include 'extra data' in addition to the usual
grid-point values. Cross-section fields must always be followed by
extra data giving the x- and y-coordinate values of each column and
row of the grid. (This is to allow irregular grid). It is also
possible to use extra data in conjunction with other types of field,
eg. a character string description of the field. The header variable
LBEXT shows the length of extra data associated with a field. Thus the
total length (in words) of the data record (LBLREC) equals the number
of grid-point values plus LBEXT. Extra data is arranged in vectors,
each of which is made up of an integer code followed by data
values. The integer code is (1000xIA + IB), where IA is the length of
the vector in words (i.e. usually the number of data values) and IB is
a standard code. A zero integer code indicates no (more) extra data.
For cross-sections the extra data MUST start with an x-vector followed
by a y-vector; optionally other extra data may follow.

:Returns:

    None

'''
        ppfile = self.ppfile

        if self.lhd[lbext] <= 0:
            # Skip the 4th block control word and return with no extra
            # data
            ppfile.seek(_word_size, os.SEEK_CUR) 
            self.extra = {}
            return
        #--- End: if
    
        # Still here? Then parse the extra data
        extra = {}
    
        domain_titles = []
    
        # Number of words of extra data parsed so far (named n_read
        # rather than 'len' so as not to shadow the builtin)
        n_read = 0        
        while n_read < self.lhd[lbext]:
    
            intcode = numpy.fromfile(ppfile, dtype=self.file_int32, count=1)
    
            # Stop if we have run out of extra data. Testing the size
            # first avoids an IndexError on intcode[0] when the file
            # is truncated and fromfile returns an empty array.
            if intcode.size == 0 or intcode[0] == 0:
                break
    
            # Find ia = size of this extra data variable, 
            #      ib = type of this extra data variable
            ia, ib = divmod(intcode[0], 1000)
            
            if ib in (10, 11):
                # Read characters from the file (ia words of 4
                # characters each)
                title = numpy.fromfile(ppfile, dtype=self.file_str32, count=ia)
                title = ''.join(title)
                if ib == 10:
                    extra[_extra_data_name[ib]] = numpy.array([title])
                else:
                    domain_titles.append(title)
            else:
                # Store the array of numbers as a file pointer
                extra[_extra_data_name[ib]] = PPFileArray(_file=ppfile.name,
                                                          _file_offset=ppfile.tell(),
                                                          dtype=self.file_float32,
                                                          shape=(ia,), size=ia, ndim=1)            
                # Skip to the next extra data array
                ppfile.seek(ia*_word_size, os.SEEK_CUR)
            #--- End: if
    
            n_read += ia + 1
        #--- End: while    
    
        # Pair up matching lower/upper bound vectors into single
        # bounds arrays, removing the separate vectors
        for bounds_type in ('', '_domain'):
    
            for dim in ('x', 'y'):
                dim_bounds_type = dim + bounds_type
                lower  = dim_bounds_type + '_lower_bound'
                upper  = dim_bounds_type + '_upper_bound'
                bounds = dim_bounds_type + '_bounds'
                
                if lower in extra and upper in extra:
                    extra[bounds] = PPFileArrayBounds(extra[lower], extra[upper])         
                    extra.pop(lower)
                    extra.pop(upper)
        #--- End: for
    
        # If we have domain titles then add them to the extra data
        # dictionary as a numpy array
        if domain_titles:
            extra[_extra_data_name[11]] = numpy.array(domain_titles)
    
        # Skip the 4th block control word so that we're ready to read the
        # next PP field
        ppfile.seek(_word_size, os.SEEK_CUR)            
    
        # Add the extra data to the metadata object
        self.extra = extra
    
        # Return the updated metadata object
        return self
    #--- End: def

    def data(self):
        '''
    
Set self.data to a file pointer (PPFileArray) for the field's 2-d
data array, or to None if the field has no data, then advance the
file position past the data.

Note that this rebinds the 'data' attribute: after the call self.data
is a PPFileArray or None, not this method.
    
:Returns:

    None
    
'''
        # Number of data words = record length minus extra data length
        size = self.lhd[lblrec] - self.lhd[lbext]
    
        if not size:
            self.data = None
            return
    
        # Set the data type from LBUSER1
        binary_mask = False
        if self.lhd[lbuser1] == 1:
            datatype = self.file_float32
        elif self.lhd[lbuser1] == 2:
            datatype = self.file_int32
        elif self.lhd[lbuser1] == 3:
            # Presumably a logical field: read as integers and
            # converted to a 0/1 mask by PPFileArray.__getitem__
            datatype = self.file_int32
            binary_mask = True
        else:
            # Unknown data type => assume real
            datatype = self.file_float32
    
        # Set the shape
        shape = (self.lhd[lbrow], self.lhd[lbnpt])
    
        file_offset = self.ppfile.tell()
    
        # Create the data object by setting up a parameters dictionary to
        # initialize a PPFileArray object
        parameters = {'_binary_mask': binary_mask,
                      '_file'       : self.ppfile.name,
                      '_file_offset': file_offset,
                      'dtype'       : datatype,
                      'shape'       : shape,
                      'size'        : size,
                      'ndim'        : 2,
                      }
    
        # Set the _FillValue from BMDI (unless it's the _pp_rmdi flag
        # value, which means the field contains no missing data). Note
        # that the _FillValue must be of the same type as the data
        # values.
        if self.bhd[bmdi] != _pp_rmdi:
            parameters['_FillValue'] = self.bhd[bmdi]
            if datatype == self.file_int32:
                parameters['_FillValue'] = int(parameters['_FillValue'])
    
        # Treat BDATUM as an add_offset if it is not 0.0
        if abs(self.bhd[bdatum]) > ATOL():
            parameters['add_offset'] = self.bhd[bdatum]
    
        # Treat BMKS as a scale_factor if it is not 0.0 or 1.0
        if (abs(self.bhd[bmks]-1.0) > ATOL() + RTOL() and 
            abs(self.bhd[bmks])     > ATOL()):
            parameters['scale_factor'] = self.bhd[bmks]
    
        self.data = PPFileArray(**parameters)
    
        # Skip past the data so that the extra data (or the 4th block
        # control word) can be read next. (extra_data() skips the 4th
        # block control word itself.)
        self.ppfile.seek(file_offset + size*_word_size, 0)
    #--- End: def

#--- End: class


def unrotated_latlon(rotated_lat, rotated_lon, pole_lat, pole_lon):
    '''

Create 2-d arrays of unrotated latitudes and longitudes.

:Parameters:

    rotated_lat : numpy array
        1-d array of latitudes of the rotated grid, in degrees.

    rotated_lon : numpy array
        1-d array of longitudes of the rotated grid, in degrees.

    pole_lat : float
        True latitude of the rotated north pole, in degrees.

    pole_lon : float
        True longitude of the rotated north pole, in degrees.

:Returns:

    unrotated_lat, unrotated_lon : numpy array, numpy array
        2-d arrays, with shape (rotated_lat.size, rotated_lon.size),
        of unrotated (true) latitudes and longitudes in degrees.

'''
    # Make sure pole_lon is in [0, 360). (The rotated longitudes are
    # normalized further down.)
    pole_lon = pole_lon % 360.0

    # Convert everything to radians
    pole_lon *= _PI_OVER_180
    pole_lat *= _PI_OVER_180

    cos_pole_lat = numpy.cos(pole_lat)
    sin_pole_lat = numpy.sin(pole_lat)

    # Create appropriate copies of the input rotated arrays: rot_lon
    # must be a copy because it is modified in place below; rot_lat is
    # never modified in place, so a view suffices.
    rot_lon = rotated_lon.copy()
    rot_lat = rotated_lat.view()

    # Make sure rotated longitudes are in [-180, 180)
    rot_lon %= 360.0
    rot_lon = numpy.where(rot_lon < 180.0,
                          rot_lon, rot_lon-360)

    # Create 2-d arrays of rotated latitudes and longitudes in
    # radians, both with shape (nlat, nlon)
    nlat = rot_lat.size
    nlon = rot_lon.size
    rot_lon = numpy.resize(numpy.deg2rad(rot_lon), (nlat, nlon))    
    rot_lat = numpy.resize(numpy.deg2rad(rot_lat), (nlon, nlat))
    rot_lat = numpy.transpose(rot_lat, axes=(1,0))

    # Find unrotated latitudes (clipping guards arcsin against
    # rounding just outside [-1, 1])
    CPART = numpy.cos(rot_lon) * numpy.cos(rot_lat)
    sin_rot_lat = numpy.sin(rot_lat)
    x = cos_pole_lat * CPART + sin_pole_lat * sin_rot_lat
    x = numpy.clip(x, -1.0, 1.0)
    unrotated_lat = numpy.arcsin(x)
    
    # Find unrotated longitudes
    x = -cos_pole_lat*sin_rot_lat + sin_pole_lat*CPART
    x /= numpy.cos(unrotated_lat)   # dch /0 or overflow here? surely
                                    # lat could be ~+-pi/2? if so,
                                    # does x ~ cos(lat)?
    x = numpy.clip(x, -1.0, 1.0)
    unrotated_lon = -numpy.arccos(x)
    
    # Recover the sign of the longitude lost by arccos
    unrotated_lon = numpy.where(rot_lon > 0.0, 
                                -unrotated_lon, unrotated_lon)
    # Shift longitudes by the pole longitude offset (SOCK), unless the
    # pole is effectively at longitude 0
    if pole_lon >= ATOL():
        SOCK = pole_lon - _PI
    else:
        SOCK = 0
    unrotated_lon += SOCK

    # Convert unrotated latitudes and longitudes to degrees
    unrotated_lat = numpy.rad2deg(unrotated_lat)
    unrotated_lon = numpy.rad2deg(unrotated_lon)

    return unrotated_lat, unrotated_lon
#--- End: def

def _set_coordinate_data(c, m=None,
                         array=None, bounds_array=None, climatology=False, 
                         coord_type=None, extra_type=None,
                         parameters={}):
    '''
    
Set a coordinate's data in place.

:Parameters:

    c : Coordinate

    m : PPMeta
        A PP field's complete metadata.

    array : array-like, optional

    bounds_array : array-like, optional

    climatology : bool, optional

    coord_type : str, optional

    extra_type: str, optional

    parameters : dict, optional

:Returns:

    None

'''

    def _insert_data(c, data=None, bounds=None, climatology=False):
        '''

:Parameters:

    c : Coordinate

    data : array-like, optional

    bounds : array-like, optional

    climatology : bool, optional

:Returns:

    None

'''
        _FillValue = getattr(c, '_FillValue', None)
        units = c.Units

        if array is not None:
            # Set the coordinate's data
            c.Data = Data(data, units=units, _FillValue=_FillValue)

        if bounds_array is not None:
            # Set the coordinate's bounds
            c.bounds      = CoordinateBounds()        
            c.bounds.Data = Data(bounds, units=units, _FillValue=_FillValue)
            if climatology:
                c.climatology = True
    #--- End: def

    if c is None:
        return

    if array is not None or bounds_array is not None:
        _insert_data(c, array, bounds_array, climatology)
        return c
    #--- End: if

    origin        = None
    create_bounds = True

    #-----------------------------------------------------------------
    # Set time data from vtime and dtime
    #-----------------------------------------------------------------
    if coord_type == 't':
        # This PP field's data array does not have time as one of its
        # two axes, so create a size 1 dimension coordinate for time
        ib = m.lbtim_ib
        if ib <= 1 or m.vtime >= m.dtime:
            array = numpy.array(m.vtime)                       # 0-d
        else:
            array = numpy.array(0.5*(m.vtime + m.dtime))       # 0-d
            if ib <= 3:
                bounds_array = numpy.array([m.vtime, m.dtime]) # 1-d
                if ib == 3:
                    climatology = True
    #--- End: if

    # ----------------------------------------------------------------
    #
    # ----------------------------------------------------------------
    if coord_type == 'x':
        if abs(m.bhd[bdx]) <= ATOL() or m.timeseries: # DCH 
            # Create an X coordinate data object from the field's
            # extra data
            if coord_type in m.extra:
                array = m.extra[coord_type]
            
            coord_bounds = coord_type+'_bounds'
            if coord_bounds in m.extra:
                 bounds_array = m.extra[coord_bounds]
                
        else:
            # Find parameters for creating X coordinate data and
            # coordinate bounds data objects
            delta  = m.bhd[bdx]
            origin = m.bhd[bzx] + delta
            size   = m.lhd[lbnpt]

            if (m.ix == 11 and origin + delta*(size-1) > 360.0):
                origin -= 360.0

            if m.ix in (13, 40, 99):
                create_bounds = False                    

    elif coord_type == 'y':
        if abs(m.bhd[bdy]) <= ATOL() or m.timeseries: # DCH Set a
            # Create a Y coordinate data object from the field's extra
            # data
            if coord_type in m.extra:
                array = m.extra[coord_type]
            
            coord_bounds = coord_type+'_bounds'
            if coord_bounds in m.extra:
                bounds_array = m.extra[coord_bounds]

        else:
            # Find parameters for creating Y coordinate data and
            # coordinate bounds data objects
            delta  = m.bhd[bdy]
            origin = m.bhd[bzy] + delta
            size   = m.lhd[lbrow]

            if (m.iy == 11 and origin + delta*(size-1) > 360.0):
                origin -= 360.0

            if m.iy in (13, 40, 99):
                create_bounds = False         
    #--- End: if
        
    if extra_type in m.extra:
        if not extra_type.endswith('bounds'):
            # Create a coordinate data object from the field's extra
            # data
            array = m.extra[extra_type]
        else:
            # Create a coordinate bounds data object from the field's
            # extra data
            bounds_array = m.extra[extra_type]
    #--- End: if

    if origin is not None:
        # Create a 1-d coordinate data object from an origin, size and
        # delta
        array = numpy.arange(origin, origin+delta*size, delta, dtype='float32')

        # Create the 1-d coordinate's bounds array
        if create_bounds:
            bounds_array = numpy.empty((size,2), dtype=array.dtype)          
            bounds_array[:, 0] = array - 0.5*delta
            bounds_array[:, 1] = array + 0.5*delta
    #--- End: if 

    # Set the coordinate's data array
    _insert_data(c, array, bounds_array, climatology)
   
    # Return coordinate
    return c
#--- End: def

def _create_Coordinate(f, key, axis_code=None, m=None, pubattr=None,
                       array=None, bounds_array=None, climatology=False,
                       coord_type=None, extra_type=None,
                       parameters=None, dimensions=None, units=None):
    '''

Create a coordinate variable, set its data and insert it into a
field's space.

:Parameters:

    f : Field
        The field to receive the new coordinate.

    key : str
        The identifier of the coordinate in the field's space. A key
        starting with 'd' denotes a dimension coordinate.

    axis_code : int, optional
        A PP axis code from which default attributes are derived.

    m : PPMeta
        A PP field's complete metadata.

    pubattr : dict, optional
        Extra public attributes to set. A value of None removes the
        attribute.

    array : array-like, optional

    bounds_array : array-like, optional

    climatology : bool, optional

    coord_type : str, optional

    extra_type : str, optional

    parameters : dict, optional

    dimensions : list, optional
        The dimensions spanned by the coordinate.

    units : Units, optional
        If given, override any units derived from the axis code.

:Returns:

    out : Coordinate
        The newly created coordinate.

'''
    # Replace the mutable default arguments. The default list/dict
    # must not be shared between calls: the dimensions list is stored
    # in f.space.dimensions, so a shared default would alias fields.
    if pubattr is None:
        pubattr = {}
    if dimensions is None:
        dimensions = []

    c = Coordinate()

    if axis_code:
        # Set default attributes derived from the axis code
        c.axis          = _coord_axis[axis_code]
        c.positive      = _coord_positive[axis_code]
        c.standard_name = _coord_standard_name[axis_code]
        c.units         = _coord_units[axis_code]

        if c.standard_name is None:
            # No standard name for this axis code: use a long name
            c.long_name = _coord_long_name[axis_code]
            del c.standard_name
        #--- End: if

        # Special cases
        if axis_code in (20, 23):
            # Time: take the units from the field's reference time
            c.Units = m.reftime
        elif 10000 < m.lhd[lbcode]:
            # Rotated lat-lon
            if axis_code == 10:
                c.units         = 'degrees'
                c.standard_name = 'grid_latitude'
            elif axis_code == 11:
                c.units         = 'degrees'
                c.standard_name = 'grid_longitude'
        #--- End: if

        # Remove attributes which are unset (a plain loop, not a list
        # comprehension executed for its side effects)
        for prop in ('axis', 'positive'):
            if c.getprop(prop) is None:
                c.delprop(prop)
    #--- End: if

    # Apply extra public attributes, or overwrite existing ones
    for prop, value in pubattr.iteritems():
        if value is not None:
            c.setprop(prop, value)
        elif c.hasprop(prop):
            c.delprop(prop)
    #--- End: for

    if units is not None:
        # Override units with given units
        c.Units = units.copy()

    # Set the coordinate's data and bounds
    _set_coordinate_data(c, m=m,
                         array        = array,
                         bounds_array = bounds_array,
                         climatology  = climatology,
                         coord_type   = coord_type,
                         extra_type   = extra_type, 
                         parameters   = parameters)

    f.space.dimensions[key] = dimensions

    if key.startswith('d'): # d for dim
        # Find the dimension size
        if hasattr(c, 'size'):
            dimension_size = c.size
        else:
            # The coordinate carries no data, so take the size from
            # the PP header
            if coord_type == 'x':
                dimension_size = m.lhd[lbnpt]
            elif coord_type == 'y':
                dimension_size = m.lhd[lbrow]
            else:
                raise ValueError(
                    "Can't find dimension size for coord_type '%s'" %
                    coord_type)
        #--- End: try
        f.space.dimension_sizes[key] = dimension_size
    #--- End: if

    f.space[key] = c

    # Set the coordinate's netCDF variable name
    c.ncvar = getattr(c, 'standard_name', 
                      getattr(c, 'long_name', None))

    return c
#--- End: def

def _decode_exp(lbexp):
    '''
    
Decode the integer value of LBEXP in the PP header into a runid.

If this value has already been decoded, then it will be returned from
the cache, otherwise the value will be decoded and then added to the
cache.

:Parameters:

    lbexp : int
        The value of LBEXP in the PP header.

:Returns:

    out : str
       A string derived from LBEXP. If LBEXP is a negative integer
       then that number is returned as a string.

**Examples**

>>> _decode_exp(2004)
'aaa5u'
>>> _decode_exp(-34)
'-34'

'''
    if lbexp in _cache['runid']:
        # Return a cached decoding of this LBEXP
        return _cache['runid'][lbexp]

    if lbexp < 0:
        # Negative values carry no encoded runid: return (and cache)
        # the number itself as a string
        runid = str(lbexp)
        _cache['runid'][lbexp] = runid
        return runid

    # Convert LBEXP to a binary string, filled out to 30 bits with
    # zeros. Note that bin() prefixes the digits with '0b', which is
    # sliced off here: str.lstrip('0b'), as used previously, strips
    # *characters* rather than a prefix and was correct only by
    # accident.
    bits = bin(lbexp)[2:].zfill(30)

    # Step through 6 bits at a time, converting each 6 bit chunk into
    # a decimal integer, which is used as an index to the characters
    # lookup list.
    runid = []
    for i in xrange(0,30,6):
        index = int(bits[i:i+6], 2) 
        if index < _n_characters:
            runid.append(_characters[index])
    #--- End: for
    runid = ''.join(runid)

    # Enter this runid into the cache
    _cache['runid'][lbexp] = runid

    # Return the runid
    return runid
#--- End: def 

def _test_pp_condition(pp_condition, m=None):
    '''

Return True if a field satisfies the condition specified for a PP
STASH code to standard name conversion.

:Parameters:

    pp_condition : str

    m : PPMeta
        A PP field's complete metadata.

:Returns:

   out : bool
       True if a field satisfies the condition specified, False
       otherwise.

**Examples**

>>> ok = _test_pp_condition('true_latitude_longitude', m=m)

'''
    if pp_condition == '':
        # Return True if no condition is set
        return True
    
    # Still here?
    if pp_condition == 'true_latitude_longitude':
        if m.lhd[lbcode] in (1, 2):
            return True

        # Check pole location in case of incorrect LBCODE
        if (abs(m.bhd[bplat]-90.0) <= ATOL() + RTOL()*90.0 and 
            abs(m.bhd[bplon]) <= ATOL()):
            return True

    elif pp_condition == 'rotated_latitude_longitude':
        if m.lhd[lbcode] in (101, 102):
            return True

        # Check pole location in case of incorrect LBCODE
        if not (abs(m.bhd[bplat]-90.0) <= ATOL() + RTOL()*90.0 and 
                abs(m.bhd[bplon]) <= ATOL()):
            return True
        
    else:
        raise ValueError(
            "Unknown PP condition in STASH code conversion table: '%s'" %
            pp_condition)

    # Still here? Then the condition has not been satisfied.
    return False
#--- End: def

def _test_umversion(valid_from, valid_to, m=None):
    '''

Return True if the UM version applicable to this field is within the
given range.

If possible, the UM version is derived from the PP header and stored
in the metadata object. otherwise it is taken from the UMVERSION
environment variable.

:Parameters:

    valid_from : str

    valid_to : str

    m : PPMeta

:Returns:

    out : bool
        True if the UM version applicable to this field is within the
        range, False otherwise.

:Raises:

    RuntimeError :
        If the UM version can not be found in the PP header nor the
        envirnment.

**Examples**

>>> ok = _test_umversion('401', '505', m=m)

'''
    if hasattr(m, 'version'):
        umversion = m.version
    elif _UMVERSION is not None:
        umversion = _UMVERSION
    else:
        raise RuntimeError(
"UM version is not set in PP header nor by the UMVERSION environment variable")

    if valid_from is '':
        valid_from = None

    if valid_to is '':
        if valid_from <= umversion:
            return True 
    elif valid_from <= umversion <= valid_to:
        return True 

    return False
#--- End: def

def _field_attributes(f, m=None):
    '''

Set public and private attributes on the field (including the standard
name), derived from the PP header and the STASH conversion table.

:Parameters:

    f : Field
        The field to modify in place.

    m : PPMeta
        A PP field's complete metadata.

:Returns:

    out : Field
        The input field, with its attributes set.

'''
    # Keep references to the raw PP metadata and to the source file
    f.pp   = m
    f.file = m.ppfile.name

    f._FillValue  = m.bhd[bmdi]
    f.Conventions =  __Conventions__
    f.history     = m.history
    f.setprop('runid', _decode_exp(m.lhd[lbexp]))

    # Record the model and, when known, its version as the data source
    if hasattr(m, 'model'):
        if hasattr(m, 'version'):
            f.source = '%s vn%s' % (m.model, m.version)
        else:
            f.source = m.model
    #--- End: if

    if 'title' in  m.extra:
        f.title = m.extra['title']
        
    # ----------------------------------------------------------------
    # Set the field's standard_name, long_name and units from its
    # STASH code and possibly other properties
    # ----------------------------------------------------------------
    stash    = m.lhd[lbuser4]
    submodel = m.lhd[lbuser7]

    f.setprop('stash_code', stash)
    f.setprop('submodel'  , submodel)

    m.cf_info = {}

    # The STASH code has been set in the PP header, so try to find
    # its standard_name from the conversion table
    if (submodel, stash) in _stash2standard_name:
        for (long_name, 
             units,
             valid_from,
             valid_to, 
             standard_name,
             cf_info,
             pp_condition) in _stash2standard_name[(submodel, stash)]:

            # Check that conditions are met: the UM version must lie
            # in the entry's valid range and any extra PP condition
            # attached to the entry must hold
            version_ok      = _test_umversion(valid_from, valid_to, m=m)
            pp_condition_ok = _test_pp_condition(pp_condition, m=m)

            if not (version_ok and pp_condition_ok):                    
                continue

            # Still here? Then we have our standard_name, etc.
            f.long_name = long_name.rstrip()
            if units:
                f.units = units
            if standard_name:
                f.standard_name = standard_name
                                    
            m.cf_info = cf_info
                
            # Use the first matching table entry only
            break
        #--- End: for

    elif stash:
        # No table entry: fall back to a name built from the submodel
        # and STASH code
        f.long_name = 'PP_%d_%d' % (submodel, stash)
    else:
        # No STASH code either: use the PP field code instead
        f.long_name = 'PP_%d_fc%d' % (submodel, m.lhd[lbfc])
    #--- End: if

    # If there is no standard name then set a identifying name based
    # on the submodel and STASHcode (or field code).
    if not hasattr(f, 'standard_name'):
        if stash:
            f.id = 'PP_%d_%d' % (submodel, stash)
        else:
            f.id = 'PP_%d_fc%d' % (submodel, m.lhd[lbfc])
    #--- End: if

    # If there is no long name then set a default
    if not hasattr(f, 'long_name'):
        if stash:
            f.long_name = 'PP_%d_%d' % (submodel, stash)
        else:
            f.long_name = 'PP_%d_fc%d' % (submodel, m.lhd[lbfc])
    #--- End: if

    # Set the field's coordinate's netCDF variable name. Note that
    # there is no danger of f.ncvar being None, since if f has no
    # standard_name then it is guaranteed to have id.
    f.ncvar = f.identity()
        
#
#        try:
#            f.getprop('long_name')
#            try:   # dch this is a nightmare
#                if not (f.getprop('units') == '%' and f.getprop('scale_factor') == 0.01):
#                    f.setprop('units', '%s %s' % (f.getprop('scale_factor'), f.getprop('units')))
#                    f.delprop('scale_factor')
#            except AttributeError:
#                pass
#        except AttributeError:
#            # Default long name in the absence of a conversion
#            f.setprop('long_name', 'STASHcode=%s' % stash)
#    #--- End: if

    return f
#--- End: def

def read_stash2standard_name(table=None, delimiter='!'):
    ''' 

Read the STASH to standard_name conversion table, as found at
http://puma.nerc.ac.uk/STASH_to_CF/STASH_to_CF.txt, into a dictionary.

:Parameters:

    table : str, optional
        Use the conversion table at this file location. By default the
        table will be looked for at
        ``os.path.join(os.path.dirname(cf.__file__),'etc/STASH_to_CF.txt')``

    delimiter : str, optional
        The table's delimiter.

:Returns:

    out : dict
        A dictionary of tuples of lists, keyed by (submodel, STASH
        code) tuples.

*Examples*

>>> stash2sn = read_stash2standard_name()
>>> print stash2sn[(1,2)]
(['U COMPNT OF WIND AFTER TIMESTEP     ',
  'm s-1',
  '',
  '',
  'eastward_wind',
  '',
  'true_latitude_longitude'],
 ['U COMPNT OF WIND AFTER TIMESTEP     ',
  'm s-1',
  '',
  '',
  'x_wind',
  '',
  'rotated_latitude_longitude'])
'''

    # Table columns:
    # 0  Model           
    # 1  STASH code      
    # 2  STASH name      
    # 3  units           
    # 4  valid from UM vn
    # 5  valid to   UM vn
    # 6  standard_name   
    # 7  CF extra info   
    # 8  PP extra info

    if table is None:
        # Use the default conversion table shipped with the package
        package_path = os.path.dirname(__file__)
        table = os.path.join(package_path, 'etc/STASH_to_CF.txt')
    #--- End: if

    # Read the whole table, making sure the file is closed afterwards
    with open(table, 'r') as table_file:
        raw_list = list(csv.reader(table_file, delimiter=delimiter,
                                   skipinitialspace=True))

    # Get rid of leading comment lines
    while raw_list and raw_list[0] and raw_list[0][0].startswith('#'):
        raw_list.pop(0)

    # Symbolic column indices
    (model, stash, name,
     units, 
     valid_from, valid_to,
     standard_name, cf, pp) = range(9)

    # Convert to a dictionary which is keyed by (submodel, STASHcode)
    # tuples
    stash2sn = {}
    for x in raw_list:
        key = (int(x[model]), int(x[stash]))

        try:            
            # Parse the 'CF extra info' column into a dictionary
            cf_info = {}
            if x[cf]:
                for d in x[cf].split():
                    if d.startswith('height='):
                        # Split out the number and its (optional)
                        # units, e.g. 'height=1.5m' -> ['1.5', 'm'].
                        # Note that re.IGNORECASE must be given as the
                        # 'flags' keyword: passed positionally (as the
                        # original code did) it is silently treated as
                        # the maxsplit argument.
                        cf_info['height'] = re.split(
                            _number_regex, d, flags=re.IGNORECASE)[1:4:2]
                        if (len(cf_info['height']) == 2 and
                            cf_info['height'][1] == ''):
                            # No units given: default to '1'
                            cf_info['height'][1] = '1'

                    if d.startswith('below_'):
                        cf_info['below'] = re.split(
                            _number_regex, d, flags=re.IGNORECASE)[1:4:2]
                        if (len(cf_info['below']) == 2 and
                            cf_info['below'][1] == ''):
                            # No units given: default to '1'
                            cf_info['below'][1] = '1'

                    if d.startswith('where_'):         
                        cf_info['where'] = d.replace('where_', 'where ', 1)
                    if d.startswith('over_'):         
                        cf_info['over'] = d.replace('over_', 'over ', 1)
            #--- End: if

            x[cf] = cf_info                    
        except IndexError:
            # Malformed row: leave its CF column as it is
            pass

        # Strip trailing whitespace from the PP extra info
        x[pp] = x[pp].rstrip()

        # Append this entry to any others with the same key
        line = (x[name:],)
        if key in stash2sn:
            stash2sn[key] += line
        else:
            stash2sn[key] = line
    #--- End: for

    return stash2sn
#--- End: def

# ---------------------------------------------------------------------
# Create the STASH code to standard_name conversion dictionary. This
# is read once, at import time, and is keyed by (submodel, STASH code)
# tuples.
# ---------------------------------------------------------------------
_stash2standard_name = read_stash2standard_name()

def read(infile, close=False):
    ''' 

Read fields from an input PP file on disk.

The file may be big or little endian.

:Parameters:

    filename : file or str
        A string giving the file name, or an open file object, from
        which to read fields.

    close : bool, optional
        **IGNORED**

:Returns:

    out : FieldList
        The fields in the file.

**Examples**

>>> f = cf.pp.read('file.pp')
>>> f = cf.pp.read('*/file[0-9].pp')

'''

    # coord_type: A short string which acts as .... Valid values are
    # 't'  : time 
    #
    # 'v'  : an axis other than time which has the the same value for
    #        all data points, as described by LBVC
    #
    # 'p'  : pseudolevel
    #
    # 'rwl': radiation wavelength
    #
    # 'x'  : The X axis of the data array
    #
    # 'y'  : The Y axis of the data array

    # ------------------------------------------------------------
    # Preliminaries
    # ------------------------------------------------------------
    # Open the file, if it is not already open
    try:
        ppfile = open(infile, 'rb') 
    except TypeError:
        ppfile = infile

    # Initialize the output list of fields
    fields_in_file = FieldList()

    history = '%s Converted from PP by cf-python v%s' % \
        (time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime()),
         __version__)

    # ****************************************************************
    # Loop round each PP field in the file, reading it into a field
    # ****************************************************************
    while True:

        dimN   = 0  # Counter for dimension coordinates
        auxN   = 0  # Counter for auxiliary coordinates
        transN = 0  # Counter for transforms

        # ============================================================
        # Get the next PP field's metadata and data and find some
        # derived metadata quantities.
        # ============================================================
        # Get the PP field (header and data and extra data)
        m = PPMeta(ppfile)

        # Stop now if we have reached the end of the file
        if not m:
            break

#        print m

        m.history = history

        # ============================================================
        # Initialize the field. The involves setting the field's
        # attributes and data and giving it an empty space, for now.
        # ============================================================
        f       = Field()
        f.space = Space()
        space   = f.space

        # Set the field's attributes
        f = _field_attributes(f, m=m)

        axis_name = {}
        axis_dim  = {}
        
        # ============================================================
        # Create the field's space
        # ============================================================

        # ------------------------------------------------------------
        # Create a TIME dimension coordinate
        # ------------------------------------------------------------
        dim  = 'dim%(dimN)d' % locals()
        dimN += 1         # Increment dimension number
        tdim = dim      

        # Set coordinate type and axis code
        if m.ix in (20, 23):
            coord_type = 'x'
            axis_code  = m.ix
            # Flag that we have already created an 'X' coordinate
            m.ix = None
        elif m.iy in (20, 23):
            coord_type = 'y'
            axis_code  = m.iy
            # Flag that we have already created a 'Y' coordinate
            m.iy = None
        else:
            coord_type = 't'
            axis_code  = m.it
        #--- End: if

        # Set the coordinate's data
        if m.timeseries:
            # This PP field is a timeseries. The validity time is
            # taken to be the time for the first sample, the data time
            # for the last sample, with the others evenly between.
            size   = m.lhd[lbrow]
            delta  = (m.dtime - m.vtime)/(size - 1)
            origin = m.vtime
            array = numpy.linspace(origin, origin + delta*(size-1), 
                                   num=size)
        

            coord = _create_Coordinate(f, tdim, axis_code, m=m, 
                                       array = array,
                                       dimensions = [tdim])       
        else:
            coord = _create_Coordinate(f, tdim, axis_code, m=m,
                                       coord_type = coord_type, 
                                       dimensions = [tdim])
       
        # Save the dimension name
        if coord_type == 'x':
            xdim = tdim
        elif coord_type == 'y' or m.timeseries:
            ydim = tdim

        # Set the T axis name for cell methods
        axis_name['t'] = coord.standard_name
        axis_dim['t']  = tdim

        # ------------------------------------------------------------
        # Create a 'VERTICAL' dimension coordinate. This size 1
        # coordinate will often by a vertical coordinate such as
        # pressure, but could be anything, such as longitude.
        # ------------------------------------------------------------
        if m.iv is not None: 
            dim  = 'dim%(dimN)d' % locals()
            dimN += 1         # Increment dimension number
            vdim = dim      

            if 'height' in m.cf_info:
                # Create the height coordinate from the information
                # given in the STASH to standard_name conversion table
                height, units = m.cf_info['height']

                coord = _create_Coordinate(f, vdim, axis_code=15, m=m,
                                           units      = Units(units),
                                           array      = numpy.array([height],
                                                                    dtype=float),
                                           dimensions = [vdim])
            else:
                # Create the height coordinate from the PP header
                axis_code = m.iv
                data      = None

                # Create the height coordinate from the PP header
                if m.lhd[lbvc] == 9:
                    # atmosphere_hybrid_sigma_pressure_coordinate
                    pstar  = 1.0e5
                    data   = numpy.array([m.bhd[blev] + m.bhd[bhlev]/pstar])
                    bounds = numpy.array([[m.bhd[brlev]  + m.bhd[bhrlev]/pstar, 
                                           m.bhd[brsvd1] + m.bhd[brsvd2]/pstar]])
                elif m.lhd[lbvc] == 65:
                    # atmosphere_hybrid_height_coordinate: Can't
                    # calculate eta (because we don't know the height
                    # of the top level), so we'll create dimension
                    # coordinate for model_level_number instead
                    lev = m.lhd[lblev]
                    if lev >= 0:
                        data      = numpy.array([lev])
                        bounds    = None
                        axis_code = 5
                else:
                    data   = numpy.array([m.bhd[blev]])
                    bounds = numpy.array([[m.bhd[brlev], m.bhd[brsvd1]]])
                    if equals(bounds[0,0], bounds[0,1]):
                        bounds = None
                #--- End: if

                if data is not None:
                    # We have found some data, so create a dimension
                    # coordinate
                    coord = _create_Coordinate(f, vdim, axis_code=axis_code,
                                               m=m,
                                               array        = data,
                                               bounds_array = bounds,
                                               dimensions   = [vdim])
            #--- End: if

            # Set the V axis name for cell methods
            axis_name['v'] = coord.standard_name
            axis_dim['v']  = vdim
        #--- End: if

        # ------------------------------------------------------------
        # Create a PSEUDOLEVEL dimension coordinate
        # ------------------------------------------------------------
        if m.lhd[lbuser5] > 0: 

            dim  = 'dim%(dimN)d' % locals()
            dimN += 1         # Increment dimension number
            pdim = dim      
            
            coord = _create_Coordinate(f, pdim, axis_code=40, m=m, 
                                       array      = numpy.array([m.lhd[lbuser5]]),
                                       dimensions = [pdim])
        
            # Set the pseudolevel axis name for cell methods
            axis_name['p'] = coord.standard_name  ## dch: should this be something else?
            axis_dim['p']  = pdim
        #--- End: if

        # ------------------------------------------------------------
        # Create a RADIATION WAVELENGTH dimension coordinate
        # ------------------------------------------------------------
            #dch : do this instead of VERTICAL, I wonder
        try:
            rwl, units = m.cf_info['below']
        except (KeyError, TypeError):
            pass
        else:
            
            dim   = 'dim%(dimN)d' % locals()
            dimN += 1         # Increment dimension number
            rwldim = dim      
            
            # Create the radiation wavelength coordinate
            pubattr = {'standard_name' : 'radiation_wavelength'}

            coord = _create_Coordinate(f, rwldim, axis_code=None, m=m,
                                       pubattr      = pubattr,
                                       units        = Units(units),
                                       array        = numpy.array([rwl], 
                                                                  dtype=float),
                                       bounds_array = numpy.array([[0.0, rwl]]),
                                       dimensions   = [rwldim])
            
            # Set the radiation wavelength axis name for cell methods
            axis_name['rwl'] = coord.standard_name
        #--- End: try

        # ------------------------------------------------------------
        # Create X and Y dimension coordinates, if they haven't
        # already been created. These coordinates represent the 'x'
        # and 'y' dimensions of the data array. In the case of cross
        # sections, one of them may have already been created.
        # ------------------------------------------------------------
        # Set coordinate type and axis code
        for coord_type, axis_code in zip(('y' , 'x' ),
                                         (m.iy, m.ix)):

            # Skip this coordinate if it has already been created
            if axis_code is None:
                continue    

            # Still here? Then create a coordinate
            dim   = 'dim%(dimN)d' % locals()           
            dimN += 1         # Increment dimension number       

            coord = _create_Coordinate(f, dim, axis_code, m=m,
                                       coord_type=coord_type,
                                       dimensions=[dim])
        
            if coord_type == 'y':
                ydim = dim
            else:
                xdim = dim
 
            # Set the X or Y axis name for cell methods
            axis_name[coord_type] = coord.standard_name
            axis_dim[coord_type]  = dim
        #--- End: for

        # ============================================================
        # Create 1-d, size 1 auxiliary coordinates
        # ============================================================

        vc = m.lhd[lbvc]
        if vc == 9:
            # ------------------------------------------------------------
            # Atmosphere hybrid sigma pressure coordinate components
            # (see Appendix D of the CF conventions)
            # ------------------------------------------------------------
            for long_name, value, units, bounds in zip(
                ('atmosphere_hybrid_sigma_pressure_coordinate_ak',      
                 'atmosphere_hybrid_sigma_pressure_coordinate_bk'),
                (m.bhd[bhlev],                     # value
                 m.bhd[blev]),
                ('Pa',                             # units
                 '1'),
                ([m.bhd[bhrlev], m.bhd[brsvd2]],   # bounds
                 [m.bhd[brlev] , m.bhd[brsvd1]]),
                ):
                aux   = 'aux%(auxN)d' % locals()           
                auxN += 1         # Increment auxiliary number
                
                pubattr = {'standard_name': long_name}

                coord = _create_Coordinate(f, aux, axis_code=None,
                                           m=m,
                                           pubattr      = pubattr, 
                                           units        = Units(units),
                                           array        = numpy.array([value]),
                                           bounds_array = numpy.array([bounds]),
                                           dimensions   = [vdim])
            #--- End: for
        elif vc == 65:
            # ------------------------------------------------------------
            # Atmosphere hybrid height coordinate components (see
            # Appendix D of the CF conventions)
            # ------------------------------------------------------------
            for long_name, value, units, bounds in zip(
                ('atmosphere_hybrid_height_coordinate_ak',      
                 'atmosphere_hybrid_height_coordinate_bk'),
                (m.bhd[blev],                      # value
                 m.bhd[bhlev]),
                ('m',                              # units
                 '1'),
                ([m.bhd[brlev] , m.bhd[brsvd1]],   # bounds
                 [m.bhd[bhrlev], m.bhd[brsvd2]])
                 
                ):
                aux   = 'aux%(auxN)d' % locals()           
                auxN += 1         # Increment auxiliary number
                
                pubattr = {'standard_name': long_name}

                coord = _create_Coordinate(f, aux, axis_code=None,
                                           m=m,
                                           pubattr      = pubattr, 
                                           units        = Units(units),
                                           array        = numpy.array([value]),
                                           bounds_array = numpy.array([bounds]),
                                           dimensions   = [vdim])
            #--- End: for
  
        #--- End: if

        # ------------------------------------------------------------
        # forecast_reference_time auxiliary coordinate
        # ------------------------------------------------------------
        if m.lbtim_ib == 1: 
            aux   = 'aux%(auxN)d' % locals()           
            auxN += 1         # Increment auxiliary number
       
            pubattr = {'standard_name': 'forecast_reference_time'}

            coord = _create_Coordinate(f, aux, axis_code=None, m=m,
                                       pubattr    = pubattr,
                                       units      = m.reftime,
                                       array      = numpy.array([m.dtime], 
                                                                dtype=float),
                                       dimensions = [tdim])           
        #--- End: if

        # -------------------------------------------------------
        # model_level_number auxiliary coordinate
        # -------------------------------------------------------
        lev = m.lhd[lblev]
        if (m.iv is not None and 
            m.lhd[lbvc] in (2, 9) and 
            lev not in (7777, 8888, 9999)): # dch check 7777
            aux   = 'aux%(auxN)d' % locals()           
            auxN += 1         # Increment auxiliary number

            coord = _create_Coordinate(f, aux, axis_code=5, m=m, 
                                       pubattr    = {'axis': None}, 
                                       array      = numpy.array([lev]), 
                                       dimensions = [vdim])
        #--- End: if

        # ------------------------------------------------------------
        # Domain title auxiliary coordinate
        # ------------------------------------------------------------
        if 'domain_title' in m.extra:
            domain_titles = m.extra['domain_title']

            if m.ix == 13:
                dim = xdim
            elif m.iy == 13:
                dim = ydim
            else:
                dim = None
    
            if dim:
                aux   = 'aux%(auxN)d' % locals()           
                auxN += 1                        # Increment auxiliary number

                pubattr = {'long_name': 'domain title'}

                coord = _create_Coordinate(f, aux, axis_code=None, m=m, 
                                           array      = numpy.array(domain_titles), # dch array, or not?
                                           pubattr    = pubattr,
                                           dimensions = [dim])
        #--- End: if

        # ============================================================
        # Create N-d (N>1) auxiliary coordinates
        # ============================================================
        
        # ------------------------------------------------------------
        # 2-d unrotated latitude and longitude auxiliary coordinates
        # ------------------------------------------------------------
        if 'degrees' == space[ydim].units == space[xdim].units:

            if space[ydim].standard_name == 'grid_longitude':
                dim_lat, dim_lon = xdim, ydim
                rotated_lon = space[ydim]
                rotated_lat = space[xdim]
            else:
                dim_lat, dim_lon = ydim, xdim
                rotated_lon = space[xdim]
                rotated_lat = space[ydim]

            # Try to find the appropriate unrotated latitude and
            # longitude arrays in the cache, to avoid having to create
            # them.
            found_cached_latlon = False            
            for rot_lat, rot_lon in _cache['latlon']:
                if (rot_lon.Data.equals(rotated_lon.Data) and 
                    rot_lat.Data.equals(rotated_lat.Data)):
                    
                    found_cached_latlon = True

                    lat, lon = _cache['latlon'][(rot_lat, rot_lon)] 

                    lat = lat.copy()
                    lon = lon.copy()

                    break
            #--- End: for

            # Create the unrotated latitude and longitude arrays if we
            # couldn't find them in the cache
            if not found_cached_latlon:
#                print  'CALC'
                lat, lon = unrotated_latlon(rotated_lat.varray, 
                                            rotated_lon.varray,
                                            m.bhd[bplat], m.bhd[bplon]) 

                # Add the newly created unrotated latitude and
                # longitude auxiliary coordinates to the cache
                _cache['latlon'][(rotated_lat, rotated_lon)] = (lat, lon)
            #--- End: if
             
            # Create the transform
            transform_id = 'trans%(transN)d' % locals()           
            transN += 1     # Increment transform number

            grid_mapping_name = 'rotated_latitude_longitude'

            transform = Transform()
            transform.name         = grid_mapping_name
            transform.grid_mapping = grid_mapping_name
            transform['grid_north_pole_latitude']  = m.bhd[bplat]
            transform['grid_north_pole_longitude'] = m.bhd[bplon]

            space.transforms[transform_id] = transform

            # Add the transform pointer to the dimension coordinates
            space[ydim].transforms = [transform_id]
            space[xdim].transforms = [transform_id]

            # Create the unrotated latitude and longitude coordinates        
            for (i, axis_code, standard_name, array,
                 units) in zip((  0            ,   1           ),
                               ( 10            ,  11           ), # axis code
                               ('latitude'     , 'longitude'   ), # standard name
                               ( lat           ,  lon          ), # numpy array
                               ('degrees_north', 'degrees_east'), # units
                               ):
                aux   = 'aux%(auxN)d' % locals()           
                auxN += 1                        # Increment auxiliary number

                pubattr = {'standard_name': standard_name,
                           'axis'         : None,
                          }

                coord = _create_Coordinate(f, aux, axis_code, m=m,
                                           pubattr    = pubattr,
                                           units      = Units(units),
                                           array      = array,
                                           dimensions = [dim_lat, dim_lon]) 

                # Add the transform pointer to the auxiliary
                # coordinates
                coord.transforms = [transform_id]
             #--- End: for

        #--- End: if

        # ------------------------------------------------------------
        # Create auxiliary coordinates for the latitudes and
        # longitudes of the sites in site-time cross sections. (Don't
        # consider 'z' extra data because we don't know what it refers
        # to.)
        # ------------------------------------------------------------
        if m.site_time_cross_section:
            for axis_code, extra_type in zip((11 , 10 ),
                                             ('x', 'y')):
                coord_type = extra_type + '_domain_bounds'

                if coord_type in m.extra:
                    m.extra[coord_type]
                    # Create, from extra data, an auxiliary coordinate                        # should   
                    # with 1) data and bounds, if the upper and lower                         # be       
                    # bounds have no missing values; or 2) data but no                        # the      
                    # bounds, if the upper bound has missing values                           # axis     
                    # but the lower bound does not.                                           # which    
                    file_position = ppfile.tell()                                             # has      
                    bounds = m.extra[coord_type][...]                                         # axis_code
                    # Reset the file pointer after reading the extra                          # 13       
                    # data into a numpy array
                    ppfile.seek(file_position, os.SEEK_SET)
                    data = None
                    if numpy.any(bounds[..., 1] == _pp_rmdi): # dch also test in bmdi?
                        if not numpy.any(bounds[...,0] == _pp_rmdi): # dch also test in bmdi?
                            data = bounds[...,0]
                        bounds = None
                    else:
                        data = numpy.mean(bounds, axis=1)

                    if (data, bounds) != (None, None):
                        aux   = 'aux%(auxN)d' % locals()           
                        auxN += 1                        # Increment auxiliary number
                        
                        coord = _create_Coordinate(f, aux, axis_code, m=m,
                                                   array        = data,
                                                   bounds_array = bounds, 
                                                   pubattr      = {'axis': None},
                                                   dimensions   = [xdim]) # DCH      
                                                                        # xdim?    
                                                                        # should   
                                                                        # be       
                                                                        # the      
                                                                        # axis     
                                                                        # which    
                                                                        # has      
                                                                        # axis_code
                                                                        # 13       
                    #--- End: if
                else:
                    coord_type = '%s_domain_lower_bound' % extra_type
                    if coord_type in m.extra:
                        # Create, from extra data, an auxiliary
                        # coordinate with data but no bounds, if the
                        # data does not contain any missing values
                        file_position = ppfile.tell()
                        data = m.extra[coord_type][...]
                        # Reset the file pointer after reading the
                        # extra data into a numpy array
                        ppfile.seek(file_position, os.SEEK_SET)
                        if not numpy.any(data == _pp_rmdi): # dch also test in bmdi?   
                            aux   = 'aux%(auxN)d' % locals()           
                            auxN += 1                        # Increment auxiliary number
                            coord = _create_Coordinate(f, aux, axis_code, m=m, 
                                                       array      = numpy.array(data),
                                                       pubattr    = {'axis': None}, 
                                                       dimensions = [xdim])# DCH xdim?    
               #--- End: if
           #--- End: for
        #--- End: if
        
        # ------------------------------------------------------------
        # Set the field's data dimension names and the netCDF
        # dimension names for each of the space's dimensions
        # ------------------------------------------------------------
        space.dimensions['data'] = [ydim, xdim]

        space.nc_dimensions = {}
        for k, ncdim in axis_name.iteritems():
            space.nc_dimensions[axis_dim[k]] = ncdim

        # ------------------------------------------------------------
        # Set the field's data
        # ------------------------------------------------------------
        if m.data is not None:
            f.Data = Data(m.data, units=f.Units,
                          _FillValue=getattr(f, '_FillValue', None))
        #--- End: if

        # ============================================================
        # Create the field's cell methods
        # ============================================================
        cell_methods = []

        proc       = m.lhd[lbproc]
        tmean_proc = 0
        if m.lbtim_ib in (2, 3):
            if proc-128 in (0, 64, 2048, 4096, 8192):
                tmean_proc = 128
                proc -= 128
                
        # ------------------------------------------------------------
        # Area cell methods
        # ------------------------------------------------------------
        if m.ix in (10, 11, 12) and m.iy in (10, 11, 12):
            if 'where' in m.cf_info:
                cell_methods.append('area: mean')
                axis_dim['area']  = None
                axis_name['area'] = 'area'

                cell_methods.append(m.cf_info['where'])
                if 'over' in m.cf_info:
                    cell_methods.append(m.cf_info['over'])
            #--- End: if

            if proc == 64:
                cell_methods.append('x: mean')

#            if proc == 64 or 'where' in m.cf_info:
#                cell_methods.append('area: mean')
#                axis_dim['area']  = None
#                axis_name['area'] = 'area'
#
#            try:
#                cell_methods.append(m.cf_info['where'])
#            except (KeyError, TypeError):
#                pass
#            else:
#                try:
#                    cell_methods.append(m.cf_info['over'])
#                except (KeyError, TypeError):
#                    pass

#            for info in m.cf_info:                
#                if info.startswith('area: mean where_'):                    
#                    cell_methods.append(info.replace('where_', 'where ', 1))
#                elif info.startswith('over_'):
#                    cell_methods.append(info.replace('over_', 'over ', 1))
        # dch : do special zonal mean as as in pp_cfwrite
        
        # ------------------------------------------------------------
        # Vertical cell methods
        # ------------------------------------------------------------
        if proc == 2048:
            cell_methods.append('v: mean')

        # ------------------------------------------------------------
        # Time cell methods
        # ------------------------------------------------------------
        if m.lbtim_ib in (0, 1):
            cell_methods.append('t point')
        elif proc == 4096:
            cell_methods.append('t: minimum')
        elif proc == 8192:
            cell_methods.append('t: maximum')
        if tmean_proc == 128:
            if m.lbtim_ib == 2:
                cell_methods.append('t: mean')
            elif m.lbtim_ib == 3:
                cell_methods.append('t: mean within years')
                cell_methods.append('t: mean over years')
        #--- End: if

        # ------------------------------------------------------------
        # Add the cell methods to the field
        # ------------------------------------------------------------
        if cell_methods:
            cell_methods   = CellMethods(' '.join(cell_methods))
            f.cell_methods = _parse_cell_methods(cell_methods, 
                                                 axis_name, axis_dim)
        #--- End: if

        f.setprop('lbproc', m.lhd[lbproc])

        # ============================================================
        # The field is complete(!). So add it to the list of fields to
        # be returned
        # ============================================================
        fields_in_file.append(f)

        # ============================================================
        # Now read the next PP field in the file back at the top of
        # this while loop
        # ============================================================
    # --- End: while

    # Only finalize the fields after the file has been read in its
    # entirety
    for f in fields_in_file:
        f.finalize()

    # Close the PP file
    ppfile.close()

    # Return the fields
    return fields_in_file
#--- End: def

def _parse_cell_methods(cell_methods, axis_name, axis_dim):
    '''

:Parameters:

    cell_methods : CellMethods

    axis_name : dict

    axis_dim : dict

:Returns:

    cell_methods : CellMethods

**Examples**

'''
    for cell_method in cell_methods:
        for i, name in enumerate(cell_method['name']):
            cell_method['name'][i] = axis_name[name]
            cell_method['dim'][i]  = axis_dim[name]
    #--- End: for

    return cell_methods
#--- End: def
