from django.contrib.gis.db import models
from lingcod.common.utils import *
from lingcod.kmlapp.views import create_kmz
from mlpa.models import *
from lingcod.bioregions.models import *
import lingcod.intersection.models as int_models
import lingcod.replication.models as rep_models
import lingcod.spacing.models as sp_models
from lingcod.unit_converter.models import length_in_display_units, area_in_display_units
from django.conf import settings
from django.contrib.gis import geos
from django.contrib.gis.measure import A, D
from django.db import transaction
from django.db.models.signals import post_save

# Name of the intersection OrganizationScheme covering the shallow 0-30m
# depth zone (not referenced elsewhere in this chunk; presumably used by
# other modules -- confirm before removing).
SHALLOW_0_30M_ORGSCHEME_NAME = 'shallow030'

def build_area(geom_collection):
    """Creates an areal geometry formed by the constituent linework of given geometry. The return type can be a Polygon or MultiPolygon, depending on input. 
    If the input lineworks do not form polygons NULL is returned. The inputs can be LINESTRINGS, MULTILINESTRINGS, POLYGONS, MULTIPOLYGONS, and GeometryCollections."""
    cursor = connection.cursor()
    # Pass the EWKT as a bound query parameter instead of interpolating it
    # into the SQL string; the DB driver handles quoting/escaping.
    query = "select ST_AsText(ST_BuildArea(%s::geometry))"
    cursor.execute(query, [geom_collection.ewkt])
    return geos.fromstr(cursor.fetchone()[0])
    

def line_locate_point(line,point):
    """
    Returns a float between 0 and 1 representing the location of the closest point on LineString to the given Point, as a 
    fraction of total 2d line length.  This requires PostGIS 1.1.0 or newer.  
    """
    cursor = connection.cursor()
    # Bound parameters instead of string interpolation; the driver quotes
    # the EWKT values.
    query = "select ST_Line_Locate_Point(%s::geometry, %s::geometry) as sline;"
    cursor.execute(query, [line.ewkt, point.ewkt])
    return cursor.fetchone()[0]

class ContinuousProxyLineManager(models.GeoManager):
    """Spatial queries against the continuous proxy line records."""

    def polygon_intersect_segments(self,polygon):
        """Return proxy line records whose geometry crosses the polygon's boundary."""
        return self.filter(geometry__intersects=polygon.boundary)
        
    def polygon_intersect_points(self,polygon):
        """
        Return the points where the proxy line crosses the polygon's boundary.

        Returns None when there are no crossings (or only one); a two-item
        list when there are exactly two; otherwise the southernmost (min y)
        and northernmost (max y) crossing points.

        Bug fix: the original flattened MultiPoints by appending to and
        removing from the list while iterating over it, which can skip
        elements; this version builds the flattened list in one pass.
        """
        points = []
        for segment in self.polygon_intersect_segments(polygon):
            intersection = segment.geometry.intersection(polygon.boundary)
            if intersection.geom_type == 'MultiPoint':
                for pnt in intersection:
                    pnt.srid = settings.GEOMETRY_DB_SRID
                    points.append(pnt)
            else:
                intersection.srid = settings.GEOMETRY_DB_SRID
                points.append(intersection)
        if not points:
            return None
        if len(points) == 2:
            return points
        elif len(points) > 2:
            # We need to find the northern and southern most points.
            # NOTE: keying on y drops points sharing a y value; harmless since
            # only the min/max y points are kept.
            p_dict = dict( zip( [p.y for p in points], points ) )
            return [ p_dict[k] for k in [ min( p_dict.keys() ),max( p_dict.keys() ) ] ]
        # exactly one crossing: fall through and return None (as before)
            
    def lower_and_upper_boundary_lines(self,polygon):
        """
        For each proxy-line crossing point, build the merged straight line
        from the closest shoreline point to the closest 30m-contour point.
        """
        points = self.polygon_intersect_points(polygon)
        lines = []
        for point in points:
            line = geos.LineString( [ShoreLine.objects.all()[0].closest_point(point),Contour30m.objects.all()[0].closest_point(point)] )
            line = line.merged
            line.srid = settings.GEOMETRY_DB_SRID
            lines.append(line)
        return lines

class ContinuousProxyLine(models.Model):
    """
    Segments of the continuous proxy line used (alongside ShoreLine and
    Contour30m) to delineate the 0-30m depth strip.
    """
    # Stored in the project SRID; queried through the custom manager below.
    geometry = models.MultiLineStringField(srid=settings.GEOMETRY_DB_SRID)
    objects = ContinuousProxyLineManager()
    
def pull_strings_from_geom_collection(gc):
    """
    Collect every LineString in the given geometry collection into a single
    MultiLineString, flattening any MultiLineString members.  Members of any
    other geometry type are ignored.
    """
    result = geos.fromstr('MULTILINESTRING EMPTY')
    for member in gc:
        kind = member.geom_type
        if kind == 'MultiLineString':
            result.extend(member)
        elif kind == 'LineString':
            result.append(member)
    return result
    
class ContourLine(models.Model):    
    """
    Abstract base for depth-contour layers (see ShoreLine and Contour30m).

    Functionality assumes that this model has only a single record with a
    single multigeometry that represents the whole damned thing.
    """
    # The entire contour as one MultiLineString in the project SRID.
    geometry = models.MultiLineStringField(srid=settings.GEOMETRY_DB_SRID)
    
    class Meta:
        abstract = True
    
    def polygon_intersect_points(self,polygon):
        """
        Return the points where this contour crosses the polygon's boundary.

        The raw intersection may contain points and/or line fragments, so
        everything is flattened to individual Points.  Returns None when
        there are no crossings (or, implicitly, exactly one); with more than
        two, the southernmost (min y) and northernmost (max y) points are
        returned.
        """
        gc = self.geometry.intersection(polygon.boundary)
        points = []
        # Normalize a single-geometry result into a collection so the loop
        # below handles every case uniformly.
        if gc.geom_type != 'GeometryCollection':
            new_gc = geos.fromstr('GEOMETRYCOLLECTION EMPTY')
            new_gc.append(gc)
            gc = new_gc
        for g in gc:
            if g.geom_type == 'Point':
                points.append(g)
            elif g.geom_type == 'MultiPoint':
                for p in g:
                    points.append(p)
            elif g.geom_type == 'LineString':
                # Collinear overlap: keep the fragment's vertices as points.
                # NOTE(review): Point comes from a star import -- presumably
                # django.contrib.gis.geos.Point; confirm.
                points.extend( [ Point(c) for c in g.coords ] )
            elif g.geom_type == 'MultiLineString':
                for l in g:
                    points.extend( [Point(c) for c in l.coords] )
        for p in points:
            p.srid = self.geometry.srid
        if not points:
            return None
        if len(points) == 2:
            return points
        elif len(points) > 2:
            # We need to find the northern and southern most points
            # NOTE(review): keying the dict on y silently drops points that
            # share a y value; harmless since only min/max y survive.
            p_dict = dict( zip( [p.y for p in points], points ) )
            return [ p_dict[k] for k in [ min( p_dict.keys() ),max( p_dict.keys() ) ] ]
    
    def closest_point(self,geom):
        """Return the point on this contour closest to the given geometry."""
        return sp_models.closest_point(self.geometry,geom)
        
    def segment_proxy_parallel(self,geom):
        """
        Return the merged sub-line of this contour lying between the two
        locations nearest to where the continuous proxy line crosses geom's
        boundary, or None when there are not two distinct crossing locations.
        """
        # get the geom intersect points for the proxy line
        points = ContinuousProxyLine.objects.polygon_intersect_points(geom)
        if not points:
            return None
        for p in points:
            p.srid = settings.GEOMETRY_DB_SRID
        # find the fractional locations of closest points on my line
        my_point_locations = [ line_locate_point( self.geometry.merged, p ) for p in points ]
        # make sure these are in order. start first, end second
        my_point_locations.sort()
        if my_point_locations[0] == my_point_locations[1]:
            return None
        # return the segment of my line between those points
        substring = int_models.line_substring(self.geometry, my_point_locations[0], my_point_locations[1])
        if substring.geom_type == 'GeometryCollection':
            substring = pull_strings_from_geom_collection(substring)
        return substring.merged
        
class ShoreLine(ContourLine):
    """The shoreline contour; see ContourLine for the single-record assumption."""
    pass
    
class Contour30m(ContourLine):
    """The 30m depth contour; see ContourLine for the single-record assumption."""
    pass
        
def construct_box(geom):
    """
    Build a closed polygon from the two shoreline-to-30m-contour boundary
    lines at the ends of geom's 0-30m strip, stash it as TestPolygon id 668
    for debugging, and return whether the polygon is valid.
    """
    lines = ContinuousProxyLine.objects.lower_and_upper_boundary_lines(geom)
    # Reverse one line's coordinate order so the ring runs tip-to-tail.
    lines[0].reverse()
    polygon = Polygon( LinearRing( [ Point( c ) for c in lines[0].coord_seq ] + [ Point(c) for c in lines[1].coord_seq ] + [ Point(lines[0].coord_seq[0]) ] ) )
    # Debug artifact: overwrite the fixed-id TestPolygon with the result.
    tp, created = int_models.TestPolygon.objects.get_or_create(id=668, defaults={'geometry':polygon})
    tp.geometry = polygon
    tp.save()
    return polygon.valid
    
def construct_test_lines(geom):
    """
    Build the two straight test lines (30m-contour point -> proxy point ->
    shoreline point) at either end of geom's 0-30m strip and return them as
    a list of two LineStrings.
    """
    sl_points = ShoreLine.objects.all()[0].polygon_intersect_points(geom)
    c30_points = Contour30m.objects.all()[0].polygon_intersect_points(geom)
    proxy_points = ContinuousProxyLine.objects.polygon_intersect_points(geom)
    lines = []
    for i in [0,1]:
        line = geos.LineString( c30_points[i], proxy_points[i], sl_points[i] )
        # Bug fix: the original called lines.append() with no argument,
        # which raised TypeError; append the line that was just built.
        lines.append(line)
    return lines
    
def angle_test(geom,degrees_of_slop=2.0):
    """
    For each of the two ends of geom's 0-30m strip, test whether the
    30m-contour, proxy-line and shoreline crossing points are nearly
    collinear: the angle at the proxy point must lie within degrees_of_slop
    of 180 degrees.  Returns a two-element list of booleans, or
    [False, False] when any layer does not cross geom at exactly two points.
    """
    shore_pts = ShoreLine.objects.all()[0].polygon_intersect_points(geom)
    contour_pts = Contour30m.objects.all()[0].polygon_intersect_points(geom)
    proxy_pts = ContinuousProxyLine.objects.polygon_intersect_points(geom)
    # Guard: every layer must cross the polygon boundary exactly twice.
    for crossing in (shore_pts, contour_pts, proxy_pts):
        if not crossing or len(crossing) != 2:
            return [False, False]
    checks = []
    for end in (0, 1):
        angle = angle_degrees(contour_pts[end], proxy_pts[end], shore_pts[end])
        checks.append((180 - degrees_of_slop) < angle < (180 + degrees_of_slop))
    return checks
    
def touches_30m_contour_test(geom):
    """Return True when the 30m depth contour crosses the boundary of geom."""
    crossings = Contour30m.objects.all()[0].polygon_intersect_points(geom)
    return bool(crossings)
        
def proxy_replication_failures():
    """
    Check every MPA against the 0-30m proxy replication sub-rule and return
    a dict mapping mpa.pk -> failure reason for those that fail.  MPAs with
    no final geometry or that do not touch the proxy line are skipped.
    """
    all_mpas = MlpaMpa.objects.all()
    failures = {}
    for mpa in all_mpas:
        # Progress indicator (Python 2 print statement; the trailing comma
        # suppresses the newline).
        print str(mpa.pk) + ", ",
        if not mpa.geometry_final:
            continue
        
        # Skip MPAs the proxy line never crosses.
        proxy_segs = ContinuousProxyLine.objects.polygon_intersect_points(mpa.geometry_final)
        if not proxy_segs:
            continue
            
        result, reason = rep_models.sub_rule_for_0_30m_proxies(mpa.geometry_final)
        if not result:
            failures.update({mpa.pk:reason})
    return failures
    
def construct_0_30_poly(geom,save_test_polys=False):
    """
    Construct the polygon between the shoreline and the 30m contour within
    geom.  Returns an empty polygon when either boundary segment cannot be
    determined.  When save_test_polys is True the candidate polygons are
    stored as TestPolygon records (ids 666 and 667) for debugging.
    """
    # Reduce a GeometryCollection to just its polygonal members.
    if geom.geom_type.find('GeometryCollection') != -1:
        mp = geos.fromstr('MULTIPOLYGON EMPTY')
        for g in geom:
            if g.geom_type.find('Polygon') > -1:
                mp.append(g)
        geom = mp
    sl_seg = ShoreLine.objects.all()[0].segment_proxy_parallel(geom)
    cont_seg = Contour30m.objects.all()[0].segment_proxy_parallel(geom)
    if None in [sl_seg,cont_seg]:
        return geos.fromstr('POLYGON EMPTY')
    polys = []
    # Try the shoreline segment in both directions; the correct orientation
    # produces a valid (non-self-intersecting) ring and is returned at once.
    for i in [0,1]:
        lr = geos.LinearRing( [ Point( coord ) for coord in sl_seg.coord_seq ] + [ Point( coord ) for coord in cont_seg.coord_seq ] + [ Point(sl_seg.coord_seq[0]) ] )
        polygon = Polygon(lr)
        polys.append(polygon)
        if save_test_polys:
            tp, created = int_models.TestPolygon.objects.get_or_create(id=666+i, defaults={'geometry':polygon})
            tp.geometry = polygon
            tp.save()
        if polygon.valid:
            return polygon
    
        # Flip the shoreline segment for the second attempt.
        sl_seg.reverse()
        
    # If I'm here then neither poly was valid
    # NOTE(review): clean_geometry comes from a star import (lingcod.common.utils).
    if polys[0].area > polys[1].area:
        return clean_geometry(polys[0])
    else:
        return clean_geometry(polys[1])
        
def construct_inshore_poly(geom,save_test_polys=False):
    """
    Construct the polygon between the shoreline and the 0-30m proxy line
    within geom.  Returns an empty polygon when either boundary cannot be
    determined.  When save_test_polys is True the candidate polygons are
    stored as TestPolygon records (ids 666 and 667) for debugging.
    """
    # Reduce a GeometryCollection to just its polygonal members.
    if geom.geom_type.find('GeometryCollection') != -1:
        mp = geos.fromstr('MULTIPOLYGON EMPTY')
        for g in geom:
            if g.geom_type.find('Polygon') > -1:
                mp.append(g)
        geom = mp
    sl_seg = ShoreLine.objects.all()[0].segment_proxy_parallel(geom)
    proxy_points = ContinuousProxyLine.objects.polygon_intersect_points(geom)
    # Bug fix: check for missing inputs BEFORE building the proxy segment.
    # The original constructed geos.LineString(proxy_points) first, which
    # raised when proxy_points was None.
    if sl_seg is None or proxy_points is None:
        return geos.fromstr('POLYGON EMPTY')
    proxy_seg = geos.LineString( proxy_points )
    polys = []
    # Try the shoreline segment in both directions; the correct orientation
    # produces a valid (non-self-intersecting) ring and is returned at once.
    for i in [0,1]:
        lr = geos.LinearRing( [ Point( coord ) for coord in sl_seg.coord_seq ] + [ Point( coord ) for coord in proxy_seg.coord_seq ] + [ Point(sl_seg.coord_seq[0]) ] )
        polygon = Polygon(lr)
        polys.append(polygon)
        if save_test_polys:
            tp, created = int_models.TestPolygon.objects.get_or_create(id=666+i, defaults={'geometry':polygon})
            tp.geometry = polygon
            tp.save()
        if polygon.valid:
            return polygon
    
        # Flip the shoreline segment for the second attempt.
        sl_seg.reverse()
        
    # If I'm here then neither poly was valid
    if polys[0].area > polys[1].area:
        return clean_geometry(polys[0])
    else:
        return clean_geometry(polys[1])
    

class SpacingPointReplicationManager(models.GeoManager):
    """Query helpers for SpacingPointReplication records."""

    def point_set(self,lop,habitat):
        """Replicate-flagged spacing points for one LOP/habitat combination."""
        return self.filter(lop=lop, habitat=habitat, replicate=True)

    def verify(self):
        """
        This method will verify that, for each combination of LOP and habitat, there are exactly two spacing points marked as replicates.
        """
        active_lops = Lop.objects.filter(run=True)
        habitat_maps = int_models.FeatureMapping.objects.filter(
            organization_scheme__name=settings.SAT_OPEN_COAST_REPLICATION)
        for active_lop in active_lops:
            for habitat_map in habitat_maps:
                found = self.point_set(active_lop, habitat_map).count()
                if found != 2:
                    exc_str = 'Instead of 2 points for %s at %s LOP, I found %i.' % (habitat_map.name, active_lop.name, found)
                    raise Exception(exc_str)
        return True

class SpacingPointReplication(models.Model):
    """
    This will let us cram pretend version of clusters from NCCSR into the spacing reporting.

    Links a spacing point to a habitat (and optionally an LOP) and flags
    whether the point counts as a replicate for that combination.
    """
    # A null LOP represents the "no LOP" case (see create_null_lop_sprs).
    lop = models.ForeignKey(Lop,blank=True,null=True, limit_choices_to={'run': True})
    habitat = models.ForeignKey(int_models.FeatureMapping, limit_choices_to={'organization_scheme__name': settings.SAT_OPEN_COAST_REPLICATION})
    point = models.ForeignKey(sp_models.SpacingPoint)
    replicate = models.BooleanField(default=False)
    date_modified = models.DateTimeField(auto_now=True, blank=True, null=True, verbose_name="Date Modified")
    objects = SpacingPointReplicationManager()
    
    class Meta:
        # One record per LOP/habitat/point combination.
        unique_together = (('lop','habitat','point'),)
    
    def __unicode__(self):
        if self.lop:
            return '%s %s at %s.' % (self.point.name,self.habitat.name,self.lop.name)
        else:
            return '%s %s at %s.' % (self.point.name,self.habitat.name,'Null LOP')
        
    def mark_replicate_true(self):
        """Flag this point as a replicate and persist the change."""
        self.replicate = True
        self.save()
                        
def create_null_lop_sprs(spacing_point):
    """
    Ensure a SpacingPointReplication row with a null LOP exists for the
    given spacing point in every open-coast replication habitat.
    """
    habitat_maps = int_models.FeatureMapping.objects.filter(
        organization_scheme__name=settings.SAT_OPEN_COAST_REPLICATION)
    for habitat_map in habitat_maps:
        record, was_created = SpacingPointReplication.objects.get_or_create(
            lop=None, habitat=habitat_map, point=spacing_point)
        if was_created:
            record.save()
            
def create_sprs_for_spacing_point(spacing_point):
    """
    Ensure SpacingPointReplication rows exist for the given spacing point at
    every active (run=True) LOP and every open-coast replication habitat,
    plus the null-LOP rows.
    """
    active_lops = Lop.objects.filter(run=True)
    habitat_maps = int_models.FeatureMapping.objects.filter(
        organization_scheme__name=settings.SAT_OPEN_COAST_REPLICATION)

    for active_lop in active_lops:
        for habitat_map in habitat_maps:
            record, was_created = SpacingPointReplication.objects.get_or_create(
                lop=active_lop, habitat=habitat_map, point=spacing_point)
            if was_created:
                record.save()

    create_null_lop_sprs(spacing_point)

class MpaShapefile(models.Model):
    """
    This model will provide the correct fields for the export of shapefiles using the django-shapes app.
    """
    geometry = models.PolygonField(srid=settings.GEOMETRY_DB_SRID,blank=True,null=True)
    # MPA identity / naming fields.
    name = models.CharField(max_length=255)
    mpa_id_num = models.IntegerField(blank=True, null=True)
    name_short = models.CharField(blank=True, max_length=255,null=True)
    # Designation and level-of-protection fields.
    desig_name = models.CharField(blank=True, max_length=80, null=True)
    desig_acro = models.CharField(blank=True, max_length=80, null=True)
    lop = models.CharField(blank=True, max_length=80, null=True)
    lop_numeric = models.IntegerField(blank=True, null=True)
    # Source records this export row was derived from.
    mpa = models.OneToOneField(MlpaMpa, related_name="mpa")
    array = models.ForeignKey(MpaArray, null=True, blank=True)
    array_name = models.CharField(blank=True, max_length=255, null=True)
    # Regulation text blobs copied into the shapefile attributes.
    allowed_uses = models.TextField(blank=True, null=True)
    other_allowed_uses = models.TextField(blank=True, null=True)
    other_regulated_activities = models.TextField(blank=True, null=True)
    author = models.CharField(blank=True, max_length=255,null=True)
    area_sq_mi = models.FloatField(blank=True,null=True)
    # mpa_modification_date tracks the source MPA; date_modified is set once
    # when this export row is created (auto_now_add).
    mpa_modification_date = models.DateTimeField(blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True, auto_now_add=True)
    objects = models.GeoManager()
    

class StudyRegionTotal(models.Model):
    """
    Per-habitat totals for the whole study region and its estuarine and
    open-coast portions, keyed by feature mapping and organization scheme.
    """
    feature_mapping = models.ForeignKey(int_models.FeatureMapping)
    org_scheme = models.ForeignKey(int_models.OrganizationScheme)
    study_region_total = models.FloatField()
    estuarine_total = models.FloatField() 
    open_coast_total = models.FloatField()
    
    def __unicode__(self):
        return self.feature_mapping.name

class ClusterManager(models.GeoManager):
    def build_clusters_for_array_by_lop(self,array,lop,with_hab=True):
        """
        Rebuild the clusters for one array at one LOP (lop may be None).

        Existing clusters for the array/LOP pair are deleted first.  MPAs are
        greedily grouped: starting from each unclustered MPA, every other MPA
        within settings.CLUSTER_THRESHOLD meters of the growing group is
        absorbed until none remain.  Groups smaller than
        settings.MIN_CLUSTER_SIZE (square miles) are discarded.  Returns the
        queryset of surviving clusters for this array/LOP pair.
        """
        # get rid of the old ones
        self.filter(array=array,lop=lop).delete()
        
        if lop:
            mpas = array.clusterable_mpa_set.filter(lop_table__lop__value__gte=lop.value)
        else: # This means that lop is None
            mpas = array.clusterable_mpa_set
        clustered = []
        
        for mpa in mpas:
            if mpa in clustered:
                continue
            else:
                new_cl = Cluster(lop=lop,array=array)
                new_cl.save() # have to save before we can add mpas to the mpa_set
                new_cl.mpa_set.add(mpa)
                clustered.append(mpa)
            # Grow the cluster: keep absorbing unclustered MPAs that fall
            # within the threshold distance of the accumulated geometry.
            gc = geos.fromstr('GEOMETRYCOLLECTION EMPTY')
            gc.append(mpa.geometry_final)
            while mpas.exclude(pk=mpa.pk).exclude(pk__in=[c.pk for c in clustered]).filter(geometry_final__dwithin=(gc,D(m=settings.CLUSTER_THRESHOLD))):
                close_and_unclustered = mpas.exclude(pk=mpa.pk).exclude(pk__in=[c.pk for c in clustered]).filter(geometry_final__dwithin=(gc,D(m=settings.CLUSTER_THRESHOLD)))
                for m in close_and_unclustered:
                    new_cl.mpa_set.add(m)
                    clustered.append(m)
                    gc.append(m.geometry_final)
            else:
                # NOTE(review): a while/else clause runs whenever the loop
                # exits without break -- always, here -- so this size check
                # executes once per seed MPA after the cluster stops growing.
                if new_cl.area_sq_mi >= settings.MIN_CLUSTER_SIZE:
                    new_cl.save(with_hab=with_hab)
                else:
                    new_cl.delete()
        return self.filter(array=array,lop=lop)
                
    def build_clusters_for_array(self,array,with_hab=True):
        """
        This is the method to call to generate the clusters that are needed for replication analysis.
        Only the LOPs with run set to True are calculated for.
        """
        lops = Lop.objects.filter(run=True)
        for lop in lops:
            self.build_clusters_for_array_by_lop(array,lop,with_hab)
        return Cluster.objects.filter(array=array)
        
    def calculate_habitat_info(self):
        """Recompute and cache habitat info for every cluster."""
        for cl in self.all():
            cl.save(with_hab=True)
            
            
class Cluster(models.Model):
    """
    A group of MPAs from one array that lie within CLUSTER_THRESHOLD meters
    of one another, evaluated at a particular LOP (built by ClusterManager).
    """
    array = models.ForeignKey(get_array_class())
    mpa_set = models.ManyToManyField(get_mpa_class())
    lop = models.ForeignKey(Lop,null=True,blank=True)
    bioregion = models.ForeignKey(Bioregion,null=True,blank=True)
    bioregion_span = models.BooleanField(default=False)
    date_modified = models.DateTimeField(auto_now=True, verbose_name="Date Modified")
    northing = models.FloatField(null=True,blank=True)
    objects = ClusterManager()
    
    class Meta:
        ordering = ['lop__value','-northing']
    
    def __unicode__(self):
        if self.lop:
            return '%s LOP Cluster from %s containing %i MPAs' % (self.lop.name,self.array.name,self.mpa_set.count())
        else:
            return '%s LOP Cluster from %s containing %i MPAs' % ('Null',self.array.name,self.mpa_set.count())
    
    def save(self, with_hab=False, *args, **kwargs):
        """
        Save twice: first so the row exists (the m2m set needs a pk), then
        again after the derived fields (bioregion, bioregion_span, northing)
        have been computed from the member MPA geometries.  When with_hab is
        True, also recompute the cached habitat info.
        """
        super(Cluster,self).save(*args,**kwargs)
        self.bioregion = self.get_bioregion()
        self.bioregion_span = self.get_bioregion_span()
        if not self.geometry_collection.empty:
            self.northing = self.geometry_collection.centroid.y
        super(Cluster,self).save(*args,**kwargs)
        if with_hab:
            self.calculate_habitat_info()
    
    @property
    def geometry_collection(self):
        """GeometryCollection of the final geometries of all member MPAs."""
        gc = geos.fromstr('GEOMETRYCOLLECTION EMPTY')
        for mpa in self.mpa_set.all():
            gc.append(mpa.geometry_final)
        return gc
        
    @property
    def area_sq_mi(self):
        """Total cluster area expressed in display units (square miles)."""
        return area_in_display_units(self.geometry_collection.area)
    
    @property
    def geo_sort(self):
        """Centroid northing, for geographic sorting."""
        return self.geometry_collection.centroid.y
        
    @property
    def name(self):
        """Comma-separated names of the member MPAs."""
        # Removed an unused local ("name = ''") that the original assigned
        # and never read.
        return ', '.join( [m.name for m in self.mpa_set.all() ] )
        
    def calculate_habitat_info(self):
        """
        Run open-coast replication analysis on the cluster's geometry and
        store one ClusterHabitatInfo row per habitat result.
        """
        rs = rep_models.ReplicationSetup.objects.get(org_scheme__name=settings.SAT_OPEN_COAST_REPLICATION)
        results = rs.analyze_single_item(self.geometry_collection)
        for d in results.values():
            # fill_attributes populates and saves the record itself; the
            # original rebound its None return value to no effect.
            ClusterHabitatInfo().fill_attributes(self, d)
        
    def get_bioregion(self):
        """Bioregion containing the cluster geometry (via Bioregion manager)."""
        return Bioregion.objects.which_bioregion(self.geometry_collection)
        
    def get_bioregion_span(self):
        """Whether the cluster's convex hull spans multiple bioregions."""
        return Bioregion.objects.spans_multiple(self.geometry_collection.convex_hull)
        
class ClusterHabitatInfo(models.Model):
    """
    Cached per-habitat replication result for one cluster.  Rows are created
    by Cluster.calculate_habitat_info and expired by the post_save handlers
    wired up at the bottom of this module.
    """
    cluster = models.ForeignKey(Cluster)
    habitat = models.ForeignKey(int_models.FeatureMapping)
    replicate = models.NullBooleanField(null=True,blank=True)
    reason = models.TextField(blank=True,null=True,default=None)
    result = models.FloatField()
    units = models.CharField(null=True,blank=True, max_length=255)
    additional_required = models.FloatField(null=True,blank=True)
    sort = models.FloatField()
    date_modified = models.DateTimeField(auto_now=True, verbose_name="Date Modified")
    
    class Meta:
        ordering = ['sort']
    
    def __unicode__(self):
        return self.habitat.name
    
    @property
    def info_dict(self):
        """Plain-dict summary of this record for templates/serialization."""
        r = {}
        r['habitat'] = self.habitat.name
        r['replicate'] = self.replicate
        r['result'] = self.result
        r['additional_required'] = self.additional_required
        r['sort'] = self.sort
        r['reason'] = self.reason
        return r
        
    def fill_attributes(self, cluster, hab_result):
        """
        Populate and save this record from a replication result dictionary.

        Given a hab_result dictionary like this:
        {'additional_required': 0.0,
          'feature_map_id': 1,
          'org_scheme_id': 1,
          'percent_of_total': 0.44132429282682328,
          'replicate': True,
          'reason': 'blah blah blah',
          'result': 1.77191378592,
          'sort': 1.0,
          'units': u'miles'}
        fill in attributes, save, and return self so callers can keep a
        reference (the original returned None).
        """
        hab = int_models.FeatureMapping.objects.get(pk=hab_result['feature_map_id'])
        self.cluster = cluster
        self.habitat = hab
        self.replicate = hab_result['replicate']
        # 'reason' is optional; membership test replaces dict.has_key(),
        # which was removed in Python 3.
        if 'reason' in hab_result:
            self.reason = hab_result['reason']
        self.sort = hab_result['sort']
        self.additional_required = hab_result['additional_required']
        self.result = hab_result['result']
        self.units = hab_result['units']
        self.save()
        return self

class SpacingReportCacheManager(models.Manager):
    """Fetch-or-rebuild access to cached spacing report kmz files."""

    def _write_kmz(self, sprc, array, lop_value):
        """Regenerate the cached kmz file for one report and save the record."""
        from django.core.files.base import File
        from report.views import array_kml_string
        sprc.kmz = File(open(sprc.kmz_filename,'w+b'))
        sprc.kmz.write( create_kmz( array_kml_string(array,lop_value=lop_value,use_centroids=False), 'doc.kml') )
        sprc.save()

    def get_current(self,array,lop):
        """
        Return an up-to-date SpacingReportCache for the array/LOP pair,
        regenerating the kmz when the record is new or stale.
        """
        sprc, created = self.get_or_create(array=array,lop=lop)
        if lop:
            lop_value = lop.value
        else:
            lop_value = None
        if created:
            # Brand new record: build its kmz from scratch.
            self._write_kmz(sprc, array, lop_value)
            return sprc
        if sprc.is_current:
            # Cached report is still valid; serve it as-is.
            return sprc
        # Stale: discard the old file and rebuild.
        sprc.kmz.delete()
        self._write_kmz(sprc, array, lop_value)
        return sprc

class SpacingReportCache(models.Model):
    """
    Spacing Report Caching
    Cached kml will be tied to array and LOP and must be deleted when any of the following things happen:
    -When the Spacing app land mask is altered, all cached spacing results must go away
    -When a SpacingPointReplication object is modified.
    -When the associated replication results cache is modified.  !!! uh, yeah!  Actually, I think I can just check the replication report cache and make
    sure that it's not newer than the spacing cache.  That should handle everything that's not specific to the spacing app (ie, the land mask, spacing point
    replication)
    """
    array = models.ForeignKey(MpaArray)
    lop = models.ForeignKey(Lop, blank=True, null=True)
    kmz = models.FileField(upload_to='spacing/kmz')
    # NOTE(review): auto_now_add=True combined with auto_now=True is
    # contradictory -- confirm which one this Django version honors.
    date_modified = models.DateTimeField(auto_now_add=True,auto_now=True)
    objects = SpacingReportCacheManager()
    
    class Meta:
        # One cached report per array/LOP pair.
        unique_together = (('array','lop'),)
        
    def __unicode__(self):
        return "Cached Spacing Report for %s at %s LOP." % (self.array.name,self.lop_text)
        
    @property
    def lop_text(self):
        """Display name for the LOP, or 'null' when none is set."""
        if self.lop:
            return self.lop.name
        else:
            return 'null'
            
    @property
    def lop_value_string(self):
        """String form of the LOP value, used to build the kmz file name."""
        if self.lop:
            return str(self.lop.value)
        else:
            return 'null'
            
    @property
    def kmz_basename(self):
        """File name unique per array/LOP pair, e.g. 'sp12null.kmz'."""
        return "sp%i%s.kmz" % (self.array.pk,self.lop_value_string)
        
    @property
    def kmz_filename(self):
        """Absolute path where this record's kmz file is stored."""
        import os
        return os.path.join(self.kmz.storage.location,self.kmz.field.upload_to,self.kmz_basename)
        
    @property
    def is_newer_than_replication_results(self):
        """True when replication results exist and predate this cache entry."""
        return self.array.replication_modification_date_for_lop(self.lop) != None and self.date_modified > self.array.replication_modification_date_for_lop(self.lop)
            
    @property
    def is_newer_than_replication_spacing_points(self):
        """
        True when this cache entry is newer than the latest replicate-flagged
        spacing point for the same LOP.
        """
        from django.db.models import Max
        # NOTE(review): spr_date_max is None when no replicate points exist;
        # under Python 2 a datetime compares greater than None, so the cache
        # then counts as newer.  This comparison would raise on Python 3.
        spr_date_max = SpacingPointReplication.objects.filter(lop=self.lop,replicate=True).aggregate(Max('date_modified'))['date_modified__max']
        return self.date_modified > spr_date_max
        
    @property
    def is_newer_than_pickle(self):
        """True when this cache entry is newer than the spacing-graph pickle (land mask proxy)."""
        from lingcod.spacing.models import PickledGraph
        pic_g = PickledGraph.objects.all()[0]
        return self.date_modified > pic_g.date_modified
        
    @property
    def is_current(self):
        """
        True only when this cache entry is newer than the array itself, the
        replication results, the replicate spacing points and the pickled
        spacing graph.
        """
        if self.date_modified > self.array.date_modified:
            if self.is_newer_than_replication_results and self.is_newer_than_replication_spacing_points and self.is_newer_than_pickle:
                return True
            else:
                return False
        else:
            return False

def feature_mappings_containing_feature(feature):
    """
    For a given lingcod.intersection.models.IntersectionFeature object,
    return a queryset of FeatureMapping objects that include that feature.
    """
    matching_pks = [fm.pk
                    for fm in int_models.FeatureMapping.objects.all()
                    if feature in fm.feature.all()]
    return int_models.FeatureMapping.objects.filter(pk__in=matching_pks)
     
def expire_results(sender, instance, **kwargs):
    """
    When a lingcod.intersection.models.IntersectionFeature object is saved, expire the 
    ClusterHabitatInfo objects that relate to that feature through a feature mapping object.

    Bug fixes relative to the original:
    - pass the feature instance itself; the original passed instance.name
      (a string), which never matched any feature, so nothing was expired
    - delete the related ClusterHabitatInfo rows; the original deleted the
      FeatureMapping queryset itself
    """
    mappings = feature_mappings_containing_feature(instance)
    ClusterHabitatInfo.objects.filter(habitat__in=mappings).delete()
    
def expire_results_replication(sender, instance, **kwargs):
    """
    Post-save handler (connected to replication HabitatThreshold below):
    expire all cached ClusterHabitatInfo records for the saved instance's
    habitat so they get recalculated.
    """
    ClusterHabitatInfo.objects.filter(habitat=instance.habitat).delete()
        
# Cache-invalidation wiring: expire cached habitat results whenever the
# underlying intersection features or replication thresholds change.
post_save.connect(expire_results, sender=int_models.IntersectionFeature)
post_save.connect(expire_results_replication, sender=rep_models.HabitatThreshold)