# -*- coding: utf-8 -*-

""" Sahana Eden Vulnerability Model

    @copyright: 2012-13 (c) Sahana Software Foundation
    @license: MIT

    Permission is hereby granted, free of charge, to any person
    obtaining a copy of this software and associated documentation
    files (the "Software"), to deal in the Software without
    restriction, including without limitation the rights to use,
    copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the
    Software is furnished to do so, subject to the following
    conditions:

    The above copyright notice and this permission notice shall be
    included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
    OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
    HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
    WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
    OTHER DEALINGS IN THE SOFTWARE.
"""

__all__ = ["S3VulnerabilityModel",
           "S3HazardModel",
           "S3RiskModel",
           "S3EvacRouteModel",
           "vulnerability_rheader",
           ]

from datetime import date

from gluon import *
from gluon.storage import Storage

from ..s3 import *
from s3layouts import S3AddResourceLink

# =============================================================================
class S3VulnerabilityModel(S3Model):
    """
        Vulnerability Data

        Defines the indicator parameters, the raw per-location data,
        linked documents and the aggregate table used for reporting.
    """

    names = ["vulnerability_indicator",
             "vulnerability_aggregated_indicator",
             "vulnerability_data",
             "vulnerability_document",
             "vulnerability_aggregate",
             "vulnerability_resilience_id",
             "vulnerability_pids",
             "vulnerability_aggregated_period",
             "vulnerability_rebuild_all_aggregates",
             "vulnerability_update_aggregates",
             "vulnerability_update_location_aggregate",
             ]

    # Class-level caches, filled lazily on first lookup
    resilience_pid = None # id of the resilience indicator
    indicator_pids = None # List of ids used to calculate the resilience indicator

    def model(self):
        """
            Define the vulnerability tables

            @return: a Storage of model-global names (exposed via response.s3)
        """

        T = current.T
        db = current.db

        configure = self.configure
        crud_strings = current.response.s3.crud_strings
        define_table = self.define_table
        super_link = self.super_link

        location_id = self.gis_location_id

        UNKNOWN_OPT = current.messages.UNKNOWN_OPT

        # ---------------------------------------------------------------------
        # Vulnerability Indicators
        #
        tablename = "vulnerability_indicator"
        table = define_table(tablename,
                             # Instance
                             super_link("parameter_id", "stats_parameter"),
                             # NOTE(review): presumably the sort position of the
                             # indicator in lists - confirm against callers
                             Field("posn", "integer"),
                             Field("name",
                                   label = T("Name")),
                             s3_comments("description",
                                         label = T("Description")),
                             *s3_meta_fields()
                             )

        # CRUD Strings
        ADD_VULNERABILITY = T("Add Vulnerability Indicator")
        crud_strings[tablename] = Storage(
            title_create = ADD_VULNERABILITY,
            title_display = T("Vulnerability Indicator Details"),
            title_list = T("Vulnerability Indicators"),
            title_update = T("Edit Vulnerability Indicator"),
            #title_search = T("Search Vulnerability Indicators"),
            #title_upload = T("Import Vulnerability Indicators"),
            subtitle_create = T("Add New Vulnerability Indicator"),
            label_list_button = T("List Vulnerability Indicators"),
            label_create_button = ADD_VULNERABILITY,
            msg_record_created = T("Vulnerability Indicator added"),
            msg_record_modified = T("Vulnerability Indicator updated"),
            msg_record_deleted = T("Vulnerability Indicator deleted"),
            msg_list_empty = T("No vulnerability indicators currently defined"))

        configure(tablename,
                  super_entity = "stats_parameter",
                  deduplicate = self.vulnerability_indicator_duplicate,
                  )

        # ---------------------------------------------------------------------
        # Vulnerability Aggregated Indicators
        # - e.g. the "Resilience" indicator computed from the raw indicators
        #
        tablename = "vulnerability_aggregated_indicator"
        table = define_table(tablename,
                             # Instance
                             super_link("parameter_id", "stats_parameter"),
                             Field("name",
                                   label = T("Name")),
                             s3_comments("description",
                                         label = T("Description")),
                             *s3_meta_fields()
                             )

        # CRUD Strings
        ADD_VULNERABILITY = T("Add Vulnerability Aggregated Indicator")
        crud_strings[tablename] = Storage(
            title_create = ADD_VULNERABILITY,
            title_display = T("Vulnerability Aggregated Indicator Details"),
            title_list = T("Vulnerability Aggregated Indicators"),
            title_update = T("Edit Vulnerability Aggregated Indicator"),
            #title_search = T("Search Vulnerability Aggregated Indicators"),
            #title_upload = T("Import Vulnerability Aggregated Indicator"),
            subtitle_create = T("Add New Vulnerability Aggregated Indicator"),
            label_list_button = T("List Vulnerability Aggregated Indicators"),
            label_create_button = ADD_VULNERABILITY,
            msg_record_created = T("Vulnerability Aggregated Indicator added"),
            msg_record_modified = T("Vulnerability Aggregated Indicator updated"),
            msg_record_deleted = T("Vulnerability Aggregated Indicator deleted"),
            msg_list_empty = T("No vulnerability aggregated indicators currently defined"))

        configure(tablename,
                  super_entity = "stats_parameter",
                  deduplicate = self.vulnerability_indicator_duplicate,
                  )

        # ---------------------------------------------------------------------
        # Vulnerability Data
        # - raw indicator values per location and date
        #
        tablename = "vulnerability_data"
        table = define_table(tablename,
                             # Instance
                             super_link("data_id", "stats_data"),
                             # This is a component, so needs to be a super_link
                             # - can't override field name, ondelete or requires
                             super_link("parameter_id", "stats_parameter",
                                        label = T("Indicator"),
                                        instance_types = ["vulnerability_indicator"],
                                        represent = S3Represent(lookup="stats_parameter"),
                                        readable = True,
                                        writable = True,
                                        empty = False,
                                        ),
                             location_id(
                                widget = S3LocationAutocompleteWidget(),
                                requires = IS_LOCATION(),
                                required = True,
                                ),
                             Field("value", "double",
                                   required = True,
                                   label = T("Value"),
                                   ),
                             s3_date(required = True),
                             # Unused but needed for the stats_data SE
                             #Field("date_end", "date",
                             #      readable=False,
                             #      writable=False
                             #      ),
                             # Link to Source
                             self.stats_source_id(),
                             s3_comments(),
                             *s3_meta_fields()
                             )

        # CRUD Strings
        ADD_DATA = T("Add Vulnerability Data")
        crud_strings[tablename] = Storage(
            title_create = ADD_DATA,
            title_display = T("Vulnerability Data Details"),
            title_list = T("Vulnerability Data"),
            title_update = T("Edit Vulnerability Data"),
            title_search = T("Search Vulnerability Data"),
            title_upload = T("Import Vulnerability Data"),
            subtitle_create = T("Add New Vulnerability Data"),
            label_list_button = T("List Vulnerability Data"),
            label_create_button = ADD_DATA,
            msg_record_created = T("Vulnerability Data added"),
            msg_record_modified = T("Vulnerability Data updated"),
            msg_record_deleted = T("Vulnerability Data deleted"),
            msg_list_empty = T("No vulnerability data currently defined"))

        configure(tablename,
                  super_entity = "stats_data",
                  deduplicate = self.vulnerability_data_duplicate,
                  # Records must be approved before entering aggregates
                  requires_approval=True,
                  )

        # ---------------------------------------------------------------------
        # Vulnerability Documents
        # - a common view of different sorts of document which can be uploaded
        #   to the vulnerability application
        #
        doc_types = {"vca": T("VCA Reports"),
                     "indicator": T("Indicator ratings"),
                     "demographic": T("Demographic Data"),
                     "map": T("Map"),
                     "image": T("Image"),
                     "other": T("Other Reports"),
                     }

        tablename = "vulnerability_document"
        table = define_table(tablename,
                             # Instance
                             # - so that we can link to the doc_document or doc_image, if-appropriate
                             super_link("doc_id", "doc_entity"),
                             Field("name",
                                   label=T("Name")),
                             Field("document_type",
                                   label = T("Type"),
                                   requires=IS_IN_SET(doc_types),
                                   represent = lambda opt: \
                                        doc_types.get(opt, UNKNOWN_OPT),
                                   ),
                             location_id(
                                widget = S3LocationAutocompleteWidget(),
                                requires = IS_LOCATION(),
                                required = True,
                                ),
                             s3_date(label = T("Date Published")),
                             # Link to Source to be able to approve linked data records & trigger aggregates build
                             self.stats_source_id(),
                             *s3_meta_fields()
                             )

        # Resource Configuration
        configure("vulnerability_document",
                  super_entity = "doc_entity",
                  deduplicate=self.vulnerability_document_duplicate,
                  requires_approval = True,
                  )

        #----------------------------------------------------------------------
        # Vulnerability Aggregated data
        #
        # The data can be aggregated against:
        # location, all the aggregated values across a number of locations
        #           thus for an L2 it will aggregate all the L3 values
        # time, all the vulnerability_data values for the same time period.
        #       currently this is just the latest value in the time period
        # copy, this is a copy of the previous time aggregation because no
        #       data is currently available for this time period

        aggregate_types = {1 : T("Time"),
                           2 : T("Location"),
                           3 : T("Copy"),
                           4 : T("Indicator"),
                           }

        tablename = "vulnerability_aggregate"
        table = define_table(tablename,
                             # This is a component, so needs to be a super_link
                             # - can't override field name, ondelete or requires
                             super_link("parameter_id", "stats_parameter",
                                        label = T("Indicator"),
                                        instance_types = ["vulnerability_indicator",
                                                          "vulnerability_aggregated_indicator"],
                                        represent = S3Represent(lookup="stats_parameter"),
                                        readable = True,
                                        writable = True,
                                        empty = False,
                                        ),
                             location_id(
                                widget = S3LocationAutocompleteWidget(),
                                requires = IS_LOCATION()
                                ),
                             Field("agg_type", "integer",
                                   requires = IS_IN_SET(aggregate_types),
                                   represent = lambda opt: \
                                        aggregate_types.get(opt, UNKNOWN_OPT),
                                   default = 1,
                                   ),
                             Field("reported_count", "integer",
                                   #label = T("The number of aggregated records")
                                   ),
                             Field("ward_count", "integer",
                                   #label = T("The number of geographical units that may be part of the aggregation")
                                   ),
                             Field("date", "date",
                                   label = T("Start Date"),
                                   ),
                             Field("end_date", "date",
                                   label = T("End Date"),
                                   ),
                             Field("sum", "double",
                                   label = T("Sum"),
                                   ),
                             Field("min", "double",
                                   label = T("Minimum"),
                                   ),
                             Field("max", "double",
                                   label = T("Maximum"),
                                   ),
                             Field("mean", "double",
                                   label = T("Mean"),
                                   ),
                             Field("median", "double",
                                   label = T("Median"),
                                   ),
                             Field("mad", "double",
                                   label = T("Median Absolute Deviation"),
                                   default = 0.0,
                                   ),
                             #Field("mean_ad", "double",
                             #      label = T("Mean Absolute Deviation"),
                             #      ),
                             #Field("std", "double",
                             #      label = T("Standard Deviation"),
                             #      ),
                             #Field("variance", "double",
                             #      label = T("Variance"),
                             #      ),
                             *s3_meta_fields()
                             )

        # ---------------------------------------------------------------------
        # Pass model-global names to response.s3
        #
        return Storage(
            vulnerability_resilience_id = self.vulnerability_resilience_id,
            vulnerability_pids = self.vulnerability_pids,
            # Used by Tests
            vulnerability_aggregated_period = self.vulnerability_aggregated_period,
            vulnerability_rebuild_all_aggregates = self.vulnerability_rebuild_all_aggregates,
            vulnerability_update_aggregates = self.vulnerability_update_aggregates,
            vulnerability_update_location_aggregate = self.vulnerability_update_location_aggregate,
            )

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_resilience_id():
        """
            Return the parameter_id of the resilience indicator

            The id is looked up once and then cached on the class; if the
            lookup fails (DB not initialised) None is returned and the
            lookup is retried on the next call.
        """

        if S3VulnerabilityModel.resilience_pid is None:
            # Get the parameter_id of the aggregated_indicator
            db = current.db
            table = db.vulnerability_aggregated_indicator
            row = db(table.uuid == "Resilience").select(table.parameter_id,
                                                        limitby=(0, 1)).first()
            # Explicit check instead of a bare except: the only expected
            # failure mode is the record not existing (DB not initialised)
            if row is not None:
                S3VulnerabilityModel.resilience_pid = row.parameter_id

        return S3VulnerabilityModel.resilience_pid

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_pids():
        """
            Return a list of the parameter_id's that are to be used when
            calculating the resilience indicator

            The list is read from the DB once and then cached on the class.
        """

        pids = S3VulnerabilityModel.indicator_pids
        if pids is None:
            db = current.db
            table = db.vulnerability_indicator
            rows = db(table.deleted == False).select(table.parameter_id)
            pids = [row.parameter_id for row in rows]
            S3VulnerabilityModel.indicator_pids = pids

        return pids

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_indicator_duplicate(item):
        """
            Import item de-duplication

            Match existing (aggregated) indicators by case-insensitive name
            and convert the import into an update of the matched record.

            @param item: the import item to check
        """

        if item.tablename in ("vulnerability_indicator",
                              "vulnerability_aggregated_indicator"):
            name = item.data.get("name", None)
            if not name:
                # No name in the import item => nothing to match by
                # (previously this crashed with AttributeError on None.lower())
                return
            table = item.table
            query = (table.name.lower() == name.lower())
            duplicate = current.db(query).select(table.id,
                                                 limitby=(0, 1)).first()
            if duplicate:
                item.id = duplicate.id
                item.method = item.METHOD.UPDATE

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_data_duplicate(item):
        """
            Import item de-duplication

            A vulnerability_data import is a duplicate of an existing record
            if date, location and parameter all match; in that case the
            import becomes an update of the existing record.

            @param item: the import item to check
        """

        if item.tablename != "vulnerability_data":
            return
        table = item.table
        data = item.data
        query = (table.date == data.get("date", None)) & \
                (table.location_id == data.get("location_id", None)) & \
                (table.parameter_id == data.get("parameter_id", None))
        existing = current.db(query).select(table.id,
                                            limitby=(0, 1)).first()
        if existing:
            item.id = existing.id
            item.method = item.METHOD.UPDATE

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_document_duplicate(item):
        """
            Import item de-duplication

            Match existing vulnerability documents by case-insensitive name
            and convert the import into an update of the matched record.

            @param item: the import item to check
        """

        if item.tablename == "vulnerability_document":
            name = item.data.get("name", None)
            if not name:
                # No name in the import item => nothing to match by
                # (previously this crashed with AttributeError on None.lower())
                return
            table = item.table
            query = (table.name.lower() == name.lower())
            duplicate = current.db(query).select(table.id,
                                                 limitby=(0, 1)).first()
            if duplicate:
                item.id = duplicate.id
                item.method = item.METHOD.UPDATE

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_group_duplicate(item):
        """
            Import item de-duplication

            A vulnerability_group import is a duplicate of an existing record
            if both date and location match (the parameter is deliberately
            not part of the match); then the import becomes an update.

            @param item: the import item to check
        """

        if item.tablename != "vulnerability_group":
            return
        table = item.table
        data = item.data
        query = (table.date == data.get("date", None)) & \
                (table.location_id == data.get("location_id", None))
        existing = current.db(query).select(table.id,
                                            limitby=(0, 1)).first()
        if existing:
            item.id = existing.id
            item.method = item.METHOD.UPDATE

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_resilience(loc_level,
                                 location_id,
                                 resilience_pid,
                                 indicator_pids,
                                 date_period_start,
                                 date_period_end,
                                 use_location,
                                 ):
        """
           Calculates the resilience held in the vulnerability_data table
           for a specific location and time period, and stores the result
           in the vulnerability_aggregate table (one record per location
           and period, updated in place if it already exists).

           This is run async within vulnerability_update_aggregates

           @param loc_level: hierarchy level used to find child locations
                             when use_location is False
           @param location_id: the location to calculate resilience for
           @param resilience_pid: parameter_id of the resilience indicator
           @param indicator_pids: parameter_ids of the contributing indicators
           @param date_period_start: start date of the aggregation period
           @param date_period_end: end date of the aggregation period; may
                                   arrive as the string "None" (serialised
                                   task vars) meaning no end date
           @param use_location: if True use the location's own data,
                                otherwise aggregate over its child locations

           Where appropriate add test cases to modules/unit_tests/s3db/vulnerability.py
        """

        db = current.db
        s3db = current.s3db
        vtable = s3db.vulnerability_data
        atable = db.vulnerability_aggregate

        # Get the approved data from the vulnerability_data table
        query = (vtable.deleted != True) & \
                (vtable.approved_by != None) & \
                (vtable.parameter_id.belongs(indicator_pids))
        ward_count = 1
        if use_location:
            query &= (vtable.location_id == location_id)
        else:
            # Get all the child locations
            child_locations = current.gis.get_children(location_id, loc_level)
            child_ids = [row.id for row in child_locations]
            ward_count = len(child_ids)
            query &= (vtable.location_id.belongs(child_ids))

        # A "None" string can arrive when this was invoked as an async task
        # (vars are serialised); normalise it before filtering by date
        if date_period_end is None:
            pass
        elif date_period_end == "None":
            date_period_end = None
        else:
            query &= (vtable.date <= date_period_end)
        # Order newest-first within each location+parameter so that the
        # de-duplication below keeps the most recent value
        rows = db(query).select(vtable.parameter_id,
                                vtable.location_id,
                                vtable.value,
                                vtable.date,
                                orderby=(vtable.location_id,
                                         vtable.parameter_id,
                                         ~vtable.date
                                         )
                                )

        # The query may return duplicate records for the same
        # location+parameter: use the most recent, which because
        # of the ordering will be the first
        values = []
        append = values.append
        locations = []
        new_location = locations.append
        last_record = (0, 0)
        for row in rows:
            value = row.value
            # NOTE: falsy values (0.0 / None) are skipped entirely
            if not value:
                continue
            l = row.location_id
            key = (l, row.parameter_id)
            if last_record != key:
                last_record = key
                append(value)
                if l not in locations:
                    new_location(l)

        # Aggregate the values
        values_len = len(values)
        if not values_len:
            # No data => leave any existing aggregate untouched
            return

        values_sum = sum(values)
        values_min = min(values)
        values_max = max(values)
        values_avg = float(values_sum) / values_len

        # Median & median absolute deviation via numpy
        from numpy import median
        values_med = median(values)
        values_mad = median([abs(v - values_med) for v in values])

        # Number of distinct locations which actually reported data
        reported_count = len(locations)

        # Store Resilience value in the vulnerability_aggregate table
        query = (atable.location_id == location_id) & \
                (atable.date == date_period_start) & \
                (atable.parameter_id == resilience_pid)
        record = db(query).select(atable.id,
                                  limitby=(0, 1)).first()

        if record:
            # Update
            db(query).update(date = date_period_start,
                             end_date = date_period_end,
                             reported_count = reported_count,
                             ward_count = ward_count,
                             min = values_min,
                             max = values_max,
                             mean = values_avg,
                             median = values_med,
                             mad = values_mad,
                             )
        else:
            # Insert new
            id = atable.insert(agg_type = 4, # indicator
                               parameter_id = resilience_pid,
                               location_id = location_id,
                               date = date_period_start,
                               end_date = date_period_end,
                               reported_count = reported_count,
                               ward_count = ward_count,
                               min = values_min,
                               max = values_max,
                               mean = values_avg,
                               median = values_med,
                               mad = values_mad,
                               )
        return

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_rebuild_all_aggregates():
        """
            This will delete all the vulnerability_aggregate records and then
            rebuild them by triggering off a request for each
            vulnerability_data record.

            This function is normally only run during prepop or postpop so we
            don't need to worry about the aggregate data being unavailable for
            any length of time
        """

        # Check to see whether an existing task is running and if it is then kill it
        db = current.db
        ttable = db.scheduler_task
        rtable = db.scheduler_run
        wtable = db.scheduler_worker
        query = (ttable.task_name == "vulnerability_update_aggregates") & \
                (rtable.task_id == ttable.id) & \
                (rtable.status == "RUNNING")
        rows = db(query).select(rtable.id,
                                rtable.task_id,
                                rtable.worker_name)
        now = current.request.utcnow
        for row in rows:
            # Signal the worker to stop, then mark both the run and
            # the task records as stopped
            db(wtable.worker_name == row.worker_name).update(status="KILL")
            db(rtable.id == row.id).update(stop_time=now,
                                           status="STOPPED")
            db(ttable.id == row.task_id).update(stop_time=now,
                                                status="STOPPED")

        # Delete the existing aggregates
        current.s3db.vulnerability_aggregate.truncate()

        # Read all the approved vulnerability_data records
        dtable = db.vulnerability_data
        query = (dtable.deleted != True) & \
                (dtable.approved_by != None)
        records = db(query).select(dtable.data_id,
                                   dtable.parameter_id,
                                   dtable.date,
                                   dtable.location_id,
                                   dtable.value)

        # Fire off a rebuild task (records are passed serialised as JSON)
        current.s3task.async("vulnerability_update_aggregates",
                             vars=dict(records=records.json()),
                             timeout=21600 # 6 hours
                             )

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_aggregated_period(data_date=None):
        """
            This will return the start and end dates of the aggregated time
            period.

            Currently the time period is annually so it will return the start
            and end of the current year.
        """

        if data_date is None:
            data_date = date.today()
        year = data_date.year
        soap = date(year, 1, 1)
        eoap = date(year, 12, 31)
        return (soap, eoap)

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_update_aggregates(records=None):
        """
            This will calculate the vulnerability_aggregate for the specified
            parameter(s) at the specified location(s).

            This will get the raw data from vulnerability_data and generate a
            vulnerability_aggregate record for the given time period.

            The reason for doing this is so that all aggregated data can be
            obtained from a single table. So when displaying data for a
            particular location it will not be necessary to try the aggregate
            table, and if it's not there then try the data table. Rather just
            look at the aggregate table.

            Once this has run then a complete set of  aggregate records should
            exists for this parameter_id and location for every time period from
            the first data item until the current time period.

            Where appropriate add test cases to modules/unit_tests/s3db/vulnerability.py
        """

        if not records:
            return

        import datetime
        from dateutil.rrule import rrule, YEARLY

        db = current.db
        s3db = current.s3db
        dtable = s3db.vulnerability_data
        atable = db.vulnerability_aggregate
        gtable = db.gis_location

        # Data Structures used for the OPTIMISATION steps
        param_location_dict = {} # a list of locations for each parameter
        location_dict = {} # a list of locations
        loc_level_list = {} # a list of levels for each location

        vulnerability_pids = s3db.vulnerability_pids()
        aggregated_period = S3VulnerabilityModel.vulnerability_aggregated_period

        if isinstance(records[0]["date"], (datetime.date, datetime.datetime)):
            from_json = False
        else:
            from_json = True
            from dateutil.parser import parse

        for record in records:
            data_id = record["data_id"]
            location_id = record["location_id"]
            parameter_id = record["parameter_id"]
            # Skip if either the location or the parameter is not valid
            if not location_id or not parameter_id:
                s3_debug("Skipping bad vulnerability_data record with data_id %s " % data_id)
                continue
            if from_json:
                date = parse(record["date"])
            else:
                date = record["date"]
            (start_date, end_date) = aggregated_period(date)

            # Get all the approved vulnerability_data records for this location and parameter
            query = (dtable.location_id == location_id) & \
                    (dtable.parameter_id == parameter_id) & \
                    (dtable.deleted != True) & \
                    (dtable.approved_by != None)
            data_rows = db(query).select(dtable.data_id,
                                         dtable.date,
                                         dtable.value)

            # Get each record and store them in a dict keyed on the start date
            # of the aggregated period. If a record already exists for the
            # reporting period then the most recent value will be stored.
            earliest_period = start_date
            (last_period, end_date) = aggregated_period(None)
            data = {}
            data[start_date] = Storage(date = date,
                                       id = data_id,
                                       value = record["value"])
            for row in data_rows:
                if row.data_id == data_id:
                    # This is the record we started with, so skip
                    continue
                row_date = row.date
                (start_date, end_date) = aggregated_period(row_date)
                if start_date in data:
                    if row_date <= data[start_date]["date"]:
                        # The indicator in the row is of the same time period as
                        # another which is already stored in data but it is earlier
                        # so ignore this particular record
                        continue
                elif start_date < earliest_period:
                    earliest_period = start_date
                # Store the record from the db in the data storage
                data[start_date] = Storage(date = row_date,
                                           id = row.data_id,
                                           value = row.value)

            # Get all the aggregate records for this parameter and location
            query = (atable.location_id == location_id) & \
                    (atable.parameter_id == parameter_id)
            aggr_rows = db(query).select(atable.id,
                                         atable.agg_type,
                                         atable.date,
                                         atable.end_date,

                                         atable.mean,
                                         )

            aggr = {}
            for row in aggr_rows:
                (start_date, end_date) = aggregated_period(row.date)
                aggr[start_date] = Storage(id = row.id,
                                           type = row.agg_type,
                                           end_date = row.end_date,

                                           mean = row.mean,
                                           )

            # Step through each period and check that aggr is correct
            last_data_period = earliest_period
            last_type_agg = False # Whether the type of previous non-copy record was aggr
            last_data_value = None # The value of the previous aggr record
            # Keep track of which periods the aggr record has been changed in
            # the database
            changed_periods = []
            for dt in rrule(YEARLY, dtstart=earliest_period, until=last_period):
                # Calculate the end of the dt period.
                # - it will be None if this is the last period
                dt = dt.date()
                if dt != last_period:
                    (start_date, end_date) = aggregated_period(dt)
                else:
                    start_date = dt
                    end_date = None
                if dt in aggr:
                    # Check that the stored aggr data is correct
                    agg_type = aggr[dt]["type"]
                    if agg_type == 2:
                        # This is built using other location aggregates
                        # so it can be ignored because only time or copy aggregates
                        # are being calculated in this function
                        last_type_agg = True
                        last_data_value = aggr[dt]["mean"]
                        continue
                    # Query to use to update aggr records
                    query = (atable.id == aggr[dt]["id"])
                    if agg_type == 3:
                        # This is a copy aggregate
                        if dt in data:
                            # There is data in the data dictionary for this period
                            # so aggregate record needs to be changed
                            value = data[dt]["value"]
                            last_data_value = value
                            db(query).update(agg_type = 1, # time
                                             reported_count = 1, # one record
                                             ward_count = 1, # one ward
                                             end_date = end_date,
                                             sum = value,
                                             min = value,
                                             max = value,
                                             mean = value,
                                             median = value,
                                             )
                            changed_periods.append((start_date, end_date))
                        elif last_type_agg:
                            # No data in the data dictionary and the last type was aggr
                            continue
                        # Check that the data currently stored is correct
                        elif aggr[dt]["mean"] != last_data_value:
                            value = last_data_value
                            db(query).update(agg_type = 3, # copy
                                             reported_count = 1, # one record
                                             ward_count = 1, # one ward
                                             end_date = end_date,
                                             sum = value,
                                             min = value,
                                             max = value,
                                             mean = value,
                                             median = value,
                                             )
                            changed_periods.append((start_date, end_date))
                    elif agg_type == 1:
                        if dt in data:
                            # The value in the aggr should match the value in data
                            value = data[dt]["value"]
                            last_data_value = value
                            if aggr[dt]["mean"] != value:
                                db(query).update(agg_type = 1, # time
                                                 reported_count = 1, # one record
                                                 ward_count = 1, # one ward
                                                 end_date = end_date,
                                                 sum = value,
                                                 min = value,
                                                 max = value,
                                                 mean = value,
                                                 median = value,
                                                 )
                                changed_periods.append((start_date, end_date))
                        else:
                            # The data is not there so it must have been deleted
                            # Copy the value from the previous record
                            value = last_data_value
                            db(query).update(agg_type = 3, # copy
                                             reported_count = 1, # one record
                                             ward_count = 1, # one ward
                                             end_date = end_date,
                                             sum = value,
                                             min = value,
                                             max = value,
                                             mean = value,
                                             median = value,
                                             )
                            changed_periods.append((start_date, end_date))
                # No aggregate record for this time period exists
                # So one needs to be inserted
                else:
                    if dt in data:
                        value = data[dt]["value"]
                        agg_type = 1 # time
                        last_data_value = value
                    else:
                        value = last_data_value
                        agg_type = 3 # copy
                    atable.insert(parameter_id = parameter_id,
                                  location_id = location_id,
                                  agg_type = agg_type,
                                  reported_count = 1, # one record
                                  ward_count = 1, # one ward
                                  date = start_date,
                                  end_date = end_date,
                                  sum = value,
                                  min = value,
                                  max = value,
                                  mean = value,
                                  median = value,
                                  )
                    changed_periods.append((start_date, end_date))
            # End of loop through each time period

            if changed_periods == []:
                continue
            # The following structures are used in the OPTIMISATION steps later
            location = db(gtable.id == location_id).select(gtable.level,
                                                           limitby=(0, 1)
                                                           ).first()
            loc_level_list[location_id] = location.level
            if parameter_id not in param_location_dict:
                param_location_dict[parameter_id] = {location_id : changed_periods}
            elif location_id not in param_location_dict[parameter_id]:
                param_location_dict[parameter_id][location_id] = changed_periods
            else:
                # Store the older of the changed periods (the end will always be None)
                # Only need to check the start date of the first period
                if changed_periods[0][0] < param_location_dict[parameter_id][location_id][0][0]:
                    param_location_dict[parameter_id][location_id] = changed_periods
            if parameter_id in vulnerability_pids:
                if location_id not in location_dict:
                    location_dict[location_id] = changed_periods
                else:
                    # Store the older of the changed periods (the end will always be None)
                    # Only need to check the start date of the first period
                    if changed_periods[0][0] < location_dict[location_id][0][0]:
                        location_dict[location_id] = changed_periods

        # End of loop through each vulnerability_data record

        # OPTIMISATION step 1
        # The following code will get all the locations for which a parameter
        # has been changed. This will remove duplicates which will occur when
        # items are being imported for many communes in the same district.
        # Take an import of 12 communes in the same district, without this the
        # district will be updated 12 times, the province will be updated 12
        # times and the country will be updated 12 times that is 33 unnecessary
        # updates (for each time period) (i.e. 15 updates rather than 48)

        # Get all the parents
        parents = {}
        get_parents = current.gis.get_parents
        for loc_id in location_dict.keys():
            parents[loc_id] = get_parents(loc_id)
        # Expand the list of locations for each parameter
        parents_data = {}
        for (param_id, loc_dict) in param_location_dict.items():
            for (loc_id, periods) in loc_dict.items():
                if loc_id in parents: # There won't be a parent if this is a L0
                    for p_loc_row in parents[loc_id]:
                        p_loc_id = p_loc_row.id
                        if param_id in parents_data:
                            if p_loc_id in parents_data[param_id]:
                                # Store the older of the changed periods (the end will always be None)
                                # Only need to check the start date of the first period
                                if periods[0][0] < parents_data[param_id][p_loc_id][0][0][0]:
                                    parents_data[param_id][p_loc_id][0] = periods
                            else:
                                parents_data[param_id][p_loc_id] = [periods,
                                                                    loc_level_list[loc_id]
                                                                    ]
                        else:
                            parents_data[param_id] = {p_loc_id : [periods,
                                                                  loc_level_list[loc_id]
                                                                  ]
                                                      }

        # Now that the time aggregate types have been set up correctly,
        # fire off requests for the location aggregates to be calculated
        async = current.s3task.async
        for (param_id, loc_dict) in parents_data.items():
            for (loc_id, (changed_periods, loc_level)) in loc_dict.items():
                for (start_date, end_date) in changed_periods:
                    s, e = str(start_date), str(end_date)
                    async("vulnerability_update_location_aggregate",
                          args = [loc_level, loc_id, param_id, s, e],
                          timeout = 1800 # 30m
                          )

        # OPTIMISATION step 2
        # Get all the locations for which the resilence indicator needs to be
        # recalculated. Without this the calculations will be triggered for
        # each parameter and for each location unnecessarily.
        # For example an import of 12 communes in the same district with data
        # for the 10 parameters that make up the resilence calculation will trigger
        # 480 updates, rather than the optimal 15, for each time period.
        resilence_parents = {}
        for (loc_id, periods) in location_dict.items():
            resilence_parents[loc_id] = (periods, loc_level_list[loc_id], True)
            for p_loc_row in parents[loc_id]:
                p_loc_id = p_loc_row.id
                if p_loc_id in resilence_parents:
                    # store the older of the changed periods (the end will always be None)
                    # Only need to check the start date of the first period
                    if periods[0][0] < resilence_parents[p_loc_id][0][0][0]:
                        resilence_parents[p_loc_id][0] = periods
                else:
                    resilence_parents[p_loc_id] = [periods, loc_level_list[loc_id], False]

        # Now calculate the resilience indicators
        vulnerability_resilience = S3VulnerabilityModel.vulnerability_resilience
        resilience_pid = s3db.vulnerability_resilience_id()
        for (location_id, (period, loc_level, use_location)) in resilence_parents.items():
            for (start_date, end_date) in changed_periods:
                vulnerability_resilience(loc_level,
                                         location_id,
                                         resilience_pid,
                                         vulnerability_pids,
                                         start_date,
                                         end_date,
                                         use_location,
                                         )

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_update_location_aggregate(location_level,
                                                location_id,
                                                parameter_id,
                                                start_date,
                                                end_date
                                                ):
        """
           Calculates the vulnerability_aggregate for a specific parameter at a
           specific location and time.

            @param location_level: the level of location_id (e.g. "L2"), used
                                   to look up its direct child locations
            @param location_id: the location record ID
            @param parameter_id: the parameter record ID
            @param start_date: the start date of the time period (as string)
            @param end_date: the end date of the time period (as string;
                             the literal string "None" for an open-ended period)
        """

        db = current.db

        dtable = current.s3db.vulnerability_data
        atable = db.vulnerability_aggregate

        # Get all the child locations
        child_locations = current.gis.get_children(location_id, location_level)
        child_ids = [row.id for row in child_locations]

        # Get the most recent vulnerability_data record for all child locations
        # - only approved, non-deleted records count towards the aggregate
        query = (dtable.parameter_id == parameter_id) & \
                (dtable.deleted != True) & \
                (dtable.approved_by != None) & \
                (dtable.location_id.belongs(child_ids))
        if end_date == "None": # converted to string as async parameter
            end_date = None
        else:
            query &= (dtable.date <= end_date)
        rows = db(query).select(dtable.value,
                                dtable.date,
                                dtable.location_id,
                                orderby=(dtable.location_id, ~dtable.date),
                                # groupby avoids duplicate records for the same
                                # location, but is slightly slower than just
                                # skipping the duplicates in the loop below
                                #groupby=(dtable.location_id)
                                )

        # Collect the values, skip duplicate records for the
        # same location => use the most recent one, which is
        # the first row for each location as per the orderby
        # in the query above
        last_location = None
        values = []
        append = values.append
        for row in rows:
            new_location_id = row.location_id
            if new_location_id != last_location:
                last_location = new_location_id
                append(row.value)

        # Aggregate the values
        # - nothing to do if no child location has any data
        values_len = len(values)
        if not values_len:
            return

        values_sum = sum(values)
        values_min = min(values)
        values_max = max(values)
        values_avg = float(values_sum) / values_len

        # Median and median absolute deviation (a robust spread measure)
        from numpy import median
        values_med = median(values)
        values_mad = median([abs(v - values_med) for v in values])

        # Add or update the aggregated values in the database

        # Do we already have a record?
        query = (atable.location_id == location_id) & \
                (atable.parameter_id == parameter_id) & \
                (atable.date == start_date) & \
                (atable.end_date == end_date)
        exists = db(query).select(atable.id, limitby=(0, 1)).first()

        attr = dict(agg_type = 2, # Location aggregate (vs. 1=time, 3=copy)
                    reported_count = values_len,
                    ward_count = len(child_ids),
                    min = values_min,
                    max = values_max,
                    mean = values_avg,
                    median = values_med,
                    mad = values_mad,
                    sum = values_sum,
                    )
        if exists:
            # Update
            db(query).update(**attr)
        else:
            # Insert new
            atable.insert(parameter_id = parameter_id,
                          location_id = location_id,
                          date = start_date,
                          end_date = end_date,
                          **attr
                          )
        return

# =============================================================================
class S3HazardModel(S3Model):
    """
        Hazard Model
        - lookup table of Hazard types, referenced by Risks & Evacuation Routes
    """

    names = ["vulnerability_hazard",
             "vulnerability_hazard_id",
             ]

    def model(self):
        """
            Define the vulnerability_hazard table and its reusable
            foreign key field
        """

        T = current.T
        db = current.db

        # ---------------------------------------------------------------------
        # Hazards
        #
        tablename = "vulnerability_hazard"
        table = self.define_table(tablename,
                                  Field("name", length=128, notnull=True, unique=True,
                                        label=T("Name"),
                                        ),
                                  s3_comments(),
                                  *s3_meta_fields())

        current.response.s3.crud_strings[tablename] = Storage(
            title_create = T("Add Hazard"),
            title_display = T("Hazard Details"),
            title_list = T("Hazards"),
            title_update = T("Edit Hazard"),
            title_search = T("Search Hazards"),
            subtitle_create = T("Add New Hazard"),
            label_list_button = T("List Hazards"),
            label_create_button = T("Add Hazard"),
            label_delete_button = T("Remove Hazard"),
            msg_record_created = T("Hazard added"),
            msg_record_modified = T("Hazard updated"),
            msg_record_deleted = T("Hazard removed"),
            msg_list_empty = T("No Hazards currently recorded"))

        # Reusable Field
        represent = S3Represent(lookup=tablename)
        hazard_id = S3ReusableField("hazard_id", table,
                                    sortby = "name",
                                    label = T("Hazard"),
                                    requires = IS_NULL_OR(
                                                IS_ONE_OF(db, "vulnerability_hazard.id",
                                                          represent,
                                                          sort=True)),
                                    represent = represent,
                                    comment = S3AddResourceLink(c="vulnerability",
                                                                f="hazard",
                                                                title=T("Add Hazard")),
                                    ondelete = "CASCADE",
                                    )

        self.configure(tablename,
                       deduplicate = self.vulnerability_hazard_duplicate
                       )

        # ---------------------------------------------------------------------
        # Pass names back to global scope (s3.*)
        #
        return Storage(
                vulnerability_hazard_id = hazard_id
            )

    # -------------------------------------------------------------------------
    @staticmethod
    def defaults():
        """
            Return safe defaults for model globals, this will be called instead
            of model() in case the model has been deactivated in
            deployment_settings.
        """

        return Storage(
            hazard_id = S3ReusableField("hazard_id", "integer",
                                        readable=False,
                                        writable=False),
        )

    # -------------------------------------------------------------------------
    @staticmethod
    def vulnerability_hazard_duplicate(item):
        """
            Import item de-duplication
            - match existing hazards by case-insensitive name

            @param item: the S3ImportItem to check
        """

        if item.tablename == "vulnerability_hazard":
            table = item.table
            name = item.data.get("name", None)
            if not name:
                # No name in the import item => cannot deduplicate
                # (previously this raised AttributeError on name.lower())
                return
            query = (table.name.lower() == name.lower())
            duplicate = current.db(query).select(table.id,
                                                 limitby=(0, 1)).first()
            if duplicate:
                item.id = duplicate.id
                item.method = item.METHOD.UPDATE

# =============================================================================
class S3RiskModel(S3Model):
    """
        Risks
        - risk areas for a particular Hazard, with optional polygon,
          taggable & linkable to Coalitions
    """

    names = ["vulnerability_risk",
             "vulnerability_risk_group",
             "vulnerability_risk_tag",
             ]

    def model(self):
        """ Define the risk, risk tag & risk<>coalition link tables """

        T = current.T
        # Fix: db was previously undefined here (NameError when the
        # risk_id field validator was constructed)
        db = current.db

        add_component = self.add_component
        define_table = self.define_table

        # ---------------------------------------------------------------------
        # Risks
        #
        tablename = "vulnerability_risk"
        table = define_table(tablename,
                             self.super_link("doc_id", "doc_entity"),
                             Field("name", notnull=True,
                                   label=T("Name")),
                             self.vulnerability_hazard_id(),
                             self.gis_location_id(
                                widget = S3LocationSelectorWidget(
                                    #catalog_layers=True,
                                    polygon=True
                                    )
                                ),
                             s3_comments(),
                             *s3_meta_fields())

        current.response.s3.crud_strings[tablename] = Storage(
            title_create = T("Add Risk"),
            title_display = T("Risk Details"),
            title_list = T("Risks"),
            title_update = T("Edit Risk"),
            title_search = T("Search Risks"),
            subtitle_create = T("Add New Risk"),
            label_list_button = T("List Risks"),
            label_create_button = T("Add Risk"),
            label_delete_button = T("Remove Risk from this event"),
            msg_record_created = T("Risk added"),
            msg_record_modified = T("Risk updated"),
            msg_record_deleted = T("Risk removed"),
            msg_list_empty = T("No Risks currently registered for this event"))

        self.configure(tablename,
                       super_entity="doc_entity",
                       )

        # Reusable Field
        represent = S3Represent(lookup=tablename)
        risk_id = S3ReusableField("risk_id", table,
                                  sortby = "name",
                                  label = T("Risk"),
                                  requires = IS_NULL_OR(
                                                IS_ONE_OF(db, "vulnerability_risk.id",
                                                          represent,
                                                          sort=True)),
                                  represent = represent,
                                  comment = S3AddResourceLink(c="vulnerability",
                                                              f="risk",
                                                              title=T("Add Risk")),
                                  ondelete = "CASCADE",
                                  )

        # Tags as component of Risks
        add_component("vulnerability_risk_tag",
                      vulnerability_risk=dict(joinby="risk_id",
                                              name="tag"))

        # Coalitions
        add_component("org_group",
                      vulnerability_risk=dict(link="vulnerability_risk_group",
                                              joinby="risk_id",
                                              key="group_id",
                                              actuate="hide"))
        # Format for InlineComponent/filter_widget
        add_component("vulnerability_risk_group",
                      vulnerability_risk="risk_id")

        # ---------------------------------------------------------------------
        # Risk Tags
        # - Key-Value extensions
        # - can be used to identify a Source
        # - can be used to add extra attributes for filtering &/or styling
        # - can link Risks to other Systems
        # - can be a Triple Store for Semantic Web support
        #
        tablename = "vulnerability_risk_tag"
        table = define_table(tablename,
                             risk_id(),
                             # key is a reserved word in MySQL
                             Field("tag", label=T("Key")),
                             Field("value", label=T("Value")),
                             s3_comments(),
                             *s3_meta_fields())

        # ---------------------------------------------------------------------
        # Risks <> Coalitions link table
        #
        tablename = "vulnerability_risk_group"
        table = define_table(tablename,
                             risk_id(),
                             self.org_group_id(empty=False),
                             *s3_meta_fields())

        # Pass names back to global scope (s3.*)
        return dict()

# =============================================================================
class S3EvacRouteModel(S3Model):
    """
        Evacuation Routes
        - routes for a particular Hazard, linkable to Coalitions
    """

    names = ["vulnerability_evac_route",
             "vulnerability_evac_route_group",
             ]

    def model(self):
        """ Define the evac route & evac route<>coalition link tables """

        T = current.T

        add_component = self.add_component
        define_table = self.define_table

        # ---------------------------------------------------------------------
        # Evacuation Routes
        #
        tablename = "vulnerability_evac_route"
        table = define_table(tablename,
                             self.super_link("doc_id", "doc_entity"),
                             Field("name", notnull=True,
                                   label=T("Name")),
                             # Multiple? (=> link table)
                             # Fix: label was previously ("For Hazard"),
                             # i.e. an untranslated bare string
                             self.vulnerability_hazard_id(label=T("For Hazard")),
                             # This would normally be a LineString
                             self.gis_location_id(
                                widget = S3LocationSelectorWidget(polygon=True),
                             ),
                             s3_comments(),
                             *s3_meta_fields())

        current.response.s3.crud_strings[tablename] = Storage(
            title_create = T("Add Evacuation Route"),
            title_display = T("Evacuation Route Details"),
            title_list = T("Evacuation Routes"),
            title_update = T("Edit Evacuation Route"),
            title_search = T("Search Evacuation Routes"),
            subtitle_create = T("Add New Evacuation Route"),
            label_list_button = T("List Evacuation Routes"),
            label_create_button = T("Add Evacuation Route"),
            label_delete_button = T("Remove Evacuation Route from this event"),
            msg_record_created = T("Evacuation Route added"),
            msg_record_modified = T("Evacuation Route updated"),
            msg_record_deleted = T("Evacuation Route removed"),
            msg_list_empty = T("No Evacuation Routes currently registered for this event"))

        self.configure(tablename,
                       super_entity="doc_entity",
                       )

        # Coalitions
        add_component("org_group",
                      vulnerability_evac_route=dict(link="vulnerability_evac_route_group",
                                                    joinby="evac_route_id",
                                                    key="group_id",
                                                    actuate="hide"))
        # Format for InlineComponent/filter_widget
        add_component("vulnerability_evac_route_group",
                      vulnerability_evac_route="evac_route_id")

        represent = S3Represent(lookup=tablename)

        # ---------------------------------------------------------------------
        # Evacuation Routes <> Coalitions link table
        #
        tablename = "vulnerability_evac_route_group"
        table = define_table(tablename,
                             # NB "table" in the Field below still refers to
                             # the vulnerability_evac_route table (evaluated
                             # before this assignment)
                             Field("evac_route_id", table,
                                   requires = IS_ONE_OF(current.db, "vulnerability_evac_route.id",
                                                        represent,
                                                        sort=True,
                                                        ),
                                   represent = represent,
                                   ),
                             self.org_group_id(empty=False),
                             *s3_meta_fields())

        # Pass names back to global scope (s3.*)
        return dict()

# =============================================================================
def vulnerability_rheader(r, tabs=[]):
    """
        Vulnerability Resource Headers
        - used in interactive views only

        @param r: the S3Request
        @param tabs: ignored (tabs are defined per-resource below; kept in
                     the signature for caller compatibility)
        @return: the rheader DIV, or None if no rheader is applicable
    """

    if r.representation != "html":
        # RHeaders only used in interactive views
        return None
    record = r.record
    if record is None:
        # List or Create form: rheader makes no sense here
        return None

    table = r.table
    resourcename = r.name
    T = current.T

    # Fix: initialize so that resources without a specific rheader
    # return None instead of raising NameError
    rheader = None

    if resourcename == "risk":
        # Tabs
        tabs = [(T("Basic Details"), None),
                (T("Tags"), "tag"),
                ]
        rheader_tabs = s3_rheader_tabs(r, tabs)
        rheader = DIV(TABLE(TR(TH("%s: " % table.name.label),
                               record.name
                               ),
                            ), rheader_tabs)

    return rheader

# END =========================================================================
