# vim: expandtab:ts=4:sw=4
from __future__ import absolute_import
import numpy as np
from . import kalman_filter
from . import linear_assignment
from . import iou_matching
from .face_track import Track


class Tracker:
    """
    This is the multi-target tracker.

    Parameters
    ----------
    metric : nn_matching.NearestNeighborDistanceMetric
        A distance metric for measurement-to-track association.
    max_iou_distance : float
        Gating threshold for the IOU matching stage. Associations with a
        higher cost are disregarded.
    max_age : int
        Maximum number of missed misses before a track is deleted.
    n_init : int
        Number of consecutive detections before the track is confirmed. The
        track state is set to `Deleted` if a miss occurs within the first
        `n_init` frames.
    max_center_distance : float
        Gating threshold (in pixels) for the final center-distance matching
        stage. Defaults to 200, the value previously hard-coded in `_match`.

    Attributes
    ----------
    metric : nn_matching.NearestNeighborDistanceMetric
        The distance metric used for measurement to track association.
    max_age : int
        Maximum number of missed misses before a track is deleted.
    n_init : int
        Number of frames that a track remains in initialization phase.
    kf : kalman_filter.KalmanFilter
        A Kalman filter to filter target trajectories in image space.
    tracks : List[Track]
        The list of active tracks at the current time step.

    """

    def __init__(self, metric, max_iou_distance=0.5, max_age=30, n_init=3,
                 max_center_distance=200):
        self.metric = metric
        self.max_iou_distance = max_iou_distance
        self.max_age = max_age
        self.n_init = n_init
        self.max_center_distance = max_center_distance

        self.kf = kalman_filter.KalmanFilter()
        self.tracks = []
        # Track ids are handed out sequentially, starting at 1.
        self._next_id = 1

    def predict(self):
        """Propagate track state distributions one time step forward.

        This function should be called once every time step, before `update`.
        """
        for track in self.tracks:
            track.predict(self.kf)

    def update(self, image, detections, aln):
        """Perform measurement update and track management.

        Parameters
        ----------
        image : ndarray
            The current frame; forwarded to `Track.update` for feature
            extraction.
        detections : List[deep_track.detection.Detection]
            A list of detections at the current time step.
        aln : object
            Forwarded unchanged to `Track.update`.
            NOTE(review): presumably a face-alignment helper — confirm
            against `Track.update`.

        """
        # Run matching cascade.
        matches, unmatched_tracks, unmatched_detections = \
            self._match(detections)

        # Update track set: matched tracks absorb their detection, unmatched
        # tracks register a miss (which may delete them), and leftover
        # detections spawn new tentative tracks.
        for track_idx, detection_idx in matches:
            self.tracks[track_idx].update(
                self.kf, image, detections[detection_idx], aln)
        for track_idx in unmatched_tracks:
            self.tracks[track_idx].mark_missed()
        for detection_idx in unmatched_detections:
            self._initiate_track(detections[detection_idx])
        self.tracks = [t for t in self.tracks if not t.is_deleted()]

        # Update distance metric with the features accumulated by confirmed
        # tracks since the last update; features are consumed (cleared) here.
        active_targets = [t.track_id for t in self.tracks if t.is_confirmed()]
        features, targets = [], []
        for track in self.tracks:
            if not track.is_confirmed():
                continue
            features += track.features
            targets += [track.track_id for _ in track.features]
            track.features = []
        self.metric.partial_fit(
            np.asarray(features), np.asarray(targets), active_targets)

    def _match(self, detections):
        """Associate detections to tracks in three stages.

        1. Appearance matching cascade over confirmed tracks.
        2. IOU matching over unconfirmed tracks plus tracks missed for
           exactly one frame.
        3. Center-distance matching as a last resort over the remainder.

        Returns
        -------
        (List[(int, int)], List[int], List[int])
            Matched (track_idx, detection_idx) pairs, unmatched track
            indices, and unmatched detection indices.
        """

        def gated_metric(tracks, dets, track_indices, detection_indices):
            # Appearance cost, gated by Mahalanobis distance in the
            # Kalman-filter state space.
            features = np.array([dets[i].feature for i in detection_indices])
            targets = np.array([tracks[i].track_id for i in track_indices])
            cost_matrix = self.metric.distance(features, targets)
            return linear_assignment.gate_cost_matrix(
                self.kf, cost_matrix, tracks, dets, track_indices,
                detection_indices)

        # Split track set into confirmed and unconfirmed tracks.
        confirmed_tracks = [
            i for i, t in enumerate(self.tracks) if t.is_confirmed()]
        unconfirmed_tracks = [
            i for i, t in enumerate(self.tracks) if not t.is_confirmed()]

        # Stage 1: associate confirmed tracks using appearance features.
        matches_a, unmatched_tracks_a, unmatched_detections = \
            linear_assignment.matching_cascade(
                gated_metric, self.metric.matching_threshold, self.max_age,
                self.tracks, detections, confirmed_tracks)

        # Stage 2: associate remaining tracks together with unconfirmed
        # tracks using IOU. Only tracks missed for exactly one frame are
        # eligible; older misses go straight to the unmatched set so they
        # are still marked missed (and eventually deleted) by `update`.
        iou_track_candidates = unconfirmed_tracks + [
            k for k in unmatched_tracks_a
            if self.tracks[k].time_since_update == 1]
        stale_tracks = [
            k for k in unmatched_tracks_a
            if self.tracks[k].time_since_update != 1]
        matches_b, unmatched_tracks_b, unmatched_detections = \
            linear_assignment.min_cost_matching(
                iou_matching.iou_cost, self.max_iou_distance, self.tracks,
                detections, iou_track_candidates, unmatched_detections)

        # Stage 3: last-resort association by center distance (pixels).
        matches_c, unmatched_tracks_c, unmatched_detections = \
            linear_assignment.min_cost_matching(
                iou_matching.distance_cost, self.max_center_distance,
                self.tracks, detections, unmatched_tracks_b,
                unmatched_detections)

        matches = matches_a + matches_b + matches_c
        # BUGFIX: previously only `unmatched_tracks_c` was returned, so
        # confirmed tracks missed for more than one frame were never passed
        # to `mark_missed` and could never be deleted.
        unmatched_tracks = list(set(stale_tracks + unmatched_tracks_c))
        return matches, unmatched_tracks, unmatched_detections

    def _initiate_track(self, detection):
        """Start a new tentative track from an unmatched detection."""
        mean, covariance = self.kf.initiate(detection.to_xyah())
        self.tracks.append(Track(
            mean, covariance, self._next_id, self.n_init, self.max_age,
            detection.cls, detection.feature))
        self._next_id += 1
