from .bbox import get_affine_transform, affine_transform, bbox_areas
from .data_aug import _distort, _crop, _pad_to_square, _flip, Data_anchor_sample
from .utils import _draw_umich_gaussian, _gaussian_2d, _get_border
import numpy as np
import cv2
import time

class CenterNetTargetGenerator(object):
    """Target generator for CenterNet.

    Builds the dense training targets for one image: a per-class center
    heatmap, a log-scale width/height regression map, a sub-pixel center
    offset map, and the masks selecting the valid (center) pixels.

    Parameters
    ----------
    num_class : int
        Number of categories.
    output_width : int
        Width of the network output.
    output_height : int
        Height of the network output.

    """
    def __init__(self, num_class, output_width, output_height):
        super(CenterNetTargetGenerator, self).__init__()
        self._num_class = num_class
        self._output_width = int(output_width)
        self._output_height = int(output_height)

    @staticmethod
    def _gaussian_radius(det_size, min_overlap=0.7):
        """Return the gaussian radius for a box of size ``det_size`` (h, w).

        Standard CornerNet/CenterNet derivation: the largest radius such
        that a corner displaced by it still produces a box with IoU >=
        ``min_overlap`` against the ground truth (three quadratic cases).

        Inlined here because the original code called a module-level
        ``_gaussian_radius`` that was never imported, so any valid box
        raised ``NameError``.
        """
        height, width = det_size

        # case 1: both corners move inward
        a1 = 1
        b1 = height + width
        c1 = width * height * (1 - min_overlap) / (1 + min_overlap)
        sq1 = np.sqrt(b1 ** 2 - 4 * a1 * c1)
        r1 = (b1 + sq1) / 2

        # case 2: both corners move outward
        a2 = 4
        b2 = 2 * (height + width)
        c2 = (1 - min_overlap) * width * height
        sq2 = np.sqrt(b2 ** 2 - 4 * a2 * c2)
        r2 = (b2 + sq2) / 2

        # case 3: one corner in, one corner out
        a3 = 4 * min_overlap
        b3 = -2 * min_overlap * (height + width)
        c3 = (min_overlap - 1) * width * height
        sq3 = np.sqrt(b3 ** 2 - 4 * a3 * c3)
        r3 = (b3 + sq3) / 2
        return min(r1, r2, r3)

    def __call__(self, gt_boxes, gt_ids):
        """Generate training targets.

        Parameters
        ----------
        gt_boxes : array-like, shape (N, 4)
            Ground-truth boxes ``(x1, y1, x2, y2)`` already expressed in
            OUTPUT (heatmap) coordinates.
        gt_ids : array-like, shape (N,) or (N, 1)
            Class id per box.

        Returns
        -------
        tuple of numpy.ndarray
            ``(heatmap, wh_target, wh_mask, center_reg, center_reg_mask)``
            with shapes ``(num_class, H, W)`` and ``(2, H, W)``.
        """
        out_h, out_w = self._output_height, self._output_width
        heatmap = np.zeros((self._num_class, out_h, out_w), dtype=np.float32)
        wh_target = np.zeros((2, out_h, out_w), dtype=np.float32)
        wh_mask = np.zeros((2, out_h, out_w), dtype=np.float32)
        center_reg = np.zeros((2, out_h, out_w), dtype=np.float32)
        center_reg_mask = np.zeros((2, out_h, out_w), dtype=np.float32)
        for bbox, cid in zip(gt_boxes, gt_ids):
            cid = int(cid)
            box_h, box_w = bbox[3] - bbox[1], bbox[2] - bbox[0]
            if box_h <= 0 or box_w <= 0:
                # degenerate box: contributes no target
                continue
            radius = self._gaussian_radius((np.ceil(box_h), np.ceil(box_w)))
            radius = max(0, int(radius))
            center = np.array(
                [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2], dtype=np.float32)
            center_int = center.astype(np.int32)
            center_x, center_y = center_int
            assert center_x < out_w, \
                'center_x: {} > output_width: {}'.format(center_x, out_w)
            assert center_y < out_h, \
                'center_y: {} > output_height: {}'.format(center_y, out_h)
            # NOTE(review): the 4th positional argument of
            # _draw_umich_gaussian is conventionally the peak value k
            # (default 1); passing `radius` scales the peak by the radius.
            # Preserved as-is -- confirm against the helper's signature.
            _draw_umich_gaussian(heatmap[cid], center_int, radius, radius)
            # width/height are regressed in log scale at the center pixel
            wh_target[0, center_y, center_x] = np.log(box_w)
            wh_target[1, center_y, center_x] = np.log(box_h)
            wh_mask[:, center_y, center_x] = 1.0
            # sub-pixel offset lost by quantizing the center to int
            center_reg[:, center_y, center_x] = center - center_int
            center_reg_mask[:, center_y, center_x] = 1.0

        return heatmap, wh_target, wh_mask, center_reg, center_reg_mask


class CenterNetDefaultTrainTransform(object):
    """Default training transform for CenterNet.

    Randomly flips, crops/rescales and photometrically distorts the
    input image, normalizes it to CHW float32 in [-1, 1), and produces
    the dense CenterNet targets at ``1 / scale_factor`` resolution.
    """

    def __init__(self, width, height, num_class=1, scale_factor=4, **kwargs):
        self._kwargs = kwargs
        assert width == height
        self._width = width
        self._height = height
        self._num_class = num_class
        self._scale_factor = scale_factor
        self._max_objects = 32
        self._target_generator = CenterNetTargetGenerator(
            num_class, width // scale_factor, height // scale_factor)

    def __call__(self, src, targets, img_path):
        """Apply transform to training image/label."""
        image = src.copy()
        boxes = targets[:, :4].copy()
        labels = targets[:, -1:].copy()

        # random horizontal flip (boxes are flipped alongside the image)
        image, boxes = _flip(image, boxes)

        img_h, img_w = image.shape[:2]
        input_h, input_w = self._height, self._width
        scale = max(img_h, img_w) * 1.0
        center = np.array([img_w / 2., img_h / 2.], dtype=np.float32)

        # random crop: jitter the crop center inside a safe border,
        # then randomly rescale
        border = scale * np.random.choice([0.1, 0.2, 0.25])
        border_w = _get_border(border, img_w)
        border_h = _get_border(border, img_h)
        center[0] = np.random.randint(low=border_w, high=img_w - border_w)
        center[1] = np.random.randint(low=border_h, high=img_h - border_h)
        scale = scale * np.random.choice(np.arange(0.3, 1.2, 0.1))

        trans_input = get_affine_transform(center, scale, 0, [input_w, input_h])
        image = cv2.warpAffine(image, trans_input, (input_w, input_h),
                               flags=cv2.INTER_LINEAR)
        output_w = input_w // self._scale_factor
        output_h = input_h // self._scale_factor
        trans_output = get_affine_transform(center, scale, 0, [output_w, output_h])

        # map the boxes into output (heatmap) coordinates
        for i in range(boxes.shape[0]):
            boxes[i, :2] = affine_transform(boxes[i, :2], trans_output)
            boxes[i, 2:4] = affine_transform(boxes[i, 2:4], trans_output)

        # keep boxes whose center stays strictly inside the output map ...
        box_centers = (boxes[:, :2] + boxes[:, 2:]) / 2
        inside = np.logical_and(
            np.array([1, 1]) < box_centers,
            box_centers < np.array([output_w - 1, output_h - 1])).all(axis=1)
        # ... and whose shorter side is more than 5 input pixels
        widths = (boxes[:, 2] - boxes[:, 0] + 1) * self._scale_factor
        heights = (boxes[:, 3] - boxes[:, 1] + 1) * self._scale_factor
        big_enough = np.minimum(widths, heights) > 5.0
        keep = inside & big_enough
        boxes = boxes[keep].copy()
        labels = labels[keep].copy()

        # photometric distortion, BGR -> RGB, normalize to [-1, 1), HWC -> CHW
        img = image.copy()
        img = _distort(img)
        img = img[:, :, ::-1]
        img = img.astype(np.float32)
        img = (img - 127.5) / 128.0
        img = img.transpose(2, 0, 1).astype(np.float32)

        # generate the training targets here so cpu workers can help
        # reduce the workload on the gpu
        heatmap, wh_target, wh_mask, center_reg, center_reg_mask = \
            self._target_generator(boxes, labels)

        return {
            'img': img,
            'heatmap': heatmap,
            'wh_target': wh_target,
            'wh_mask': wh_mask,
            'center_reg': center_reg,
            'center_mask': center_reg_mask,
        }



class CenterNetDefaultValTransform(object):
    """Default validation transform for CenterNet.

    Warps the image to ``(width, height)`` with an affine transform
    (preserving aspect ratio via the max-side scale), maps and clips the
    ground-truth boxes with the same transform, and normalizes the image
    with ImageNet mean/std to CHW float32.

    Parameters
    ----------
    width : int
        Network input width.
    height : int
        Network input height.
    """
    def __init__(self, width, height):
        self._width = width
        self._height = height
        # ImageNet channel statistics.
        # NOTE(review): assumes the image arrives in RGB order -- the train
        # transform converts BGR->RGB itself; confirm against the loader.
        self._mean = np.array([0.485, 0.456, 0.406], dtype=np.float32)
        self._std = np.array([0.229, 0.224, 0.225], dtype=np.float32)

    def __call__(self, src, tragets):  # NOTE: 'tragets' typo kept for API compatibility
        """Apply transform to validation image/label.

        Parameters
        ----------
        src : numpy.ndarray, shape (H, W, 3)
            Input image.
        tragets : numpy.ndarray, shape (N, >=5)
            Per-object rows: ``(x1, y1, x2, y2, ...)``; columns from 4 on
            are passed through unchanged as ``gt_list``.

        Returns
        -------
        dict
            ``{'img': CHW float32, 'gt_bbox': (N, 4), 'gt_list': (N, M)}``.
        """
        input_h, input_w = self._height, self._width
        h, w, _ = src.shape
        s = max(h, w) * 1.0
        c = np.array([w / 2., h / 2.], dtype=np.float32)

        # copy: the original sliced views and wrote into them, mutating the
        # caller's targets array in place
        bbox = tragets[:, :4].copy()
        gt_mask = tragets[:, 4:].copy()

        trans_input = get_affine_transform(c, s, 0, [input_w, input_h])
        img = cv2.warpAffine(src, trans_input, (input_w, input_h),
                             flags=cv2.INTER_LINEAR)

        # boxes use the same transform: output size equals input size here
        # (the original recomputed an identical matrix as `trans_output`)
        for i in range(bbox.shape[0]):
            bbox[i, :2] = affine_transform(bbox[i, :2], trans_input)
            bbox[i, 2:4] = affine_transform(bbox[i, 2:4], trans_input)
        # clip x-coords to the width and y-coords to the height (the
        # original clipped (x1, y1) by width and (x2, y2) by height, which
        # mixes axes and is wrong whenever width != height)
        bbox[:, 0::2] = np.clip(bbox[:, 0::2], 0, input_w - 1)
        bbox[:, 1::2] = np.clip(bbox[:, 1::2], 0, input_h - 1)

        # to tensor: scale to [0, 1], normalize, HWC -> CHW
        img = img.astype(np.float32) / 255.
        img = (img - self._mean) / self._std
        img = img.transpose(2, 0, 1).astype(np.float32)

        return {
            'img': img,
            'gt_bbox': bbox,
            'gt_list': gt_mask,
        }