from __future__ import division, print_function, absolute_import

import base64
import os
import random
import sys
import traceback
import warnings
from socket import *

import cv2
import numpy as np
from PIL import Image

from deep_sort import nn_matching
from deep_sort import preprocessing
from deep_sort.detection import Detection
from deep_sort.detection_yolo import Detection_YOLO
from deep_sort.tracker import Tracker
from tools import generate_detections as gdet
from yolo import YOLO
from add_mosaic.face_detection import FaceDetection
from add_mosaic.license_plate_detection import LicensePlateDetection

warnings.filterwarnings('ignore')


class TrackerArgs:
    """Mutable switchboard of per-connection feature flags.

    The TCP command loop flips these booleans in response to
    "SuperMap..." protocol tokens; ``processData`` reads them to decide
    which detectors to run on each frame.
    """

    def __init__(self):
        # Every feature starts off; the client must enable each one explicitly.
        for flag in ("Detection", "Tracking", "FaceDetection", "LicensePlateDetection"):
            setattr(self, flag, False)
        # Class names YOLO should report, filled from the
        # "SuperMapDetectionTypesStart...End" configuration message.
        self.detectionTypes = []


def getPort():
    """Return a TCP port in [15000, 20000] that is not currently listening.

    Shells out to ``netstat`` to collect the ports in LISTEN state, then
    draws random candidates until one is free.  If ``netstat`` is not
    available the command output is empty and the first candidate is used.
    """
    pscmd = "netstat -ntl |grep -v Active| grep -v Proto|awk '{print $4}'|awk -F: '{print $NF}'"
    # netstat output is text, so keep the busy ports as strings.
    busy_ports = set(os.popen(pscmd).read().split("\n"))
    while True:
        candidate = random.randint(15000, 20000)
        # Bug fix: the original tested `int in list-of-str`, which is always
        # False, so occupied ports could be returned.  Compare as strings.
        if str(candidate) not in busy_ports:
            return candidate


# Shared feature flags toggled by the client protocol (see main loop below).
input_args = TrackerArgs()
# Server listens on loopback only, on a randomly chosen free port.
HOST = '127.0.0.1'
PORT = getPort()
# 20 KiB receive buffer for each recv() call in the main loop.
BUFSIZ = 1024 * 20
ADDR = (HOST, PORT)
tcpSerSock = socket(AF_INET, SOCK_STREAM)
tcpSerSock.bind(ADDR)
tcpSerSock.listen(5)

# YOLO keeps a reference to input_args.detectionTypes; the main loop mutates
# that list in place to change which classes are detected.
yolo = YOLO(input_args.detectionTypes)
# Deep SORT association parameters.
max_cosine_distance = 0.3
nn_budget = None
nms_max_overlap = 1.0
# Deep SORT
# Appearance-embedding model (presumably mars-small128 re-ID weights —
# TODO confirm), resolved relative to this file so CWD does not matter.
model_filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'model_data/yolo/mars-small128.pb')
encoder = gdet.create_box_encoder(model_filename, batch_size=1)
metric = nn_matching.NearestNeighborDistanceMetric("cosine", max_cosine_distance, nn_budget)
tracker = Tracker(metric)

faceDetection=FaceDetection()
licensePlateDetection=LicensePlateDetection()
# fourcc = cv2.VideoWriter_fourcc(*'XVID')
# out = cv2.VideoWriter('output_yolov4TestCompress.avi', fourcc, 30, (1920, 1080))


def processData(tcpCliSock, frame):
    """Run the enabled detectors on one frame and send the result to the client.

    Parameters
    ----------
    tcpCliSock : socket
        Connected client socket; one newline-terminated result line is sent.
    frame : numpy.ndarray
        Decoded BGR image as produced by ``cv2.imdecode``.

    The result line is a space-separated sequence of
    ``<label> <x1> <y1> <x2> <y2>`` groups, where the label is a class name
    (detection), a track id (tracking), or whatever the face / license-plate
    detectors return.
    """
    detectionResult = ""

    # OpenCV decodes to BGR; PIL and the detectors expect RGB.
    sourceImage = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    image = Image.fromarray(sourceImage)

    if input_args.Detection:
        # Renamed from `confidence` so the comprehension's loop variable no
        # longer shadows the list returned by YOLO.
        boxes, confidences, classes = yolo.detect_image(image)

        if input_args.Tracking:
            # Deep SORT needs appearance features for re-identification.
            features = encoder(frame, boxes)
            detections = [Detection(bbox, score, cls, feature)
                          for bbox, score, cls, feature in zip(boxes, confidences, classes, features)]
        else:
            detections = [Detection_YOLO(bbox, score, cls)
                          for bbox, score, cls in zip(boxes, confidences, classes)]

        # Run non-maxima suppression to drop heavily overlapping boxes.
        boxes = np.array([d.tlwh for d in detections])
        scores = np.array([d.confidence for d in detections])
        indices = preprocessing.non_max_suppression(boxes, nms_max_overlap, scores)
        detections = [detections[i] for i in indices]

        for det in detections:
            bbox = det.to_tlbr()
            if len(classes) > 0:
                cls = det.class_name
                detectionResult = detectionResult + f'{str(cls)} {str(bbox[0])} {str(bbox[1])} {str(bbox[2])} {str(bbox[3])} '

        if input_args.Tracking:
            # Call the tracker.
            tracker.predict()
            tracker.update(detections)

            for track in tracker.tracks:
                # Skip tentative tracks and tracks without a recent match.
                if not track.is_confirmed() or track.time_since_update > 1:
                    continue
                bbox = track.to_tlbr()
                detectionResult = detectionResult + f'{str(track.track_id)} {str(bbox[0])} {str(bbox[1])} {str(bbox[2])} {str(bbox[3])} '

    if input_args.FaceDetection:
        detectionResult = detectionResult + faceDetection.detection_face(image)

    if input_args.LicensePlateDetection:
        detectionResult = detectionResult + licensePlateDetection.detection_license_plate(sourceImage)

    try:
        tcpCliSock.send((detectionResult + "\n").encode("utf-8"))
    except Exception:
        # Best-effort send: log and keep serving.  A bare `except:` here would
        # also swallow KeyboardInterrupt/SystemExit.
        traceback.print_exc()


if __name__ == "__main__":
    # The launching process parses this line to learn which port was chosen.
    sys.stdout.write("Port:" + str(PORT))
    sys.stdout.write('\n')
    receivedData = ""
    while True:
        print('waiting for connection...')
        tcpCliSock, addr = tcpSerSock.accept()
        tcpCliSock.send(("Connected\n").encode("utf-8"))
        print('...connected from:', addr)
        while True:
            try:
                data = tcpCliSock.recv(BUFSIZ)
                temp = data.decode("UTF-8", 'ignore')
                if len(temp) > 0:
                    receivedData = receivedData + temp
                    # --- configuration message: comma-separated YOLO class names ---
                    if ("SuperMapDetectionTypesStart" in receivedData
                            and "SuperMapDetectionTypesEnd" in receivedData):
                        begin = receivedData.find("SuperMapDetectionTypesStart")
                        end = receivedData.rfind("SuperMapDetectionTypesEnd")
                        tempDetectionTypes = receivedData[begin + len("SuperMapDetectionTypesStart"):end]
                        receivedData = receivedData[0:begin] + receivedData[end + len("SuperMapDetectionTypesEnd"):]
                        # Mutate the list in place: `yolo` holds a reference to it.
                        # split(",") on a comma-free payload yields [payload],
                        # matching the original single-type branch.
                        input_args.detectionTypes.clear()
                        input_args.detectionTypes.extend(tempDetectionTypes.split(","))
                    # --- one-shot command tokens (consume exactly one per recv) ---
                    if "SuperMapResetTracker" in receivedData:
                        receivedData = receivedData.replace("SuperMapResetTracker", "")
                        tracker = Tracker(metric)
                    elif ("SuperMapDetection" in receivedData
                          and "SuperMapDetectionTypes" not in receivedData):
                        # The extra guard keeps a partially received
                        # "SuperMapDetectionTypesStart..." payload (End marker
                        # not arrived yet) from being corrupted by this
                        # substring replace.
                        receivedData = receivedData.replace("SuperMapDetection", "")
                        input_args.Detection = True
                    elif "SuperMapNotDetection" in receivedData:
                        receivedData = receivedData.replace("SuperMapNotDetection", "")
                        input_args.Detection = False
                    elif "SuperMapTracking" in receivedData:
                        receivedData = receivedData.replace("SuperMapTracking", "")
                        input_args.Tracking = True
                    elif "SuperMapNotTracking" in receivedData:
                        receivedData = receivedData.replace("SuperMapNotTracking", "")
                        input_args.Tracking = False
                    elif "SuperMapFaceDetection" in receivedData:
                        receivedData = receivedData.replace("SuperMapFaceDetection", "")
                        input_args.FaceDetection = True
                    elif "SuperMapNotFaceDetection" in receivedData:
                        receivedData = receivedData.replace("SuperMapNotFaceDetection", "")
                        input_args.FaceDetection = False
                    elif "SuperMapLicensePlateDetection" in receivedData:
                        receivedData = receivedData.replace("SuperMapLicensePlateDetection", "")
                        input_args.LicensePlateDetection = True
                    elif "SuperMapNotLicensePlateDetection" in receivedData:
                        receivedData = receivedData.replace("SuperMapNotLicensePlateDetection", "")
                        input_args.LicensePlateDetection = False
                    elif "SuperMapClosed" in receivedData:
                        tcpCliSock.close()
                        print("Disconnected")
                        break
                    # --- frame payload: base64 image between Start/End markers ---
                    if "SuperMapImageEnd" in receivedData and "SuperMapImageStart" in receivedData:
                        images = receivedData.split('SuperMapImageEnd')
                        currentImage = images[0]
                        # Keep whatever arrived after the first complete image;
                        # anything beyond a second fragment is discarded, as
                        # in the original protocol handling.
                        receivedData = images[1] if len(images) == 2 else ""
                        currentImage = currentImage.replace("SuperMapImageEnd", "").replace("SuperMapImageStart", "")
                        currentImage = base64.b64decode(currentImage)
                        # np.fromstring is deprecated and removed in NumPy 2.0;
                        # np.frombuffer is the supported equivalent for bytes.
                        np_arr = np.frombuffer(currentImage, np.uint8)
                        receivedImage = cv2.imdecode(np_arr, 1)
                        processData(tcpCliSock, receivedImage)
            except Exception:
                # Any failure while parsing or serving this client tears down
                # the connection; log the full traceback (the original printed
                # `str(Exception)` — the class — and the None return of
                # print_exc) and go back to accepting clients.
                traceback.print_exc()
                print("Disconnected")
                break