//
// Created by hoyin on 2022/2/8.
//

#include "SingleTrack.h"
#include "chrono"
#include <utility>

#define KEY_ESC 27
#define KEY_SPACE 32
#define KEY_PREV ','
#define KEY_NEXT '.'

#define KEY_WIDTH_MINUS 'a'
#define KEY_WIDTH_PLUS 'd'
#define KEY_HEIGHT_MINUS 'w'
#define KEY_HEIGHT_PLUS 's'

#define KEY_X_MINUS 81
#define KEY_X_PLUS 83
#define KEY_Y_MINUS 82
#define KEY_Y_PLUS 84

#define MATCH_THRESHOLD 0.3
#define MATCH_METHOD TM_SQDIFF_NORMED

st::SingleTrack::SingleTrack(int trackerType, const VideoCapture& video, int frameWidth, int frameHeight,
							 bool cudaSupport) :
							 trackerType(trackerType),
							 frameWidth(frameWidth),
							 frameHeight(frameHeight),
							 video(video),
							 cudaSupport(cudaSupport) {
	// A capture that cannot report a frame count (-1) is treated as a live UVC
	// camera; anything that can is a seekable video file.
	sourceType = (this->video.get(CV_CAP_PROP_FRAME_COUNT) == -1)
			? st::SourceType::UVC_CAMERA
			: st::SourceType::VIDEO;
	initTracker();
}

st::SingleTrack::SingleTrack(int trackerType, MVCamera *mvCamera, int frameWidth, int frameHeight,
							 bool cudaSupport) :
							 trackerType(trackerType),
							 frameWidth(frameWidth),
							 frameHeight(frameHeight),
							 cudaSupport(cudaSupport) {
	// Hikvision MV cameras are driven through the MVCamera wrapper rather than
	// through cv::VideoCapture, so only the source kind and the camera handle
	// need to be recorded here.
	sourceType = st::SourceType::HIKVISION_MV_CAMERA;
	camera = mvCamera;
	initTracker();
}



void st::SingleTrack::initTracker() {
	// (Re)creates the tracker instance selected by `trackerType`.
	// OpenCV trackers cannot be re-initialized on a new target once init() has
	// been called, so this is invoked again whenever the target is re-acquired
	// after a tracking failure (see startTracking()).
	switch (trackerType) {
		case st::TrackerType::BOOSTING:
			tracker = TrackerBoosting::create();
			break;
		case st::TrackerType::MIL:
			tracker = TrackerMIL::create();
			break;
		case st::TrackerType::KCF:
			tracker = TrackerKCF::create();
			break;
		case st::TrackerType::TLD:
			tracker = TrackerTLD::create();
			break;
		case st::TrackerType::MEDIANFLOW:
			tracker = TrackerMedianFlow::create();
			break;
		case st::TrackerType::MOSSE:
			tracker = TrackerMOSSE::create();
			break;
		case st::TrackerType::CSRT:
			tracker = TrackerCSRT::create();
			break;  // was missing: fell through into default (benign here, but bug-prone)
		default:
			// Unknown tracker type: leave `tracker` untouched.
			break;
	}
}

int st::SingleTrack::startTracking(Rect2d bbox, int startFrame, Mat *trackResult, bool displayStream) {
	// Runs the tracking loop on the configured source.
	//   bbox          - initial target bounding box
	//   startFrame    - starting frame index (video files only)
	//   trackResult   - optional output; receives the cropped target region of the
	//                   latest frame in which the target had a valid box
	//   displayStream - when true, shows an annotated preview window ("Tracking")
	// Returns 0 on normal termination, 1 if the source cannot be opened.
	if (sourceType == st::SourceType::VIDEO) {
		video.set(CV_CAP_PROP_POS_FRAMES, startFrame);
		if (!video.isOpened()) {
			cout << "Could not read video file" << endl;
			return 1;
		}
	} else if (sourceType == st::SourceType::UVC_CAMERA) {
		if (!video.isOpened()) {
			cout << "Could not open uvc camera" << endl;
			return 1;
		}
	} else if (sourceType == st::SourceType::HIKVISION_MV_CAMERA) {
		if (!camera->isOK()) {
			cout << "Could not open hik_mv camera" << endl;
			return 1;
		}
	}

	Mat frame;
	readFrame(frame);

//	Draw the initial detection box
	if (displayStream) {
		rectangle(frame, bbox, Scalar(255, 20, 20), 2, 1);
		imshow("Tracking", frame);
	}

//	Initialize the tracker on the first frame

	tracker->init(frame, bbox);

	while (readFrame(frame)) {
		auto timer = (double) getTickCount();
		bool ok = tracker->update(frame, bbox);
		double fps = getTickFrequency() / ((double) getTickCount() - timer);
		if (ok) {
//			Target is still being tracked
			if (displayStream) {
				rectangle(frame, bbox, Scalar(255, 20, 20), 2, 1);
			}
		} else {
//			Tracking lost: try to re-acquire the target via template matching
			matcher.setOriginImage(frame, Size(frameWidth, frameHeight));
			bbox = matcher.templateMatch(MATCH_THRESHOLD, MATCH_METHOD);
			if (!bbox.empty()) {
//				Re-create and re-init the tracker: OpenCV trackers cannot be
//				re-initialized in place on a new target box.
				initTracker();
				tracker->init(frame, bbox);
				if (displayStream) {
					putText(frame, "Found templateMatch", Point(100, 110), FONT_HERSHEY_SIMPLEX, 0.75, Scalar(0, 200, 200), 2);
					rectangle(frame, bbox, Scalar(0, 200, 200), 2, 1);
				}
			}
			if (displayStream) {
				putText(frame, "Tracking failure detected", Point(100, 80), FONT_HERSHEY_SIMPLEX, 0.75, Scalar(0, 0, 255), 2);
			}
		}
//		Only overwrite the caller's result when there is a valid box; an empty
//		bbox (target lost and no template match) keeps the previous crop.
//		(The original code had a no-op `*trackResult = *trackResult;` guard and
//		then unconditionally overwrote the result with an empty crop.)
		if (trackResult != nullptr && !bbox.empty()) {
			*trackResult = frame(bbox);
		}
		if (displayStream) {
			putText(frame, "TrackerType: " + st::trackerTypes[trackerType], Point(100, 20), FONT_HERSHEY_SIMPLEX, 0.75, Scalar(50, 170, 50), 2);
			putText(frame, "FPS: " + to_string(int(fps)), Point(100, 50), FONT_HERSHEY_SIMPLEX, 0.75, Scalar(50, 170, 50), 2);
			imshow("Tracking", frame);

			int k = waitKey(10);
			if (k == KEY_ESC) {  // use the named constant instead of magic 27
				break;
			}
		}
	}
	return 0;
}

// Interactive selection of the initial tracking box.
// Shows a live/seeked preview and lets the user adjust the box with the keyboard:
//   a/d     shrink/grow width         w/s     shrink/grow height
//   arrows  move the box (KEY_X/Y_* codes)
//   , / .   previous/next frame (video sources only)
//   ESC or SPACE confirms and closes the window.
// indexOfFrame: starting frame index (video sources only); defaultBox: initial
// box, or empty to start with a centered quarter-frame box.
// Side effect: crops the confirmed target region and feeds it to `matcher` as
// the template used for re-acquisition after tracking failures.
Rect2d st::SingleTrack::selectTrackingBox(int indexOfFrame, const Rect2d& defaultBox) {
	cout << "selectTrackingBox" << endl;
	int frameCount = 0;
	if (st::SourceType::VIDEO == sourceType) {
		frameCount = video.get(CV_CAP_PROP_FRAME_COUNT);
		if (indexOfFrame < frameCount && indexOfFrame >= 0) {
			video.set(CV_CAP_PROP_POS_FRAMES, indexOfFrame);
			cout << format("read frame of index: (%d / %d)", indexOfFrame, frameCount) << endl;
		} else
			cout << format("frame index out of range: (%d / %d)", indexOfFrame, frameCount) << endl;
	}

	Mat frame, frameClone;
	int x, y, w, h;
	Rect2d targetRect;
	bool startedGrab = false;
	// Without a default box, start with a quarter-size box at the frame's
	// upper-left quadrant center.
	if (defaultBox.empty()) {
		x = frameWidth/4, y = frameHeight/4, w = frameWidth/4, h = frameHeight/4;
	} else {
		x = (int) defaultBox.x, y = (int) defaultBox.y, w = (int) defaultBox.width, h = (int) defaultBox.height;
	}

	while (true) {
		bool readFrameOK = readFrame(frame);
		if (!readFrameOK) {
			cout << "waiting for available image." << endl;
			startedGrab = false;
			continue;
		} else {
			// NOTE(review): frameClone is captured on the first successful grab
			// but never read afterwards — possibly leftover from an earlier
			// design; confirm before removing.
			if (!startedGrab) {
				cout << "grabbed available image." << endl;
				frameClone = frame.clone();
			}
			startedGrab = true;
		}
		targetRect = Rect2d(x, y, w, h);
		rectangle(frame, targetRect, Scalar(255, 20, 20), 2, 1);
		imshow("select tracking box", frame);
		int k = waitKey(1);
		if (k == KEY_ESC || k == KEY_SPACE) {
			// Confirm the current box and leave selection mode.
			destroyAllWindows();
			break;
		} else if (k == KEY_PREV && sourceType == st::SourceType::VIDEO) {
			// Post-decrement: this set() uses the OLD index, but the re-seek at
			// the bottom of the loop applies the decremented value anyway.
			if (indexOfFrame > 0) {
				video.set(CV_CAP_PROP_POS_FRAMES, indexOfFrame--);
			} else {
				video.set(CV_CAP_PROP_POS_FRAMES, 0);
			}
			cout << "Frame: " << indexOfFrame << " / " << frameCount << endl;
		} else if (k == KEY_NEXT && sourceType == st::SourceType::VIDEO) {
			// Same post-increment subtlety as KEY_PREV above.
			if (indexOfFrame < frameCount - 1) {
				video.set(CV_CAP_PROP_POS_FRAMES, indexOfFrame++);
			} else {
				video.set(CV_CAP_PROP_POS_FRAMES, indexOfFrame);
			}
			cout << "Frame: " << indexOfFrame << " / " << frameCount << endl;
		} else if (k == KEY_X_MINUS) {
			// Box geometry keys move/resize in 5-pixel steps, clamped to the frame.
			if (x > 0) {
				x -= 5;
			}
		} else if (k == KEY_X_PLUS) {
			if (x < frame.cols) {
				x += 5;
			}
		} else if (k == KEY_Y_MINUS) {
			if (y > 0) {
				y -= 5;
			}
		} else if (k == KEY_Y_PLUS) {
			if (y < frame.rows) {
				y += 5;
			}
		} else if (k == KEY_WIDTH_MINUS) {
			if (w > 0) {
				w -= 5;
			}
		} else if (k == KEY_WIDTH_PLUS) {
			if (w < frame.cols) {
				w += 5;
			}
		} else if (k == KEY_HEIGHT_MINUS) {
			if (h > 0) {
				h -= 5;
			}
		} else if (k == KEY_HEIGHT_PLUS) {
			if (h < frame.rows) {
				h += 5;
			}
		}
		// For video sources, re-seek to the current index every iteration so the
		// preview stays on the selected frame instead of advancing.
		if (sourceType == st::SourceType::VIDEO) {
			video.set(CV_CAP_PROP_POS_FRAMES, indexOfFrame);
		}
	}
	// Crop 2 px inside the drawn rectangle so the blue border is excluded from
	// the template image handed to the matcher.
	trackingTarget = frame(Rect2d(targetRect.x + 2, targetRect.y + 2, targetRect.width - 4, targetRect.height - 4));
	matcher.setTargetImage(trackingTarget);
	return targetRect;
}

bool st::SingleTrack::readFrame(Mat& frame) {
	// Grabs one frame from the active source and scales it into `frame` at
	// (frameWidth, frameHeight). Returns true only when a frame was both
	// captured AND resized successfully.
	Mat originFrame;
	bool ok = false;  // was uninitialized: UB if sourceType hit the default branch
	switch (sourceType) {
		case st::SourceType::VIDEO:
		case st::SourceType::UVC_CAMERA:
			// Both kinds read through the cv::VideoCapture member.
			ok = video.read(originFrame);
			break;
		case st::SourceType::HIKVISION_MV_CAMERA:
			ok = camera->readFrame(originFrame);
			break;
		default:
			break;
	}
	if (ok) {
		try {
			resize(originFrame, frame, Size(frameWidth, frameHeight));
		} catch (const cv::Exception& e) {  // catch by const reference, not by value
			cout << "in SingleTrack -> readFrame() -> resize(): " << e.what() << endl;
			ok = false;  // resize failed, so no usable frame was produced
		}
	}
	return ok;
}

// Default constructor: members keep their in-class defaults; no video source is attached.
st::SingleTrack::SingleTrack() = default;
