#include "jf_aod_opencv.h"

// Private implementation (pimpl) state for AODOpenCV: tuning parameters,
// background-model buffers, and the bookkeeping containers used by detect().
class AODOpenCVPrivate{
    public:
        //parameter can be listed here
        int accurate =  20;      // max frames kept in q_back (history queue cap)
        int threscount = 100;    // warm-up frames before the detector arms (detectFlag 10 -> 20)
        int step = 3;            // push a frame into q_back every `step` frames
        int count = 0;           // running frame counter (wraps at 100000)
        int interval = 180;      // seconds between stable-background refreshes
        double recordTime = 0;   // tick-time (s) of the last background refresh
		int maxDiscontinuedTimes = 2;  // frequency entry dropped after this many discontinuous passes
		int freCount = 50;       // detections needed before a frequency entry is analyzed
        cv::Mat fore;            // foreground mask from the MOG2 subtractor
        cv::Mat back;            // current background image from the subtractor
        cv::Mat backz;           // frozen "stable" background snapshot
        cv::Mat sub;             // |back - backz| difference image
        int detectFlag = 0;      // state machine: 0 = init, 10 = warming up, 20 = armed
        queue<cv::Mat> q_back;   // recent grayscale frames used by detectLeftFilter()
        vector<cv::Rect> detectedRect;  // rects reported for the current frame
		vector<AODFrequency> af_vec;    // per-location detection-frequency tracking
        vector<Rect> preparedRect;      // candidates promoted by detectPreparedRect()
        vector<AODAlarmTime> alarmVec;  // recent alarms, used to suppress repeats
		vector<AODTrack> track_vec;     // per-location frame-count tracking
        bool isUseFrequencytactics = true;  // enable the frequency/histogram filter
		bool isUseAlarmTimeTactic = true;   // enable repeat-alarm suppression
		bool isUseAreaComparison = true;    // enable the surround-area brightness check
        Ptr<BackgroundSubtractor> bg =  createBackgroundSubtractorMOG2(100, 16, false);

};

// Direction of the ROI rescale performed by transformRect():
enum TRANSTYPE
{
     ZOOMRECT_TYPE = 0,   // shrink a full-resolution rect down to detection resolution
     RESTORERECT_TYPE,    // restore a detection-resolution rect to full resolution
};

// File-local helper declarations (definitions below).
bool isAllowedRange(AODAlarmTime alt, Point p);  // is p within the repeat-alarm radius of alt.center?
double GetGrayAvg(cv::Mat src);                  // mean gray level of an image patch
// NOTE(review): this declares a free scale_rect, but only the AODOpenCV::scale_rect
// member is defined in this file -- calling the free function would fail to link.
Rect scale_rect(const Rect& r, float scale);
cv::Rect transformRect(cv::Rect ori_roi, cv::Mat img, TRANSTYPE type);

// Returns the center point of `rect`, rounding half-pixel coordinates.
Point AODOpenCV::getCenterPoint(Rect rect)
{
	Point center(rect.x + cvRound(rect.width / 2.0),
	             rect.y + cvRound(rect.height / 2.0));
	return center;
}

// Core abandoned-object detection.
// Per frame: crop to the configured ROI, resize to 480x320, update the MOG2
// background model, and compare the live background against a periodically
// frozen snapshot (backz).  Differences that persist are candidate left
// objects; they are filtered through the history / frequency / repeat-alarm
// tactics before being reported via cb_().
void AODOpenCV::detect(const AODInput &input){
        Mat img_input = input.mat;
		// cout<<img_input.cols<<"x"<<img_input.rows<<endl;
		// scale factor so area thresholds tuned for 480x320 apply to any input size
		float area_ratio = (480.0*320.0)/(float)(img_input.cols*img_input.rows);

		//apply the ROI proportionally to the input video
		// cv::Rect t_roi = transformRect(getROI(), input.mat, ZOOMRECT_TYPE);
		img_input = img_input(getROI());
		resize(img_input,img_input,Size(480, 320));        
        vector <vector<Point>>subcontours;
		if (img_input.empty()) return;
        // Mat img_ori = t_roi.area() == 0 ? img_input:img_input(t_roi);
		Mat img_ori = img_input;
		cv::Mat frame = img_ori.clone();
		cv::Mat origin = img_ori.clone();
		cvtColor(frame, frame, COLOR_RGB2GRAY);
		//update the background model
		m_pHandle->bg->apply(frame, m_pHandle->fore);
		m_pHandle->bg->getBackgroundImage(m_pHandle->back);
		cv::erode(m_pHandle->back, m_pHandle->back, cv::Mat());//to remove image noise
		cv::erode (m_pHandle->fore, m_pHandle->fore, cv::Mat ());//to remove image noise more, but reduces contour size
		cv::erode (m_pHandle->fore, m_pHandle->fore, cv::Mat ());
		cv::erode (m_pHandle->fore, m_pHandle->fore, cv::Mat ());

		// foreground pass: track every reasonably-sized blob so that
		// getDetectedFrameCounts() later knows how long a location was seen
		vector<vector<Point>> forecontours;		
		findContours(m_pHandle->fore, forecontours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);//CHAIN_APPROX_NONE ensures that straight lines of contour are also included
		vector<cv::Rect> forerects(forecontours.size());
		for(int i=0;i<forecontours.size();i++)
		{
			if(contourArea(forecontours[i])> getAreaThresholdMin() && contourArea(forecontours[i])< getAreaThresholdMax()){
				forerects[i] = boundingRect(forecontours[i]);
				trackDetectedRect(forerects[i]);
			}
			// if(contourArea(contours[i])>500)//to eliminate noise contours
			// drawContours (frame, contours, -1, cv::Scalar (255, 255, 255), 1);
		}
		// dilate(m_pHandle->fore, m_pHandle->fore, cv::Mat());// perhaps to sharpen contour without noise,gives more square contour
		// dilate(m_pHandle->fore, m_pHandle->fore, cv::Mat());// perhaps to sharpen contour without noise,gives more square contour
		// dilate(m_pHandle->fore, m_pHandle->fore, cv::Mat());// perhaps to sharpen contour without noise
		// every `step` frames, keep a grayscale snapshot in a bounded history queue
		if(m_pHandle->count % m_pHandle->step == 0){
			m_pHandle->q_back.push(frame);
			if(m_pHandle->q_back.size() > m_pHandle->accurate) m_pHandle->q_back.pop();
		}

		//Abandoned Object Part    
		// state 0 -> 10: initialize backz (absdiff of back with itself yields an all-zero image)
		if (m_pHandle->detectFlag == 0)
		{
			absdiff(m_pHandle->back, m_pHandle->back, m_pHandle->backz);
			m_pHandle->detectFlag = 10;
		}

		// state 10 -> 20: after threscount warm-up frames, freeze the stable background
		if (m_pHandle->detectFlag == 10 && m_pHandle->count > m_pHandle->threscount)//this loop runs once
		{
			m_pHandle->backz = m_pHandle->back.clone();//extra loop run to ensure stable initial background
			m_pHandle->detectFlag = 20;
			m_pHandle->recordTime = getTickCount() / getTickFrequency();
			cout<<"Abandoned object detect begin ..."<<endl;
		}

		// state 20: every `interval` seconds, re-freeze the stable background
		if (m_pHandle->detectFlag == 20 && (getTickCount() / getTickFrequency() - m_pHandle->recordTime) >= m_pHandle->interval)//interval can vary from 0 to 10 minutes; infinite loop
		{
			cout << "New Interval of Abandoned Object Detection \n";
			m_pHandle->backz = m_pHandle->back.clone();//not backz=back as they'll become pointers pointing to same address
			m_pHandle->recordTime  = getTickCount() / (getTickFrequency());
		}
    
		if(m_pHandle->detectFlag == 20){
            if(m_pHandle->detectedRect.size() > 0) m_pHandle->detectedRect.clear();
			// difference between the live background and the frozen snapshot:
			// anything that persists here was added to the scene after the freeze
			absdiff(m_pHandle->back, m_pHandle->backz, m_pHandle->sub);
			threshold(m_pHandle->sub, m_pHandle->sub, 70, 255, THRESH_BINARY);            
			erode(m_pHandle->sub, m_pHandle->sub, cv::Mat()); //to remove image noise more, but reduces contour size
			dilate(m_pHandle->sub, m_pHandle->sub, cv::Mat());// perhaps to sharpen contour without noise,gives more square contour
			findContours(m_pHandle->sub, subcontours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
			vector<Rect> subBoundRect(subcontours.size());
			vector<Point> subcenter(subBoundRect.size());
			int totalarea = 0;
			bool isOverArea = false;
			for (int i = 0; i < subcontours.size(); i++) {
				if (contourArea(subcontours[i])> 10){
					totalarea += contourArea(subcontours[i]);
				}
			}
			// if the changed area covers most of the frame, assume a scene-wide
			// change (lighting / camera move / accident) rather than a left object
			if(totalarea > frame.cols * frame.rows /1.2) isOverArea = true;
			int type;	
			if(isOverArea) cout<<"There is a Possible Accident Happen"<<endl;
			else {
				for (int i = 0; i < subcontours.size(); i++) {
					if (contourArea(subcontours[i])> area_ratio*getAreaThresholdMin() && contourArea(subcontours[i])< area_ratio*getAreaThresholdMax()){ //medium value is 130
					subBoundRect[i] = boundingRect(subcontours[i]);	
					trackDetectedRect(subBoundRect[i]);
					if(!this->getUseFrequencyTactics()){
						// direct path: accept the rect if it is stable across the history queue
						if(!m_pHandle->q_back.empty()){
							type = detectLeftFilter(m_pHandle->q_back, frame, subBoundRect[i]);
						}	
						else type = 1;
						if(type == 2) {
							m_pHandle->detectedRect.push_back(subBoundRect[i]); 				
						}
					}else{
						// frequency path: accumulate detections per location and
						// promote only area-stable candidates (see detectPreparedRect)
						this->addAODFrequency(subBoundRect[i]);
						this->detectPreparedRect();
						this->cleanAODFrequency();
					}
				    }
		        }

				// frequency path, part 2: run the history filter on the promoted candidates
				if(this->getUseFrequencyTactics()){
				  for(int i = 0; i < m_pHandle->preparedRect.size(); i++){
					if(!m_pHandle->q_back.empty())
                        type = detectLeftFilter(m_pHandle->q_back, frame, m_pHandle->preparedRect[i]);
                    else type = 1;
                    if (type == 2) {
						m_pHandle->detectedRect.push_back(m_pHandle->preparedRect[i]);
                    }
				 }
				}

				m_pHandle->preparedRect.clear();
				if(getUseAlarmTimeTactic()){//when repeat-alarm suppression is enabled, filter out alarms already raised recently
					for(int i = 0; i < m_pHandle->detectedRect.size(); i++){
						int flag = 0;  //set when a matching recent alarm is found
						for(int j = 0; j < m_pHandle->alarmVec.size(); j++){
							if(isAllowedRange(m_pHandle->alarmVec[j], getCenterPoint(m_pHandle->detectedRect[i]))){
								if((getTickCount()/getTickFrequency() - m_pHandle->alarmVec[j].startTime) < getTimeGapThreshold()){
									vector<cv::Rect>::iterator it = m_pHandle->detectedRect.begin() + i;
									//cout<<"found time-out alarm"<<endl;
									m_pHandle->detectedRect.erase(it);
									i--;
									flag = 1;
									continue;	
								}
							}	
						}
						if(flag == 0){//no recent alarm at this location: record a new one
							AODAlarmTime al;
							al.center = getCenterPoint(m_pHandle->detectedRect[i]);
							al.startTime = getTickCount() / getTickFrequency();
							m_pHandle->alarmVec.push_back(al);
						}	
					}
					// expire alarms older than the suppression window
					vector<AODAlarmTime>::iterator it;
					for(it = m_pHandle->alarmVec.begin();it !=m_pHandle->alarmVec.end();)
					{
						if((getTickCount()/getTickFrequency() - (*it).startTime) > getTimeGapThreshold()){
							it = m_pHandle->alarmVec.erase(it);
						}else it++;
					}
				}
				// report every surviving rect through the registered callback
				AODOutput ao;
				for(int i = 0; i < m_pHandle->detectedRect.size(); i++){
					cout<<"the detected rect is "<<getCenterPoint(m_pHandle->detectedRect[i])<<endl;
					// cout<<"the detected rect is "<<getCenterPoint(transformRect(m_pHandle->detectedRect[i], input.mat, RESTORERECT_TYPE))<<endl;//", the current frame is "<<m_pHandle->count<<endl;
					AODResult ar;
					ar.rt = m_pHandle->detectedRect[i];
					ar.detectedFrameCounts = getDetectedFrameCounts(m_pHandle->detectedRect[i]);
					ar.type = 2;
					ar.prop = 1;
					cout<<"the detection costs "<<ar.detectedFrameCounts<<" frames"<<endl;
					ao.arrRes.push_back(ar);
					ao.mat = img_input;
					
				}
				if(m_pHandle->detectedRect.size() > 0) cb_(ao);
			}

		}else m_pHandle->sub = m_pHandle->backz;
		m_pHandle->count++;
        if(m_pHandle->count == 100000) m_pHandle->count = 0;
		clearTrackRect();
 }


// True when p lies within a 3-pixel Chebyshev radius of alt.center, i.e. the
// point is close enough to count as the same alarm location.
bool isAllowedRange(AODAlarmTime alt, Point p){
	const int dx = abs(alt.center.x - p.x);
	const int dy = abs(alt.center.y - p.y);
	return dx <= 3 && dy <= 3;
}

// Returns a copy of the rects detected in the most recent frame.
vector<Rect> AODOpenCV::getdetectedRect(){
	return m_pHandle->detectedRect;
}

// Discards the rects recorded for the most recent frame.
void AODOpenCV::cleardetectedRect(){
	m_pHandle->detectedRect.clear();
}

// Sets the number of seconds between stable-background refreshes.
void AODOpenCV::setinterval(int interval)
{
	m_pHandle->interval = interval;
}

// Sets the maximum number of frames retained in the background history queue.
void AODOpenCV::setaccurate(int accurate)
{
	m_pHandle->accurate = accurate;
}

// Sets the frame sampling stride for the background history queue.
void AODOpenCV::setstep(int step)
{
	m_pHandle->step = step;
}


// Sets the number of warm-up frames before detection arms.
void AODOpenCV::setthrescount(int threscount)
{
	m_pHandle->threscount = threscount;
}
//Add a detected rect to the area-change (frequency) tracking sequence.
//If an existing entry's center falls inside rt, the rect is appended to that
//entry and it is marked continuous; otherwise a new entry is started.
void AODOpenCV::addAODFrequency(cv::Rect rt){
	for(auto &entry : m_pHandle->af_vec){
		if(rt.contains(entry.center)){
			entry.detectedRect.push_back(rt);
			entry.isContinuous = true;
			entry.count++;
			return;  // update at most one matching entry
		}
	}
	// no matching entry: start tracking this location
	AODFrequency aodFre;
	aodFre.detectedRect.push_back(rt);
	aodFre.center = getCenterPoint(rt);
	aodFre.isContinuous = true;
	aodFre.count++;
	m_pHandle->af_vec.push_back(aodFre);
}

//Remove frequency entries that have been time-discontinuous too long, or that
//have reached the full sample count (freCount) and were already analyzed.
//Surviving entries are reset to "discontinuous" so the next frame's
//detections must re-mark them.
void AODOpenCV::cleanAODFrequency(){
	auto it = m_pHandle->af_vec.begin();
	while(it != m_pHandle->af_vec.end()){
		if(it->isContinuous)
			it->discontinuousTimes = 0;
		else
			it->discontinuousTimes++;

		if(it->discontinuousTimes >= this->getmaxDiscontinuedTimes() || it->count == m_pHandle->freCount){
			it = m_pHandle->af_vec.erase(it);  // erase returns the next element
		}else{
			it->isContinuous = false;
			++it;
		}
	}
}

void AODOpenCV::detectPreparedRect(){
	int nBIN = 5;
	for(int i = 0; i < m_pHandle->af_vec.size(); i++){
		if(m_pHandle->af_vec[i].count == m_pHandle->freCount){
			vector<cv::Rect> arrInput = m_pHandle->af_vec[i].detectedRect;

			auto fn = [](cv::Rect input1, cv::Rect input2) {
      			return input1.area() < input2.area();
   			};
			// 得到最大值/最小值
			auto max_iter = max_element(std::begin(arrInput), std::end(arrInput), fn);
			auto min_iter = min_element(std::begin(arrInput), std::end(arrInput), fn);
			auto max = *max_iter;
			auto min = *min_iter;
			int nHistSize = (max.area() - min.area()) / nBIN;
			// 最大值/最小值相同，说明面积变化平稳，可以断定这个检测框是可用的
   			 if (nHistSize == 0) {
				m_pHandle->preparedRect.push_back(arrInput[arrInput.size()-1]);
				continue;
			}

			std::vector<int> arrHist;			
			for (auto &num : arrInput) {
				auto hist = (num.area() - min.area()) / nHistSize;
				arrHist.push_back(hist);
			}

			//直方图
			std::unordered_map<int, int> hash_table;
			for (auto &i : arrHist) {
				hash_table[i]++;
			}

			int max_freq = -1, most_freq_val = -1; // Initialize these values to -1
			// for (auto &i : hash_table) {
			// 	if (i.second > max_freq) {
			// 		max_freq = i.second;
			// 		most_freq_val = i.first;
			// 	}
    		// }
			int pre;
			bool flag = false;
			for(int i = 0; i < hash_table.size(); i++){
				if(i == 0) {
					flag = true;
					pre = hash_table[i];
				}	
				else {
					if(hash_table[i] - pre > 2){
						flag = false;
						break;
					}else pre = hash_table[i];
				}
			}

			if(flag == true){
				m_pHandle->preparedRect.push_back(arrInput[arrInput.size()-1]);
			}

		}
	}
}

// Enables or disables the frequency/histogram filtering tactic.
void AODOpenCV::setUseFrequencyTactics(bool useFreTactics)
{
	m_pHandle->isUseFrequencytactics = useFreTactics;
}

// Whether the frequency/histogram filtering tactic is enabled.
bool AODOpenCV::getUseFrequencyTactics()
{
	return m_pHandle->isUseFrequencytactics;
}

// Sets how many discontinuous passes a frequency entry may survive.
void AODOpenCV::setmaxDiscontinuedTimes(int times)
{
	m_pHandle->maxDiscontinuedTimes = times;
}

// How many discontinuous passes a frequency entry may survive.
int AODOpenCV::getmaxDiscontinuedTimes()
{
	return m_pHandle->maxDiscontinuedTimes;
}

 int AODOpenCV::detectLeftFilter(queue<Mat> qback, Mat& frame, Rect& rect)
{
	int returnVal = -1;
	queue<Mat> q = qback;
	while(!q.empty()){
		Mat background = q.front();
		// Scalar S1 = mean(background(rect));
		// Scalar S2 = mean(frame(rect));
		// double dist1 = norm(S1);
		// double dist2 = norm(S2);
        double avg1 = GetGrayAvg(background(rect));
        double avg2 = GetGrayAvg(frame(rect));
		if (abs(avg1 - avg2) > 2)
			return 1;
		q.pop();
	}
	if(getUseAreaComparison() == false) return 2;
	else return detectLeftRemove(frame, rect);
}

// Area-comparison refinement: compares the mean intensity of the candidate
// region against a 4x-scaled surrounding region.  If the two are nearly equal
// (ratio in [0.9, 1.0]) the "object" blends into its surroundings and is
// rejected (returns 1); otherwise it is confirmed (returns 2).
int AODOpenCV::detectLeftRemove(Mat& background, Rect& rect)
{
	Rect surroundRect = scale_rect(rect, 4.f);
	// clamp the enlarged rect to the image bounds
	if (surroundRect.x + surroundRect.width > background.cols)
		surroundRect.width = background.cols - surroundRect.x;
	if (surroundRect.y + surroundRect.height > background.rows)
		surroundRect.height = background.rows - surroundRect.y;

	const double innerDist = norm(mean(background(rect)));
	const double outerDist = norm(mean(background(surroundRect)));
	double ratio;
	if (innerDist >= outerDist)
		ratio = outerDist / innerDist;
	else
		ratio = innerDist / outerDist;

	// nearly identical brightness => the region blends in, reject it
	return (ratio >= 0.9 && ratio <= 1.0) ? 1 : 2;
}

// Whether repeat-alarm suppression is enabled.
bool AODOpenCV::getUseAlarmTimeTactic()
{
	return m_pHandle->isUseAlarmTimeTactic;
}

// Enables or disables repeat-alarm suppression.
void AODOpenCV::setUseAlarmTimeTactic(bool isUse)
{
	m_pHandle->isUseAlarmTimeTactic = isUse;
}


// Whether the surround-area brightness comparison is enabled.
bool AODOpenCV::getUseAreaComparison()
{
	return m_pHandle->isUseAreaComparison;
}
// Enables or disables the surround-area brightness comparison.
void AODOpenCV::setUseAreaComparison(bool isUse)
{
	m_pHandle->isUseAreaComparison = isUse;
}

// Mean gray value of `src`; 3-channel inputs are converted to grayscale first.
double GetGrayAvg(cv::Mat src)
{
	cv::Mat gray;
	if (src.channels() == 3)
		cv::cvtColor(src, gray, CV_BGR2GRAY);
	else
		gray = src;
	return mean(gray)[0];
}

// Returns `r` scaled by `scale` about its center.  The top-left corner is
// clamped to be non-negative; width/height are NOT clamped to any image size
// (callers such as detectLeftRemove() clamp the far edges themselves).
Rect AODOpenCV::scale_rect(const Rect& r, float scale)
{
	const Point center = getCenterPoint(r);
	const float newW = r.width * scale;
	const float newH = r.height * scale;
	int left = cvRound(center.x - newW / 2);
	int top  = cvRound(center.y - newH / 2);
	if (left < 0) left = 0;
	if (top < 0) top = 0;
	return Rect(left, top, cvRound(newW), cvRound(newH));
}

// Allocates the private implementation state.
AODOpenCV::AODOpenCV() : m_pHandle(new AODOpenCVPrivate()) {}
// NOTE(review): if m_pHandle is a raw pointer this empty destructor leaks the
// AODOpenCVPrivate allocated above; if it is a smart pointer (pimpl idiom),
// defining the destructor here -- where the complete type is visible -- is
// intentional.  Confirm against the header declaration.
AODOpenCV::~AODOpenCV(){}

// Rescales a rect between the full input-image resolution and the internal
// detection canvas.  type == ZOOMRECT_TYPE shrinks a full-resolution rect down
// to detection coordinates; type == RESTORERECT_TYPE maps a detection rect
// back onto the input image.  `img` is the full-resolution input image.
// A rect with zero area yields a default (empty) rect.
// NOTE(review): this maps to a 320x260 canvas while detect() resizes frames to
// 480x320 -- the call sites are currently commented out; confirm the intended
// canvas size before re-enabling them.
cv::Rect transformRect(cv::Rect ori_roi, cv::Mat img, TRANSTYPE type){
	cv::Rect obj_roi;
	if(ori_roi.area() > 0){
		// use the named enum constants instead of magic 0/1 literals
		if(type == ZOOMRECT_TYPE){
			obj_roi.x = ori_roi.x*320/img.cols;
			obj_roi.width = ori_roi.width*320/img.cols;
			obj_roi.y = ori_roi.y*260/img.rows;
			obj_roi.height = ori_roi.height*260/img.rows;
		}
		else if(type == RESTORERECT_TYPE){
			obj_roi.x = ori_roi.x*img.cols/320;
			obj_roi.width = ori_roi.width*img.cols/320;
			obj_roi.y = ori_roi.y*img.rows/260;
			obj_roi.height = ori_roi.height*img.rows/260;
		}
	}

	return obj_roi;
}

//Track the starting frame of a detected rect.
//Each tracked location keeps a frame count; a rect that contains an existing
//track's center updates that track (at most once per frame, via the
//isContinuous flag reset by clearTrackRect()) instead of starting a new one.
void AODOpenCV::trackDetectedRect(const cv::Rect detectedRect){
	vector<AODTrack>::iterator it;
	bool isContains = false;
	for(it = m_pHandle->track_vec.begin(); it != m_pHandle->track_vec.end(); ++it){
		if(detectedRect.contains(it->center)){
			// BUG FIX: when the matching track was already marked continuous
			// this frame, the old code broke out with isContains still false
			// and appended a DUPLICATE track for the same location on every
			// frame.  The rect matches an existing track in both cases, so we
			// never add a duplicate.
			if(!it->isContinuous){
				it->frameCount++;
				it->isContinuous = true;
			}
			isContains = true;
			break;
		}
	}

	if(!isContains){
		AODTrack at;
		at.center = getCenterPoint(detectedRect);
		// NOTE(review): assumes AODTrack default-initializes frameCount -- confirm in the header
		at.frameCount++;
		m_pHandle->track_vec.push_back(at);
	}
}

//Clean up stale tracking entries (called once at the end of every detect()).
void AODOpenCV::clearTrackRect(){
	 vector<AODTrack>::iterator it;
	 for(it = m_pHandle->track_vec.begin(); it!=m_pHandle->track_vec.end();){
		 //the tracked location was seen this frame: reset for the next frame
		 if(it->isContinuous){
			it->discontinuousCounts = 0;
			it->isContinuous = false; 
			it++;
		 }else{
			 //not seen this frame: bump the discontinuity counter
			 it->discontinuousCounts++;
			 //drop the track once it has been missing 10000 frames
			 //(the original comment said 50, but the code uses 10000)
			 if(it->discontinuousCounts >= 10000){
				 it = m_pHandle->track_vec.erase(it);
			 }else{//still within the window: the location keeps accumulating frames
				it->frameCount++;
				it++;
			 }
		 }
		
	 }
 }

//Returns how many frames it took to detect the given rect, consuming
//(erasing) the matching track entry.  Returns 0 when no track matches.
int AODOpenCV::getDetectedFrameCounts(const cv::Rect detectedRect){
	int frames = 0;
	for(auto it = m_pHandle->track_vec.begin(); it != m_pHandle->track_vec.end(); ++it){
		if(detectedRect.contains(it->center)){
			frames = it->frameCount;
			m_pHandle->track_vec.erase(it);  // a track is reported only once
			break;
		}
	}
	return frames;
}


