#include<stdio.h>
#include<iostream>
#include <opencv2/core.hpp>
#include <opencv2/opencv.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/features2d.hpp>
//#include <opencv2/xfeatures2d.hpp>
#include "opencv2/imgproc/imgproc_c.h"
#include <opencv2/core/utils/logger.hpp>


//#include "StereoSGM.h"
//#include <algorithm>
//#include <vector>
#include <fstream>
#include <sstream> 

using namespace cv;
using namespace std;

#define PI acos(-1.0)

const int depth_level = 30;
typedef Vec<double, depth_level> Vec_nb;

struct seed// seed point candidate (see SortMaxToMin / FindInitialSeeds)
{
	Point2f pt;		// pixel position of the seed
	double escore;	// evaluation score; SortMaxToMin orders seeds by it, largest first
};


// Read every image in image_names, detect SIFT keypoints plus descriptors,
// and record the BGR color under each keypoint.
// Images that fail to load or yield too few (<= 10) keypoints are skipped,
// so the output vectors may end up shorter than image_names.
void extract_features(
	vector<string>& image_names,
	vector<vector<KeyPoint>>& key_points_for_all,
	vector<Mat>& descriptor_for_all,
	vector <vector<Vec3b>>& colors_for_all
)
{
	key_points_for_all.clear();
	descriptor_for_all.clear();
	colors_for_all.clear();	// bug fix: was never cleared, so repeated calls accumulated stale colors
	Mat image;

	// nfeatures=0 (unlimited), 3 octave layers, contrast threshold 0.04, edge threshold 10
	Ptr<Feature2D> sift = SIFT::create(0, 3, 0.04, 10);
	for (auto it = image_names.begin(); it != image_names.end(); ++it)
	{
		image = imread(*it);
		if (image.empty())
		{
			continue;
		}

		vector<KeyPoint> key_points;
		Mat descriptor;
		// detect and compute are run separately because the combined
		// detectAndCompute occasionally failed to allocate memory
		sift->detect(image, key_points);
		sift->compute(image, key_points, descriptor);

		// reject images with too few features
		if (key_points.size() <= 10)
		{
			printf("特征点过少,该图像被排除");
			continue;
		}

		key_points_for_all.push_back(key_points);
		descriptor_for_all.push_back(descriptor);

		// Sample the BGR color under each keypoint.
		// bug fix: Mat::at takes (row, col) = (y, x); the original indexed
		// at(p.x, p.y) and compared x against rows / y against cols, which is
		// wrong for non-square images and could read out of bounds.
		vector<Vec3b> colors(key_points.size());
		for (size_t i = 0; i < key_points.size(); ++i)
		{
			Point2f& p = key_points[i].pt;
			int row = (int)p.y;
			int col = (int)p.x;
			if (row >= 0 && row < image.rows && col >= 0 && col < image.cols)
				colors[i] = image.at<Vec3b>(row, col);
		}

		colors_for_all.push_back(colors);
	}
}

// Match two SIFT descriptor sets (query vs. train) and filter the matches in
// two stages:
//  1) Lowe's ratio test (0.6) plus a distance cap relative to the best match,
//  2) RANSAC via the fundamental matrix to drop the remaining outliers.
// Outputs: `matches` (stage-1 survivors), RR_KP1/RR_KP2/RR_matches (RANSAC
// inliers, re-indexed 0..N-1). The surviving point pairs are also written to
// match_point_1.txt / match_point_2.txt in the working directory.
// Returns the homography mapping image-2 points onto image 1.
// NOTE(review): img1/img2 are only used by the commented-out visualizations;
// no early return happens when ptCount < 100, and findHomography /
// findFundamentalMat are called without checking the minimum point count --
// confirm callers guarantee enough matches.
Mat match_features(Mat img1,Mat img2,Mat& query, Mat& train, vector<DMatch>& matches, vector<KeyPoint>& p1,
	vector<KeyPoint>& p2, vector <KeyPoint>& RR_KP1, vector <KeyPoint>& RR_KP2,vector <DMatch>& RR_matches)
{
	vector<vector<DMatch>> knn_matches;
	BFMatcher matcher(NORM_L2);
	matcher.knnMatch(query, train, knn_matches, 2);

	// Smallest distance among the matches that pass the ratio test
	float min_dist = FLT_MAX;
	for (int r = 0; r < knn_matches.size(); ++r)
	{
		// ratio test
		if (knn_matches[r][0].distance > 0.6 * knn_matches[r][1].distance)
		{
			continue;
		}

		float dist = knn_matches[r][0].distance;
		if (dist < min_dist)
		{
			min_dist = dist;
		}
	}

	matches.clear();
	for (size_t r = 0; r < knn_matches.size(); ++r)
	{
		// drop matches failing the ratio test or with too large a distance
		if (
			knn_matches[r][0].distance > 0.6 * knn_matches[r][1].distance ||
			knn_matches[r][0].distance > 5 * max(min_dist, 10.0f)
			)
		{
			continue;
		}

		// keep this match
		matches.push_back(knn_matches[r][0]);
	}
	
	/*Mat img_matches;
	drawMatches(img1, p1, img2, p2, matches, img_matches);
	imshow("raw_matches", img_matches);
	waitKey();*/

	// RANSAC filtering stage
	vector<DMatch> m_Matches;
	m_Matches = matches;
	int ptCount = matches.size();
	if (ptCount < 100)
	{
		cout << "Don't find enough match points" << endl;
	}

	// gather the matched keypoints of both images
	vector <KeyPoint> RAN_KP1, RAN_KP2;
	// size_t: unsigned, platform-sized index type (portable across 32/64 bit)
	for (size_t i = 0; i < m_Matches.size(); i++)
	{
		RAN_KP1.push_back(p1[matches[i].queryIdx]);
		RAN_KP2.push_back(p2[matches[i].trainIdx]);
		// RAN_KP1 holds the img01 keypoints matched to img02; the DMatch
		// entries carry the indices into both keypoint sets
	}
	// convert keypoints to pixel coordinates
	vector <Point2f> p01, p02;
	for (size_t i = 0; i < m_Matches.size(); i++)
	{
		p01.push_back(RAN_KP1[i].pt);
		p02.push_back(RAN_KP2[i].pt);
	}
	//vector <Point2f> img1_corners(4);
	//img1_corners[0] = Point(0,0);
	//img1_corners[1] = Point(img1.cols,0);
	//img1_corners[2] = Point(img1.cols, img1.rows);
	//img1_corners[3] = Point(0, img1.rows);
	//vector <Point2f> img2_corners(4);
	//// compute the transform matrices
		Mat m_homography;
	//	vector<uchar> m;
		m_homography = findHomography(p02, p01, RANSAC);// homography: image 2 -> image 1
		// fundamental matrix (3x3); RansacStatus marks the inliers
	vector<uchar> RansacStatus;
	 Mat Fundamental = findFundamentalMat(p02, p01, RansacStatus, FM_RANSAC);
	 cout << "F:" << endl << Fundamental << endl;
	// rebuild RR_KP* / RR_matches keeping only the RANSAC inliers,
	// re-numbering the match indices from 0
	
	int index = 0;
	for (size_t i = 0; i < m_Matches.size(); i++)
	{
		if (RansacStatus[i] != 0)
		{
			RR_KP1.push_back(RAN_KP1[i]);
			RR_KP2.push_back(RAN_KP2[i]);
			m_Matches[i].queryIdx = index;
			m_Matches[i].trainIdx = index;
			RR_matches.push_back(m_Matches[i]);
			index++;
		}
	}
	cout << "RANSAC后匹配点数" << RR_matches.size() << endl;

	// dump the surviving matched coordinates to text files
	vector <Point2f> p01_RR, p02_RR;
	for (size_t i = 0; i < RR_matches.size(); i++)
	{
		p01_RR.push_back(RR_KP1[i].pt);
		p02_RR.push_back(RR_KP2[i].pt);
	}
	ofstream fout;
	fout.open("match_point_1.txt");
	for (int i = 0; i < p01_RR.size(); i++) {
		fout << p01_RR[i].x << " " << p01_RR[i].y << "\n";
	}
	ofstream fout_2;
	fout_2.open("match_point_2.txt");
	for (int i = 0; i < p02_RR.size(); i++) {
		fout_2 << p02_RR[i].x << " " << p02_RR[i].y << "\n";
	}
	cout << "特征点存储完毕\n" << endl;


	//Mat img_RR_matches;
	//drawMatches(img1, RR_KP1, img2, RR_KP2, RR_matches, img_RR_matches);
	//imshow("After RANSAC", img_RR_matches);
	//// wait for any key
	//waitKey(0);

	
	return m_homography;

}

// Recover the relative pose (R, T) between two views from matched pixel
// coordinates p1/p2 using the essential matrix. K is the 3x3 intrinsic
// matrix; mask receives the inlier flags. Returns false when E cannot be
// estimated, too few inliers survive RANSAC, or too few points end up in
// front of both cameras.
bool find_transform(Mat& K, vector<Point2f>& p1, vector<Point2f>& p2, Mat& R, Mat& T, Mat& mask)
{
	// focal length and principal point taken from the intrinsic matrix
	const double focal_length = 0.5 * (K.at<double>(0) + K.at<double>(4));
	const Point2d principle_point(K.at<double>(2), K.at<double>(5));

	// essential matrix via RANSAC; mask flags the inlier correspondences
	Mat E = findEssentialMat(p1, p2, focal_length, principle_point, RANSAC, 0.999, 1.0, mask);
	cout << "E:" << endl << E << endl;
	if (E.empty())
		return false;

	// count the surviving inliers
	const double feasible_count = countNonZero(mask);

	// RANSAC is unreliable when more than ~40% of the matches are outliers
	if (feasible_count <= 15 || (feasible_count / p1.size()) < 0.6)
		return false;

	// decompose E into the relative rotation and translation
	const int pass_count = recoverPose(E, p1, p2, R, T, focal_length, principle_point, mask);
	cout << "pass_count = " << pass_count << endl;

	// require most inliers to triangulate in front of both cameras
	return ((double)pass_count) / feasible_count >= 0.7;
}



// Collect the matched pixel coordinates from two keypoint sets into parallel
// Point2f arrays: out_p1[i] and out_p2[i] form one correspondence.
void get_matched_points(
	vector<KeyPoint>& p1,
	vector<KeyPoint>& p2,
	vector<DMatch> matches,
	vector<Point2f>& out_p1,
	vector<Point2f>& out_p2
)
{
	out_p1.clear();
	out_p2.clear();
	out_p1.reserve(matches.size());
	out_p2.reserve(matches.size());
	for (const auto& m : matches)
	{
		out_p1.push_back(p1[m.queryIdx].pt);
		out_p2.push_back(p2[m.trainIdx].pt);
	}
}

// Visualize the epipolar geometry between two images: SIFT matching with
// Lowe's ratio test, fundamental matrix via RANSAC, then draw the epipolar
// lines of up to 10 correspondences on both images.
// NOTE: shows windows, blocks on waitKey, and writes ../epiline{1,2}.jpg.
void draw_epline(Mat src, Mat dst) {
	Ptr<Feature2D> sift = SIFT::create(0, 3, 0.04, 10);
	vector<KeyPoint> kp1, kp2;
	Mat descriptor1, descriptor2;
	// detect + compute run separately (combined call was flaky, see extract_features)
	sift->detect(src, kp1);
	sift->compute(src, kp1, descriptor1);
	sift->detect(dst, kp2);
	sift->compute(dst, kp2, descriptor2);

	vector<vector<DMatch>> knn_matches;
	BFMatcher matcher(NORM_L2);
	matcher.knnMatch(descriptor1, descriptor2, knn_matches, 2);

	// keep matches passing Lowe's ratio test (0.6)
	vector<DMatch> matches;
	for (size_t r = 0; r < knn_matches.size(); ++r)
	{
		if (knn_matches[r][0].distance > 0.6 * knn_matches[r][1].distance)
			continue;
		matches.push_back(knn_matches[r][0]);
	}

	Mat img_matches;
	drawMatches(src, kp1, dst, kp2, matches, img_matches);
	imshow("raw_", img_matches);
	waitKey();

	vector< Point2f > pts1, pts2;
	for (size_t i = 0; i < matches.size(); i++)
	{
		pts1.push_back(kp1[matches[i].queryIdx].pt);
		pts2.push_back(kp2[matches[i].trainIdx].pt);
	}

	// bug fix: findFundamentalMat(FM_RANSAC) needs at least 8 points
	if (pts1.size() < 8)
	{
		cout << "draw_epline: not enough matches to estimate F" << endl;
		return;
	}

	// fundamental matrix (3x3) via RANSAC
	vector<uchar> RansacStatus;
	Mat fundamental_matrix = findFundamentalMat(pts1, pts2, RansacStatus, FM_RANSAC);

	// epilines1: lines in image 2 for pts1; epilines2: lines in image 1 for pts2
	vector<cv::Vec<float, 3>> epilines1, epilines2;
	computeCorrespondEpilines(pts1, 1, fundamental_matrix, epilines1);
	computeCorrespondEpilines(pts2, 2, fundamental_matrix, epilines2);
	cv::RNG& rng = theRNG();
	Mat show;
	src.copyTo(show);
	// bug fix: the original always drew 10 lines and read out of bounds
	// when fewer than 10 matches survived the ratio test
	int n_draw = pts1.size() < 10 ? (int)pts1.size() : 10;
	for (int i = 0; i < n_draw; ++i) {
		// random color per correspondence
		Scalar color = Scalar(rng(255), rng(255), rng(255));
		circle(show, pts1[i], 5, color, 3);
		// a line a*x+b*y+c=0 is drawn through its points at x=0 and x=width
		line(show, cv::Point(0, -epilines2[i][2] / epilines2[i][1]), Point(src.cols, -(epilines2[i][2] + epilines2[i][0] * src.cols) / epilines2[i][1]), color);

		circle(dst, pts2[i], 5, color, 3);
		line(dst, cv::Point(0, -epilines1[i][2] / epilines1[i][1]), Point(dst.cols, -(epilines1[i][2] + epilines1[i][0] * dst.cols) / epilines1[i][1]), color);
	}
	imshow("epiline1", show);
	imwrite("../epiline1.jpg", show);
	imshow("epiline2", dst);
	imwrite("../epiline2.jpg", dst);
	waitKey(0);
	return;
}

// Triangulate a 3D point p = (X, Y, Z) from its pixel projections p1 and p2
// under the two 3x4 float (CV_32FC1) projection matrices M1 and M2 (DLT):
// each view contributes two linear equations, and the resulting 4x3 system
// is solved in the least-squares sense via SVD.
// Modernized: cv::solve(DECOMP_SVD) replaces the deprecated CvMat/cvSolve C API.
void point_reconstruct(Mat M1, Mat M2, double p1[2], double p2[2], double p[3]) {

	// A * [X Y Z]^T = b, two rows per view
	double src1_data[4][3];
	src1_data[0][0] = p1[0] * M1.at<float>(2, 0) - M1.at<float>(0, 0);
	src1_data[0][1] = p1[0] * M1.at<float>(2, 1) - M1.at<float>(0, 1);
	src1_data[0][2] = p1[0] * M1.at<float>(2, 2) - M1.at<float>(0, 2);
	src1_data[1][0] = p1[1] * M1.at<float>(2, 0) - M1.at<float>(1, 0);
	src1_data[1][1] = p1[1] * M1.at<float>(2, 1) - M1.at<float>(1, 1);
	src1_data[1][2] = p1[1] * M1.at<float>(2, 2) - M1.at<float>(1, 2);
	src1_data[2][0] = p2[0] * M2.at<float>(2, 0) - M2.at<float>(0, 0);
	src1_data[2][1] = p2[0] * M2.at<float>(2, 1) - M2.at<float>(0, 1);
	src1_data[2][2] = p2[0] * M2.at<float>(2, 2) - M2.at<float>(0, 2);
	src1_data[3][0] = p2[1] * M2.at<float>(2, 0) - M2.at<float>(1, 0);
	src1_data[3][1] = p2[1] * M2.at<float>(2, 1) - M2.at<float>(1, 1);
	src1_data[3][2] = p2[1] * M2.at<float>(2, 2) - M2.at<float>(1, 2);
	Mat A(4, 3, CV_64FC1, src1_data);

	double src2_data[4];
	src2_data[0] = M1.at<float>(0, 3) - p1[0] * M1.at<float>(2, 3);
	src2_data[1] = M1.at<float>(1, 3) - p1[1] * M1.at<float>(2, 3);
	src2_data[2] = M2.at<float>(0, 3) - p2[0] * M2.at<float>(2, 3);
	src2_data[3] = M2.at<float>(1, 3) - p2[1] * M2.at<float>(2, 3);
	Mat b(4, 1, CV_64FC1, src2_data);

	// least-squares solution of the overdetermined system
	Mat x;
	solve(A, b, x, DECOMP_SVD);
	p[0] = x.at<double>(0);
	p[1] = x.at<double>(1);
	p[2] = x.at<double>(2);
}
// Estimate the scene depth range [zMin, zMax] from the RANSAC-inlier matches:
// builds the two camera projection matrices K[I|0] and K[R|T], triangulates
// every matched pair with point_reconstruct, and tracks the min/max Z of the
// reconstructed points.
// NOTE(review): zMin/zMax are left untouched when there are no matches --
// callers should guarantee RR_matches is non-empty.
void EstimateDepthRange(Mat K, Mat R, Mat T, vector <KeyPoint> RR_KP1, vector <KeyPoint> RR_KP2,vector <DMatch> RR_matches, double& zMin, double& zMax)
{
	// 3D reconstruction inputs
	//Mat structure;	// would be 4xN, one homogeneous point per column
	vector<Point2f> p1, p2;
	get_matched_points(RR_KP1, RR_KP2, RR_matches, p1, p2);

	// the two projection matrices [R T]; float because triangulation helpers
	// (and point_reconstruct's at<float> access) expect CV_32FC1
	Mat proj1(3, 4, CV_32FC1);
	Mat proj2(3, 4, CV_32FC1);

	// camera 1 is the world frame: identity rotation, zero translation
	// (assigning a MatExpr such as Mat::eye writes into the submatrix in place)
	proj1(Range(0, 3), Range(0, 3)) = Mat::eye(3, 3, CV_32FC1);	// identity block
	proj1.col(3) = Mat::zeros(3, 1, CV_32FC1);

	R.convertTo(proj2(Range(0, 3), Range(0, 3)), CV_32FC1);
	T.convertTo(proj2.col(3), CV_32FC1);

	// pre-multiply by the intrinsics: proj = K [R|T]
	Mat fK;
	K.convertTo(fK, CV_32FC1);
	proj1 = fK * proj1;
	proj2 = fK * proj2;

	cout << proj1 << endl;
	cout << proj2 << endl;
	////reconstruct(K, R, T, p1, p2, structure);
	//vector<Point3f> P;
	//for (int j = 0; j < structure.cols; j++) {
	//	Point3f F;
	//	F.x = structure.at<double>(0, j) / structure.at<double>(3, j);
	//	F.y = structure.at<double>(1, j) / structure.at<double>(3, j);
	//	F.z = structure.at<double>(2, j) / structure.at<double>(3, j);
	//	P.push_back(F);
	//}
	//zMin = zMax = P[0].z;
	//for (int i = 1; i < P.size(); i++) {
	//	zMin = P[i].z < zMin ? P[i].z : zMin;
	//	zMax = P[i].z > zMax ? P[i].z : zMax;

	//}

	// triangulate every match and keep the running depth extremes
	for (int i = 0; i < p1.size(); i++) {
		double p_1[2] = { p1[i].x, p1[i].y };
		double p_2[2]= { p2[i].x, p2[i].y };
		double P3D[3];
		point_reconstruct(proj1, proj2, p_1, p_2, P3D);
		if (i == 0) {
			zMin = P3D[2];
			zMax= P3D[2];
		}
		else {
			zMin = P3D[2] < zMin ? P3D[2] : zMin;
			zMax = P3D[2] > zMax ? P3D[2] : zMax;
		}
	}
}
// Map each input pixel through the homography H (3x3, CV_64F) and append the
// de-homogenized, rounded result to a_out. Always returns 0.
int getpoint(vector< Point > a_in, vector< Point >& a_out, Mat H)

{
	Mat homog(3, 1, CV_64F);

	for (size_t idx = 0; idx < a_in.size(); idx++)
	{
		// homogeneous coordinate [x, y, 1]^T
		homog.at<double>(0, 0) = a_in[idx].x;
		homog.at<double>(1, 0) = a_in[idx].y;
		homog.at<double>(2, 0) = 1;

		// project through the homography
		Mat mapped = H * homog;

		// perspective divide, rounded to integer pixels
		cv::Point warpPt;
		warpPt.x = cvRound(mapped.at<double>(0, 0) / mapped.at<double>(2, 0));
		warpPt.y = cvRound(mapped.at<double>(1, 0) / mapped.at<double>(2, 0));

		a_out.push_back(warpPt);
	}

	return 0;
}

// Compute the rectangle covered by img_2 after warping its corners through H
// into img_1's frame (the common region of the pair). `ori` receives the
// clamped top-left corner and `rect` the resulting region.
void FindSameRect(Mat img_1, Mat img_2, Mat H, Rect& rect, CvPoint& ori) {

	// warp the four corners of img_2 into img_1's coordinate frame
	vector <Point> img1_corners(4);
	img1_corners[0] = Point(0, 0);
	img1_corners[1] = Point(img_2.cols, 0);
	img1_corners[2] = Point(img_2.cols, img_2.rows);
	img1_corners[3] = Point(0, img_2.rows);
	vector <Point> img2_corners;
	getpoint(img1_corners, img2_corners, H);

	// width truncated down to a multiple of 4 (stride-friendly)
	int width = (int)(img2_corners[2].x - img2_corners[0].x - 3) / 4 * 4;
	int height = (int)(img2_corners[2].y - img2_corners[0].y);

	// Clamp the origin to non-negative coordinates.
	// bug fix: the y branch tested img2_corners[2].y but assigned
	// img2_corners[0].y -- mismatched condition; mirror the x branch.
	ori.x = (img2_corners[0].x < 0 ? 0 : img2_corners[0].x);
	ori.y = (img2_corners[0].y < 0 ? 0 : img2_corners[0].y);

	rect = Rect((int)ori.x, (int)ori.y, width, height);
}

// Plane-induced homography  H = K * (R + t * nd^T) * K^-1  with  nd = n / Z,
// where the plane at depth Z has unit normal n tilted by PlaneAngle degrees
// in the x-z plane (PlaneAngle == 0 gives a fronto-parallel plane).
void SolveHomoMat(Mat R, Mat T, Mat InnerMatrix, double Z, Mat& HMat, int PlaneAngle)
{
	// unit plane normal rotated by PlaneAngle (degrees) in the x-z plane
	double angleRad = (double)PlaneAngle / 180 * PI;
	double normal[3] = { sin(angleRad), 0, cos(angleRad) };
	double normalLen = 1.0;	// already unit length

	// nd = n / Z : normal scaled by the inverse plane depth
	double invZ = 1.0 / Z;
	double nd[3] = { normal[0] / normalLen * invZ,
	                 normal[1] / normalLen * invZ,
	                 normal[2] / normalLen * invZ };
	Mat ndRow = Mat(1, 3, CV_64FC1, nd);

	// H = K (R + t * nd^T) K^{-1}
	HMat = InnerMatrix * (T * ndRow + R) * InnerMatrix.inv();
}




// Build one homography per virtual depth plane: the planes sweep from Zmax
// down to Zmin in Depth_levels uniform steps, and each homography maps the
// reference view onto that plane (via SolveHomoMat with a fronto-parallel
// normal). Scale/Tcenter feed the commented-out output-frame adjustment and
// are kept for when it is re-enabled.
void ComputerHomeMat(Point2f& Scale, Point2f Tcenter, Mat R, Mat T, Mat InnerMatrix, double Zmax, double Zmin, int Depth_levels, \
	vector<Mat>& HomeMat)
{
	// scaling matrix (used only by the disabled adjustment below)
	double Scale_data[][3] = { 1,   0,  0,
		0,   1,  0,
		0,   0,  1 };
	Scale_data[0][0] = Scale.x;
	Scale_data[1][1] = Scale.y;
	Mat ScaleMat = Mat(3, 3, CV_64FC1, Scale_data[0]);

	// translation recentering on Tcenter (also only used by the adjustment)
	double Tra_data[][3] = { 1,   0,  0,
							0,   1,   0,
							0,   0,  1 };
	Tra_data[0][2] = -Tcenter.x;
	Tra_data[1][2] = -Tcenter.y;
	Mat TraMat = Mat(3, 3, CV_64FC1, Tra_data[0]);


	for (int i = 0; i < Depth_levels; i++)
	{
		double di;
		// bug fix: the original tested `!Depth_levels`, which is always false
		// inside this loop; with Depth_levels == 1 it then divided by
		// (Depth_levels - 1) == 0, producing a NaN depth. A single level
		// simply uses Zmax.
		if (Depth_levels <= 1)
		{
			di = Zmax;
		}
		else
			di = Zmax - (Zmax - Zmin) / (Depth_levels - 1) * i;	// i: 0..Depth_levels-1, depth runs Zmax -> Zmin
		Mat Home;
		SolveHomoMat(R, T, InnerMatrix, di, Home, 0);
		//Home = ScaleMat * Home * TraMat.inv();	// disabled output-frame adjustment

		HomeMat.push_back(Home);
	}
}
// Warp Image through each per-depth homography (inverse mapping), producing
// the plane-sweep image stack I2'; each warped result is appended to
// Imageout and also saved as Home_<i>.jpg for inspection.
void ComputerHomeImages(Mat Image, vector<Mat> HomeMat, int Depth_levels,Size newImageSize, vector<Mat>&Imageout)
{
	for (int i = 0; i < Depth_levels; i++)
	{
		Mat Iout;
		// inverse homography maps the target plane back onto the source image;
		// INTER_CUBIC + WARP_FILL_OUTLIERS replace the deprecated
		// CV_INTER_CUBIC/CV_WARP_FILL_OUTLIERS aliases (same values)
		warpPerspective(Image, Iout, HomeMat[i].inv(), newImageSize, INTER_CUBIC + WARP_FILL_OUTLIERS);
		Imageout.push_back(Iout);

		// bug fix: snprintf instead of sprintf removes the overflow risk
		char s[100];
		snprintf(s, sizeof(s), "Home_%d.jpg", i);
		imwrite(s, Iout);
	}

}

// ZNCC-style matching score between the (2*radius+1)^2 windows centered at
// (x1, y1) in im1 and (x2, y2) in im2. Only channel 0 of the 3-channel
// pixels is used. Returns 0 when a window would leave either image or when a
// window is flat; otherwise returns -|coef| in [-1, 0], so SMALLER means a
// better match.
// NOTE(review): callers pass (row, col) for (x*, y*) -- see HomeImagesMatch,
// which calls this with (i, j, i, j) where i is the row -- and Mat::at below
// is indexed as at(x+dx, y+dy), i.e. x is treated as the row. The bounds
// test, however, compares x against cols and y against rows; this is only
// consistent for square images -- confirm.
// NOTE(review): each difference goes through abs() before the products, so
// the correlation's sign is discarded; this is not a standard signed ZNCC.
float NormCrossCorr(int x1, int y1, int x2, int y2, int radius, Mat im1, Mat im2) {
	int window = 2 * radius + 1;
	int windowSq = window * window;


	int xDim1 = im1.cols;
	int yDim1 = im1.rows;
	int xDim2 = im2.cols;
	int yDim2 = im2.rows;

		// reject windows that would read outside either image
		if ((x1 - radius) < 0 || (x1 + radius) >= xDim1 ||
			(y1 - radius) < 0 || (y1 + radius) >= yDim1 ||
			(x2 - radius) < 0 || (x2 + radius) >= xDim2 ||
			(y2 - radius) < 0 || (y2 + radius) >= yDim2)
			return 0;

		// window means of channel 0
		double mean1 = 0.0;
		double mean2 = 0.0;

		/*cout << im1.type() << endl;
		cout << im2.type() << endl;*/
		/*imshow("img1", im1);
		imshow("img2", im2);
		waitKey();*/
		//cout << im1 << endl;
		//cout << im1.type()<< endl;

		for (int x = -radius; x <= radius; ++x)
		{
			for (int y = -radius; y <= radius; ++y)
			{
				/*cout <<(int) im1.at<Vec3b>(x1 + x, y1 + y)[0] << endl;
				cout << (int)im2.at<Vec3b>(x2 + x, y2 + y)[0] << endl;*/

				/*Mat img0, img1;
				im1.copyTo(img0);
				im2.copyTo(img1);
				circle(img0, Point(y1 + y, x1 + x), 5, Scalar(0, 255, 255), -1);
				circle(img1, Point(y1 + y, x1 + x), 5, Scalar(0, 255, 255), -1);
				imshow("1", img0);
				imshow("2", img1);
				waitKey(0);*/
				//mean1 += data1[step1 * (y1 + y) + (x1 + x)];
				mean1 += (double)(int)im1.at<Vec3b>(x1 + x, y1 + y)[0];
				//mean2 += data2[step2 * (y2 + y) + (x2 + x)];
				mean2 += (double)(int)im2.at<Vec3b>(x2 + x, y2 + y)[0];
			}
		}

		mean1 /= (double)windowSq;
		mean2 /= (double)windowSq;

		// cross term and the two variances (of the abs-deviations)
		double diffCross = 0.0;
		double diffSq1 = 0.0;
		double diffSq2 = 0.0;

		double diff1, diff2;

		for (int x = -radius; x <= radius; ++x)
		{
			for (int y = -radius; y <= radius; ++y)
			{

				diff1 = abs((double)(int)im1.at<Vec3b>(x1 + x, y1 + y)[0] - mean1)/**GaussTemplate[(y+radius)*window+x+radius]*/;
				diff2 = abs((double)(int)im2.at<Vec3b>(x2 + x, y2 + y)[0] - mean2)/**GaussTemplate[(y+radius)*window+x+radius]*/;

				diffCross += diff1 * diff2;
				diffSq1 += diff1 * diff1;
				diffSq2 += diff2 * diff2;
			}
		}
		// a flat window has zero variance: no meaningful correlation
		if (diffSq1 == 0 || diffSq2 == 0)
		{
			return 0;
		}

		else
		{
			// normalized correlation, negated so lower = better
			double coef = diffCross / sqrt(diffSq1 * diffSq2);
			return -1 * fabs(coef);
		}
}
// Sub-level refinement of a discrete best-match index by parabolic
// interpolation: MatchScore holds the cost values at levels
// (MatchLevelID-1, MatchLevelID, MatchLevelID+1). Falls back to the integer
// index when the three points have zero curvature (degenerate parabola).
float InterPointID(int MatchLevelID, float* MatchScore)
{
	float curvature = MatchScore[2] + MatchScore[0] - 2 * MatchScore[1];
	if (curvature == 0.0)
		return MatchLevelID;
	// vertex offset of the parabola through the three samples
	return MatchLevelID + (MatchScore[2] - MatchScore[0]) / (2 * curvature);
}
// Plane-sweep matching via homographies: for every pixel inside `rect`
// (anchored at `ori`) compute the NCC cost against each of the depth_level
// warped images in Imageout, pick the depth index with minimum cost, and
// refine it by parabolic interpolation (InterPointID). DisOut (CV_64FC1,
// full image size) receives the per-pixel depth-level estimate; pixels
// outside the rect are left uninitialized.
// NOTE(review): MinS starts at 1000 -- safe here because NormCrossCorr
// returns values in [-1, 0], so `ID` is always assigned on the first level.
void HomeImagesMatch(Mat ImageTop,Rect rect, CvPoint ori,vector<Mat>& Imageout, int Windowsize,Mat& DisOut)
{
	// image and search-region dimensions
	int width = ImageTop.cols;
	int height = ImageTop.rows;
	
	int rect_width = rect.width;
	int rect_height = rect.height;


	DisOut = Mat(height, width, CV_64FC1);
	
	int i, j, k;
	//int step = width * Depth_levels;

	//// Gaussian template (disabled)
	//float* Gauss_tmp = new float[Windowsize * Windowsize];
	//for (i = 0; i < Windowsize * Windowsize; i++)
	//{
	//	Gauss_tmp[i] = 1.0;
	//}

	// 	ComputerGuassTemp(Windowsize,Windowsize,Gauss_tmp,1.0);

		// cost volume: one depth_level-channel entry per pixel (cost as a function of d)
	Mat ssdMat = Mat(height, width, CV_64FC(depth_level));
	//float* Costdata =(float*) ssdMat.data;
	for (i = ori.y; i < ori.y+ rect_height; i++)
	{
		for (j = ori.x; j <ori.x+ rect_width; j++)
		{
			for (k = 0; k < depth_level; k++)
			{
				/*Mat img0,img1;
				ImageTop.copyTo(img0);
				Imageout[k].copyTo(img1);
				circle(img0, Point(j, i), 5, Scalar(0, 255, 255), -1);
				circle(img1, Point(j, i), 5, Scalar(0, 255, 255), -1);
				imshow("1", img0);
				imshow("2", img1);
				waitKey(0);*/
				// matching cost for this pixel at depth level k
				// (i is the row, j the column -- NormCrossCorr follows this convention)
				ssdMat.at<Vec_nb>(i,j)[k]= NormCrossCorr(i, j, i, j, Windowsize / 2, ImageTop, Imageout[k]);
				//Costdata[i * step + j * Depth_levels + k] = NormCrossCorr(j, i, j, i, Windowsize / 2, ImageTop, Imageout[k]);
			//	cout<<ssdMat.at<Vec_nb>(i, j)[k] << endl;
			}
		}
	}

	// pick the depth level d with minimum cost for each pixel
	//float* ZID = (float*)DisOut.data;
	int ID;
	for (i = ori.y; i < ori.y + rect_height; i++)
	{
		for (j = ori.x; j < ori.x + rect_width; j++)
		{
			float MinS = 1000;
			float score[3];
			for (k = 0; k < depth_level; k++)
			{
				if (MinS > ssdMat.at<Vec_nb>(i, j)[k])
				{
					// lowest-cost depth level so far
					MinS = ssdMat.at<Vec_nb>(i, j)[k];
					ID = k;
				}
			}
			if (i<1 || i>height - 1 || j<1 || j>width - 1 || ID == 0 || ID == depth_level - 1)      // no interpolation at image borders or at either end of the depth range
			{
				DisOut.at<double>(i,j) =(double) ID;
				continue;
			}
			for (k = -1; k <= 1; k++)
			{
				score[1 + k] = ssdMat.at<Vec_nb>(i, j)[ID+k]; // in-range: ID is strictly inside [1, depth_level-2] here
			}
			// sub-level refinement around the discrete minimum
			DisOut.at<double>(i, j) =(double) InterPointID(ID, score);
		}
	}
}

// Fill nccMat (CV_64FC(depth_level), full image size) with per-pixel,
// per-depth NCC scores between ImageTop and each plane-sweep image in
// Imageout. Only pixels inside `rect` (anchored at `ori`) are written.
// NormCrossCorr returns -|coef|, so the value stored here is negated to a
// positive score in [0, 1].
void ComputerNccMat(Mat ImageTop, Rect rect, CvPoint ori, vector<Mat>& Imageout, int Windowsize, Mat& nccMat)
{
	int width = ImageTop.cols;
	int height = ImageTop.rows;

	// one depth_level-channel score vector per pixel
	nccMat = Mat(height, width, CV_64FC(depth_level));

	for (int row = ori.y; row < ori.y + rect.height; row++)
	{
		for (int col = ori.x; col < ori.x + rect.width; col++)
		{
			for (int level = 0; level < depth_level; level++)
			{
				// negate: NormCrossCorr returns the negated absolute correlation
				nccMat.at<Vec_nb>(row, col)[level] =
					-1 * NormCrossCorr(row, col, row, col, Windowsize / 2, ImageTop, Imageout[level]);
			}
		}
	}
}

//void depthReocery(Mat &ImageTop, Mat& ImageDown,vector<Mat>HomeMat, Point2f& Scale, Point2f Tcenter,int Windowsize, int Depth_levels, Mat& DisOut,Point ori) {
//
//	// 检查接口
//	int width = ImageDown.cols;
//	int height = ImageDown.rows;
//
//	int start_x, start_y;
//	start_x = ori.x;
//	start_y = ori.y;
//
//	GaussianBlur(ImageTop, ImageTop, Size(9, 9), 15, 0, 4);
//	GaussianBlur(ImageDown, ImageDown, Size(9, 9), 15, 0, 4);
//
//
//
//	// +缩放矩阵
//	double Scale_data[][3] = { 1,   0,  0,
//		0,   1,  0,
//		0,   0,  1 };
//	Scale_data[0][0] = Scale.x;
//	Scale_data[1][1] = Scale.y;
//	Mat ScaleMat = Mat(3, 3, CV_64FC1, Scale_data[0]);
//
//	// 平移阵
//	double Tra_data[][3] = { 1,   0,  0,
//							0,   1,   0,
//							0,   0,  1 };
//	Tra_data[0][2] = Tcenter.x;
//	Tra_data[1][2] = Tcenter.y;
//	Mat TraMat = Mat(3, 3, CV_64FC1, Tra_data[0]);
//
//	DisOut = Mat(height, width, CV_32FC1);
//
//	int i, j, k;
//	int step = width * Depth_levels;
//
//	// 计算高斯模板
//	float* Gauss_tmp = new float[Windowsize * Windowsize];
//	for (i = 0; i < Windowsize * Windowsize; i++)
//	{
//		Gauss_tmp[i] = 1.0;
//	}
//
//	// 	ComputerGuassTemp(Windowsize,Windowsize,Gauss_tmp,1.0);
//
//		// 代价函数(变量为d)
//	Mat ssdMat = Mat(height, width, CV_32FC(Depth_levels));
//	float* Costdata = (float*)ssdMat.data;
//
//	for (i = 0; i < height; i++)
//	{
//		for (j = 0; j < width; j++)
//		{
//
//			
//
//			double d[3] = { j,i,1 };
//
//			//CvMat NormMat1 = cvMat(1, 3, CV_64FC1, Norm_data);
//			Mat point = Mat(1, 3, CV_64FC1, d);
//			
//		
//			
//		/*	Mat img0;
//			ImageDown.copyTo(img0);
//			circle(img0, Point(j, i), 5, Scalar(0, 255, 255), -1);
//			
//				imshow("1", img0);
//				cout << "下层图像上的点坐标：" << j << " " << i << endl;*/
//			
//			
//			for (k = 0; k < Depth_levels; k++)
//			{
//				Mat point_h = HomeMat[k].inv() * point.t();
//				Point2f after;
//				after.x = point_h.at<double>(0, 0) / point_h.at<double>(2, 0);
//				after.y = point_h.at<double>(1, 0) / point_h.at<double>(2, 0);
//				//下层图像
//				Mat move_ = ScaleMat  * point.t();
//				Point move;
//				move.x = move_.at<double>(0, 0) / move_.at<double>(2, 0);
//				move.y = move_.at<double>(1, 0) / move_.at<double>(2, 0);
//				//cout << "Move:" << move << endl;
//				//if (i == height - 1) {
//				/*	Mat img1;
//					ImageTop.copyTo(img1);
//					circle(img1, Point(start_x + move.x, start_y + move.y), 5, Scalar(0, 255, 255), -1);
//					circle(img1, Point((int)after.x, (int)after.y), 5, Scalar(0, 0, 255), -1);
//					imshow("2", img1);
//					waitKey();*/
//				
//				// 互信息作为代价函数
//				Costdata[i * step + j * Depth_levels + k] = NormCrossCorr(start_x + move.x, start_y + move.y, (int)after.x, (int)after.y, Windowsize / 2, ImageTop, ImageTop, Gauss_tmp);
//				//if (i == height - 1)
//				//	cout << "ZNCC:" << Costdata[i * step + j * Depth_levels + k] << endl;
//			}
//		}
//	}
//
//	// 求解最小能量对应的视差d
//	float* ZID = (float*)DisOut.data;
//	int ID;
//	for (i = 0; i < height; i++)
//	{
//		for (j = 0; j < width; j++)
//		{
//			float MinS = 1000;
//			float score[3];
//			for (k = 0; k < Depth_levels; k++)
//			{
//				if (MinS > Costdata[i * step + j * Depth_levels + k])
//				{
//					// 最小能量对应的视差
//					MinS = Costdata[i * step + j * Depth_levels + k];
//					ID = k;
//				}
//			}
//			if (i<1 || i>height - 1 || j<1 || j>width - 1 || ID == 0 || ID == Depth_levels - 1)      // 边缘点不进行插值 || 两头不插值
//			{
//				ZID[i * width + j] = ID;
//				continue;
//			}
//			for (k = -1; k <= 1; k++)
//			{
//				score[1 + k] = Costdata[i * step + j * Depth_levels + ID + k]; //溢出
//			}
//			// 插值该点视差
//			ZID[i * width + j] = InterPointID(ID, score);
//		}
//	}
//}

//void ComputerXYZ(CvMat* InnerMatrix_Inv, double z, CvPoint ImagePoint, float* depth)
//{
//	/*************确定x,y******************/
//	double ImageCoor[3] = { ImagePoint.x,ImagePoint.y,1 };
//	CvMat ImageCoorMat = cvMat(3, 1, CV_64FC1, ImageCoor);
//
//	//归一化图像坐标
//	CvMat* TempMat = cvCreateMat(3, 1, CV_64FC1);
//	cvMatMul(InnerMatrix_Inv, &ImageCoorMat, TempMat);
//
//	/******空间点坐标******/
//	depth[0] = (float)z * cvmGet(TempMat, 0, 0);
//	depth[1] = (float)z * cvmGet(TempMat, 1, 0);
//	depth[2] = (float)z;
//	cvReleaseMat(&TempMat);
//}
//void ComputerImageDepth(Rect rect, Mat PointID, Mat K, double Z0, double deltaZ, \
//	Mat depth)
//{
//	
//	depth = Mat(rect.height, rect.width, CV_32FC3);
//	
//	int i, j;
//	//float* Z = PointID.;
//	//float* depth = Point3D->data.fl;
//	int step = 3 * depth.cols; // 3通道数组
//
//	//求K逆
//	CvMat InnerMat = cvMat(3, 3, CV_64FC1, InnerMatrix);
//	CvMat* InnerMat_Inverse = cvCreateMat(3, 3, CV_64FC1);
//	cvInvert(&InnerMat, InnerMat_Inverse);
//
//	for (i = 0; i < PointID.rows; i++)
//	{
//		for (j = 0; j < PointID.cols; j++)
//		{
//			double z = Z0 - PointID[i,j] * deltaZ;       // 修改
//			ComputerXYZ(K.inv(), z, cvPoint(rect.x + j, rect.y + i), &depth[i * step + j * 3]); //element = (x,y,z)
//		}
//	}
//	cvReleaseMat(&InnerMat_Inverse);
//
//}


/* Colorize a single-channel 8-bit depth/disparity map `src` into the
   3-channel BGR image `disp` (caller must pre-allocate disp to src's size).
   method == 1: simple red/green/blue ramp; otherwise a KITTI-style
   piecewise-linear false-color map.
   NOTE(review): raw .data indexing assumes both Mats are continuous. */
void GenerateFalseMap(cv::Mat& src, cv::Mat& disp,int method =1 )
{
 
	int height_ = src.rows;
	int width_ = src.cols;
	// for all pixels do  

	if (method == 1) {
		for (int v = 0; v < height_; v++) {
			for (int u = 0; u < width_; u++) {
				int pix = src.data[v * width_ + u];
				uchar r = (uchar)(pix);
				uchar g = 8 * (uchar)((pix < 128) ? pix : 255 - pix);	// triangular ramp, wraps mod 256
				uchar b = (uchar)(255 - pix);
				// BGR, interleaved in memory
				disp.data[v * width_ * 3 + 3 * u + 0] = b;
				disp.data[v * width_ * 3 + 3 * u + 1] = g;
				disp.data[v * width_ * 3 + 3 * u + 2] = r;
			}
		}
	}
	else {
		// piecewise-linear color map: each row is {r, g, b, bin weight}
		float max_val = 255.0f;
		float map[8][4] = { { 0,0,0,114 },{ 0,0,1,185 },{ 1,0,0,114 },{ 1,0,1,174 },
		{ 0,1,0,114 },{ 0,1,1,185 },{ 1,1,0,114 },{ 1,1,1,0 } };
		float sum = 0;
		for (int i = 0; i < 8; i++)
			sum += map[i][3];

		float weights[8]; // relative   weights  
		float cumsum[8];  // cumulative weights  
		cumsum[0] = 0;
		for (int i = 0; i < 7; i++) {
			weights[i] = sum / map[i][3];
			cumsum[i + 1] = cumsum[i] + map[i][3] / sum;
		}

		for (int v = 0; v < height_; v++) {
			for (int u = 0; u < width_; u++) {

				// get normalized value in [0, 1]
				float val = std::min(std::max(src.data[v * width_ + u] / max_val, 0.0f), 1.0f);

				// find bin  
				int i;
				for (i = 0; i < 7; i++)
					if (val < cumsum[i + 1])
						break;
				// bug fix: for val == 1.0 (pixel 255) the loop exits with
				// i == 7, and the code below then read weights[7]
				// (uninitialized) and map[8] (out of bounds) -- clamp to the
				// last valid bin instead
				if (i > 6)
					i = 6;

				// compute red/green/blue values  
				float   w = 1.0 - (val - cumsum[i]) * weights[i];
				uchar r = (uchar)((w * map[i][0] + (1.0 - w) * map[i + 1][0]) * 255.0);
				uchar g = (uchar)((w * map[i][1] + (1.0 - w) * map[i + 1][1]) * 255.0);
				uchar b = (uchar)((w * map[i][2] + (1.0 - w) * map[i + 1][2]) * 255.0);
				// BGR, interleaved in memory
				disp.data[v * width_ * 3 + 3 * u + 0] = b;
				disp.data[v * width_ * 3 + 3 * u + 1] = g;
				disp.data[v * width_ * 3 + 3 * u + 2] = r;
			}
		}
	}
}


// Sort SeedPoints[CurrentNum .. SeedNum) by escore, highest first
// (selection-sort style, matching the original's algorithm).
// bug fixes: the vector was taken BY VALUE, so the caller never observed the
// sorted order -- it is now passed by reference; and the element swap copied
// SeedPoints[i] into both slots instead of exchanging them.
void SortMaxToMin(vector<seed>& SeedPoints, int CurrentNum, int SeedNum)
{
	seed tmp;

	for (int i = CurrentNum; i < SeedNum; i++)
	{
		for (int j = i + 1; j < SeedNum; j++)
		{
			if (SeedPoints[i].escore < SeedPoints[j].escore)
			{
				// proper three-step swap of elements i and j
				tmp = SeedPoints[i];
				SeedPoints[i] = SeedPoints[j];
				SeedPoints[j] = tmp;
			}
		}
	}

}


// Initial seed selection: single-peak / quasi-single-peak pixels plus matched feature points.
//
// imgMinAreaMask  : mask of the reconstructible area (smallest homography image,
//                   i.e. the image of the top-most virtual plane); non-zero = valid pixel.
// scores          : per-pixel ZNCC correlation curve over the depth layers (Vec_nb per pixel).
// MaxScore        : out, best correlation value of the curve per pixel.
// StdScore        : out, local standard deviation of the curve around the main peak.
// evaluationScores: out, "goodness" estimate (escore) of the curve per pixel.
// MultiTopScore   : out, 8 planes holding up to 4 (peak value, peak layer) pairs per pixel.
// SeedPoints      : out, selected seeds, sorted by escore descending before returning.
// RR_KP1          : matched feature points; they are accepted as seeds when single-peaked.
// matDepth        : in/out, depth-layer map; seeds get their sub-layer depth written here.
// nLayers         : number of depth layers in the correlation curves.
// BSPFTh          : half-window (in layers) used for the local std-dev computation.
// Returns the number of seeds found.
int FindInitialSeeds(Mat imgMinAreaMask, Mat& scores, Mat& MaxScore, Mat& StdScore, Mat& evaluationScores, vector<Mat>& MultiTopScore, vector<seed>& SeedPoints, vector<KeyPoint> RR_KP1, Mat& matDepth, int nLayers, int BSPFTh)
{
	int width = imgMinAreaMask.cols;
	int height = imgMinAreaMask.rows;

	MaxScore = Mat(height, width, CV_64FC1);
	StdScore = Mat(height, width, CV_64FC1);
	evaluationScores = Mat(height, width, CV_64FC1);
	int iter;
	for (iter = 0; iter < 8; ++iter)
	{
		// Independent zeroed planes: (value, layer) pairs for up to 4 peaks.
		Mat tmp = Mat::zeros(height, width, CV_64FC1);
		MultiTopScore.push_back(tmp);
	}
	Mat flag;
	matDepth.copyTo(flag);
	for (int tempi = 0; tempi < RR_KP1.size(); tempi++) {// tag the matched feature points
		flag.at<double>((int)RR_KP1[tempi].pt.y, (int)RR_KP1[tempi].pt.x) = 1;
	}

	float Corr, PreCorr1, PreCorr2, NextCorr1, NextCorr2;

	int i, j, h;
	float layerVal;
	double maxVal = 0;
	double minVal = 0;
	// minMaxIdx writes one index per Mat dimension; matPtCorrCurve is a 2-D
	// (n x 1) Mat, so a 2-element buffer is required (a single int would be
	// overrun, corrupting the stack).
	int maxLoc[2];

	double stdDev = 0, localStdDev = 0;// std-dev of the ZNCC curve / of its local window
	int SeedsCount = 0;

	// Correlation curve excluding the first and last two layers.
	Mat matPtCorrCurve(nLayers - 4, 1, CV_32FC1);
	// Local window of the correlation curve around the main peak.
	Mat matNeighborCorr;

	struct
	{
		float peakVal;
		float peakLayer;
	}Peak[20] = {};// zero-initialized so pixels without any peak never read stale/uninitialized data

	const float fRatio = 0.5;// threshold on secondary/main peak ratio
	const float CORRTH = 0.5;// correlation threshold for seed acceptance
	const float STDTH = 0.15;// threshold on curve standard deviation
	const float LOCALSTDTH = 0.02;// threshold on local curve standard deviation
	float HalfPeakCorr;// correlation value at half peak height

	int nPeakCount = 0;
	float maxPeakCorr;
	int nFinalPeakCount = 0;
	float tempPeakVal, tempPeakLayer;

	for (i = 0; i < height; i++) {
		for (j = 0; j < width; j++)
		{
			if (imgMinAreaMask.at<Vec3b>(i, j)[0] != 0)
			{
				nPeakCount = 0;
				maxPeakCorr = 0;

				// Find peaks of the correlation curve at pixel (i,j); keep
				// those whose ratio to the main peak exceeds the threshold.
				for (h = 2; h < nLayers - 2; h++)
				{
					layerVal = scores.at<Vec_nb>(i, j)[h];
					matPtCorrCurve.at<float>(h - 2) = layerVal;
					PreCorr1 = scores.at<Vec_nb>(i, j)[h - 1];
					PreCorr2 = scores.at<Vec_nb>(i, j)[h - 2];
					NextCorr1 = scores.at<Vec_nb>(i, j)[h + 1];
					NextCorr2 = scores.at<Vec_nb>(i, j)[h + 2];
					// A peak must dominate its two neighbours on both sides.
					if (layerVal > PreCorr1 && layerVal > PreCorr2 && layerVal > NextCorr1 && layerVal > NextCorr2)
					{
						Peak[nPeakCount].peakVal = layerVal;
						Peak[nPeakCount].peakLayer = h;
						nPeakCount++;
						_ASSERT(nPeakCount < 20);
						if (layerVal > maxPeakCorr)
						{
							maxPeakCorr = layerVal;
						}
					}
				}

				// Sort the peaks descending and keep only those whose ratio to
				// the main peak exceeds fRatio; the rest are zeroed out.
				nFinalPeakCount = 0;
				for (int m = 0; m < nPeakCount; m++)
				{
					for (int n = m + 1; n < nPeakCount; n++)
					{
						if (Peak[n].peakVal > Peak[m].peakVal)
						{
							tempPeakVal = Peak[m].peakVal;  	tempPeakLayer = Peak[m].peakLayer;
							Peak[m].peakVal = Peak[n].peakVal;  Peak[m].peakLayer = Peak[n].peakLayer;
							Peak[n].peakVal = tempPeakVal;      Peak[n].peakLayer = tempPeakLayer;
						}
					}

					if (Peak[m].peakVal / Peak[0].peakVal > fRatio)
					{
						nFinalPeakCount++;
					}
					else
					{
						for (int tempi = m; tempi < nPeakCount; tempi++)
						{
							Peak[tempi].peakLayer = 0;
							Peak[tempi].peakVal = 0;
						}
						break;
					}
				}
				nPeakCount = nFinalPeakCount;

				minMaxIdx(matPtCorrCurve, &minVal, &maxVal, 0, maxLoc);// min/max of the curve
				Mat mean_temp, std_temp;
				meanStdDev(matPtCorrCurve, mean_temp, std_temp);
				stdDev = std_temp.at<double>(0, 0);// standard deviation of the whole curve

				// Local std-dev in a +/-BSPFTh window around the main peak.
				if (nPeakCount > 0)
				{
					int hStart = max((int)Peak[0].peakLayer - 2 - BSPFTh, 0);
					int hEnd = min((int)Peak[0].peakLayer - 2 + BSPFTh, nLayers - 5);
					matNeighborCorr = matPtCorrCurve.rowRange(hStart, hEnd).clone();// [hStart, hEnd)
					meanStdDev(matNeighborCorr, mean_temp, std_temp);
					localStdDev = std_temp.at<double>(0, 0);
				}
				MaxScore.at<double>(i, j) = maxVal;
				// When no peak was found, localStdDev keeps the value from the
				// previous peaked pixel (0 before the first one).
				StdScore.at<double>(i, j) = localStdDev;

				// Goodness estimate: escore = peakVal * stdDev / half-peak width.
				HalfPeakCorr = (Peak[0].peakVal - minVal) / 2;
				int maxLayerRange = min(nLayers - Peak[0].peakLayer - 2, Peak[0].peakLayer - 2);
				int tempLayerLeft = 0;
				int tempLayerRight = 0;
				// Scan outward from the peak for the half-height crossings.
				for (int i = 0; i < maxLayerRange - 1; i++)
				{
					if ((int)Peak[0].peakLayer - i - 1 < 0 || (int)Peak[0].peakLayer + i + 1 >= matPtCorrCurve.rows) {
						break;
					}
					if (matPtCorrCurve.at<float>((int)Peak[0].peakLayer - i - 1) < HalfPeakCorr && matPtCorrCurve.at<float>((int)Peak[0].peakLayer - i) > HalfPeakCorr)
						tempLayerLeft = Peak[0].peakLayer - i;
					if (matPtCorrCurve.at<float>((int)Peak[0].peakLayer + i + 1) < HalfPeakCorr && matPtCorrCurve.at<float>((int)Peak[0].peakLayer + i) > HalfPeakCorr)
						tempLayerRight = Peak[0].peakLayer + i;
				}
				if (tempLayerLeft != 0 && tempLayerRight != 0)
					evaluationScores.at<double>(i, j) = Peak[0].peakVal * stdDev / (tempLayerRight - tempLayerLeft);

				// Penalize multi-peak curves.
				if (nPeakCount > 1)
				{
					evaluationScores.at<double>(i, j) /= 2;
				}

				// Record up to 4 (value, layer) pairs in the MultiTopScore planes.
				int RecordPeakCount = min(nPeakCount, 4);
				for (int tempi = 0; tempi < RecordPeakCount; tempi++)
				{
					MultiTopScore[2 * tempi].at<double>(i, j) = Peak[tempi].peakVal;
					MultiTopScore[2 * tempi + 1].at<double>(i, j) = Peak[tempi].peakLayer;
				}

				// Seed selection: a single-peak pixel that is either a matched
				// feature point, or passes all three quality thresholds.
				if ((nPeakCount == 1) && (flag.at<double>(i, j) == 1 || (stdDev > STDTH && localStdDev > LOCALSTDTH && Peak[0].peakVal > CORRTH)))
				{
					// Sub-layer refinement by parabola interpolation around the peak.
					int tempLayer = Peak[0].peakLayer;
					PreCorr1 = scores.at<Vec_nb>(i, j)[tempLayer - 1];
					NextCorr1 = scores.at<Vec_nb>(i, j)[tempLayer + 1];
					Corr = scores.at<Vec_nb>(i, j)[tempLayer];
					float subLayer = (NextCorr1 - PreCorr1) / (2 * (NextCorr1 + PreCorr1 - 2 * Corr));
					matDepth.at<double>(i, j) = tempLayer + subLayer;
					evaluationScores.at<double>(i, j) *= 2;

					// Seeds are stored as pt = (x = column j, y = row i).
					Point2f pt_temp(j, i);
					seed seed_temp;
					seed_temp.pt = pt_temp;
					seed_temp.escore = evaluationScores.at<double>(i, j);
					SeedPoints.push_back(seed_temp);
					SeedsCount++;
				}
			}
		}
	}
	SortMaxToMin(SeedPoints, 0, SeedsCount);
	Mat matInitialSeeds(height, width, CV_8UC1);
	// NOTE(review): normalize with the default dtype (-1) re-creates the dst
	// with matDepth's type (CV_64F), so matInitialSeeds is not CV_8UC1 after
	// this call — confirm the imwrite below behaves as intended.
	normalize(matDepth, matInitialSeeds, 0, 255, NORM_MINMAX);
	imwrite("种子生长前.jpg", matInitialSeeds);// debug dump of the depth map before growing

	return SeedsCount;
}

// Initial seed selection, region (tile) based: the image is first scanned for
// per-pixel peak information, then processed in N x N tiles so that every tile
// contributes at least a minimum fraction of its feature points as seeds.
//
// Parameters are identical to FindInitialSeeds; key_points_for_all are the
// matched feature points. Returns the number of seeds found.
int MyFindInitialSeeds_region(Mat imgMinAreaMask, Mat& scores, Mat& MaxScore, Mat& StdScore, Mat& evaluationScores, vector<Mat>& MultiTopScore, vector<seed>& SeedPoints, vector<KeyPoint> key_points_for_all, Mat& matDepth, int nLayers, int BSPFTh)
{
	int N = 11;// tile diameter in pixels
	double kprate = 0.3;// minimum fraction of a tile's feature points to promote to seeds

	int width = imgMinAreaMask.cols;
	int height = imgMinAreaMask.rows;
	Mat PeakMat(height, width, CV_64FC1);// main-peak layer per pixel
	MaxScore = Mat(height, width, CV_64FC1);
	StdScore = Mat(height, width, CV_64FC1);
	evaluationScores = Mat(height, width, CV_64FC1);
	int iter;
	for (iter = 0; iter < 8; ++iter)
	{
		// Independent zeroed planes: (value, layer) pairs for up to 4 peaks.
		Mat tmp = Mat::zeros(height, width, CV_64FC1);
		MultiTopScore.push_back(tmp);
	}
	Mat flag;
	matDepth.copyTo(flag);
	for (int tempi = 0; tempi < key_points_for_all.size(); tempi++) {// tag the matched feature points
		flag.at<double>((int)key_points_for_all[tempi].pt.y, (int)key_points_for_all[tempi].pt.x) = 1;
	}

	// Debug visualization of the feature-point mask.
	imshow("flag", flag);
	waitKey(1000);
	destroyWindow("flag");

	float Corr, PreCorr1, PreCorr2, NextCorr1, NextCorr2;

	int i, j, h;
	float layerVal;
	double maxVal = 0;
	double minVal = 0;
	// minMaxIdx writes one index per Mat dimension; matPtCorrCurve is 2-D
	// (n x 1), so a 2-element buffer is required (a single int would be overrun).
	int maxLoc[2];

	double stdDev = 0, localStdDev = 0;// std-dev of the ZNCC curve / of its local window
	int SeedsCount = 0;

	// Correlation curve excluding the first and last two layers.
	Mat matPtCorrCurve(nLayers - 4, 1, CV_32FC1);
	// Local window of the correlation curve around the main peak.
	Mat matNeighborCorr;

	struct
	{
		float peakVal;
		float peakLayer;
	}Peak[20] = {};// zero-initialized so pixels without any peak never read stale data

	const float fRatio = 0.5;// threshold on secondary/main peak ratio
	const float CORRTH = 0.5;// correlation threshold for seed acceptance
	const float STDTH = 0.15;// threshold on curve standard deviation
	const float LOCALSTDTH = 0.02;// threshold on local curve standard deviation
	float HalfPeakCorr;// correlation value at half peak height

	int nPeakCount = 0;
	float maxPeakCorr;
	int nFinalPeakCount = 0;
	float tempPeakVal, tempPeakLayer;

	// ---- Pass 1: per-pixel peak analysis (same as FindInitialSeeds, but no
	// seed selection here; the main-peak layer is cached in PeakMat). ----
	for (i = 0; i < height; i++) {
		for (j = 0; j < width; j++)
		{
			if (imgMinAreaMask.at<Vec3b>(i, j)[0] != 0)
			{
				nPeakCount = 0;
				maxPeakCorr = 0;

				// Find peaks of the correlation curve at pixel (i,j).
				for (h = 2; h < nLayers - 2; h++)
				{
					layerVal = scores.at<Vec_nb>(i, j)[h];
					matPtCorrCurve.at<float>(h - 2) = layerVal;
					PreCorr1 = scores.at<Vec_nb>(i, j)[h - 1];
					PreCorr2 = scores.at<Vec_nb>(i, j)[h - 2];
					NextCorr1 = scores.at<Vec_nb>(i, j)[h + 1];
					NextCorr2 = scores.at<Vec_nb>(i, j)[h + 2];
					// A peak must dominate its two neighbours on both sides.
					if (layerVal > PreCorr1 && layerVal > PreCorr2 && layerVal > NextCorr1 && layerVal > NextCorr2)
					{
						Peak[nPeakCount].peakVal = layerVal;
						Peak[nPeakCount].peakLayer = h;
						nPeakCount++;
						_ASSERT(nPeakCount < 20);
						if (layerVal > maxPeakCorr)
						{
							maxPeakCorr = layerVal;
						}
					}
				}

				// Sort peaks descending; keep those above the ratio threshold.
				nFinalPeakCount = 0;
				for (int m = 0; m < nPeakCount; m++)
				{
					for (int n = m + 1; n < nPeakCount; n++)
					{
						if (Peak[n].peakVal > Peak[m].peakVal)
						{
							tempPeakVal = Peak[m].peakVal;  	tempPeakLayer = Peak[m].peakLayer;
							Peak[m].peakVal = Peak[n].peakVal;  Peak[m].peakLayer = Peak[n].peakLayer;
							Peak[n].peakVal = tempPeakVal;      Peak[n].peakLayer = tempPeakLayer;
						}
					}

					if (Peak[m].peakVal / Peak[0].peakVal > fRatio)
					{
						nFinalPeakCount++;
					}
					else
					{
						for (int tempi = m; tempi < nPeakCount; tempi++)
						{
							Peak[tempi].peakLayer = 0;
							Peak[tempi].peakVal = 0;
						}
						break;
					}
				}
				nPeakCount = nFinalPeakCount;

				minMaxIdx(matPtCorrCurve, &minVal, &maxVal, 0, maxLoc);// min/max of the curve
				Mat mean_temp, std_temp;
				meanStdDev(matPtCorrCurve, mean_temp, std_temp);
				stdDev = std_temp.at<double>(0, 0);// standard deviation of the whole curve

				// Local std-dev in a +/-BSPFTh window around the main peak.
				if (nPeakCount > 0)
				{
					int hStart = max((int)Peak[0].peakLayer - 2 - BSPFTh, 0);
					int hEnd = min((int)Peak[0].peakLayer - 2 + BSPFTh, nLayers - 5);
					matNeighborCorr = matPtCorrCurve.rowRange(hStart, hEnd).clone();// [hStart, hEnd)
					meanStdDev(matNeighborCorr, mean_temp, std_temp);
					localStdDev = std_temp.at<double>(0, 0);
				}
				MaxScore.at<double>(i, j) = maxVal;
				StdScore.at<double>(i, j) = localStdDev;

				// Goodness estimate: escore = peakVal * stdDev / half-peak width.
				HalfPeakCorr = (Peak[0].peakVal - minVal) / 2;
				int maxLayerRange = min(nLayers - Peak[0].peakLayer - 2, Peak[0].peakLayer - 2);
				int tempLayerLeft = 0;
				int tempLayerRight = 0;
				for (int i = 0; i < maxLayerRange - 1; i++)
				{
					if ((int)Peak[0].peakLayer - i - 1 < 0 || (int)Peak[0].peakLayer + i + 1 >= matPtCorrCurve.rows) {
						break;
					}
					if (matPtCorrCurve.at<float>((int)Peak[0].peakLayer - i - 1) < HalfPeakCorr && matPtCorrCurve.at<float>((int)Peak[0].peakLayer - i) > HalfPeakCorr)
						tempLayerLeft = Peak[0].peakLayer - i;
					if (matPtCorrCurve.at<float>((int)Peak[0].peakLayer + i + 1) < HalfPeakCorr && matPtCorrCurve.at<float>((int)Peak[0].peakLayer + i) > HalfPeakCorr)
						tempLayerRight = Peak[0].peakLayer + i;
				}
				if (tempLayerLeft != 0 && tempLayerRight != 0)
					evaluationScores.at<double>(i, j) = Peak[0].peakVal * stdDev / (tempLayerRight - tempLayerLeft);

				// Penalize multi-peak curves.
				if (nPeakCount > 1)
				{
					evaluationScores.at<double>(i, j) /= 2;
				}

				// Record up to 4 (value, layer) pairs in the MultiTopScore planes.
				int RecordPeakCount = min(nPeakCount, 4);
				for (int tempi = 0; tempi < RecordPeakCount; tempi++)
				{
					MultiTopScore[2 * tempi].at<double>(i, j) = Peak[tempi].peakVal;
					MultiTopScore[2 * tempi + 1].at<double>(i, j) = Peak[tempi].peakLayer;
				}
				PeakMat.at<double>(i, j) = Peak[0].peakLayer;
			}
		}
	}

	// ---- Pass 2: tile-based seed selection. ----
	int maxi = N, maxj = N;// current tile extent (rows, cols)
	int counti = 0, countj = 0;// tile indices (counti -> row bands, countj -> column bands)
	int imgi, imgj;
	int tempLayer;
	vector<seed> region_kp_all;// all feature points in the current tile
	vector<seed> region_all;// all candidate pixels in the current tile, sorted by escore
	int region_kpnum = 0, region_seednum = 0;
	cout << "图片大小" << imgMinAreaMask.size() << endl;
	Mat seedimg;
	matDepth.copyTo(seedimg);
	seed temp;
	while (imgMinAreaMask.rows - counti * N > 0 && imgMinAreaMask.cols - countj * N > 0) {
		cout << "---------------------counti " << counti << " countj " << countj << endl;
		region_kpnum = 0, region_seednum = 0;
		region_all.clear();
		region_kp_all.clear();

		// Tile extents, clipped at the image border. The original used cols
		// for the row band and `countj * rows` for the column band and never
		// restored N after an edge tile, which could index out of bounds.
		if (imgMinAreaMask.rows - counti * N <= N) {// last row band: fewer than N rows left
			maxi = imgMinAreaMask.rows - counti * N;
		}
		else {
			maxi = N;
		}
		if (imgMinAreaMask.cols - countj * N <= N) {// last column band: fewer than N columns left
			maxj = imgMinAreaMask.cols - countj * N;
		}
		else {
			maxj = N;
		}

		for (int i = 0; i < maxi; i++) {
			for (int j = 0; j < maxj; j++) {
				imgi = counti * N + i;
				imgj = countj * N + j;
				if (imgMinAreaMask.at<Vec3b>(imgi, imgj)[0] == 0) continue;

				// Candidate pixel of this tile; pt = (x = column, y = row).
				temp.pt.x = imgj; temp.pt.y = imgi;
				temp.escore = evaluationScores.at<double>(imgi, imgj);

				// NOTE(review): region_all is never given a first element (the
				// unconditional push_back was commented out in the original),
				// so this insertion loop is dead today and the "no feature
				// point" fallback below can never fire — confirm intent.
				for (int tempk = 0; tempk < region_all.size(); tempk++) {
					if (temp.escore > region_all[tempk].escore) {// insert temp at tempk, shifting the rest down
						seed temp1;
						temp1.pt.x = region_all[region_all.size() - 1].pt.x; temp1.pt.y = region_all[region_all.size() - 1].pt.y;
						temp1.escore = region_all[region_all.size() - 1].escore;
						region_all.push_back(temp1);// duplicate the last element to make room
						// NOTE(review): temph++ makes this backwards shift
						// never terminate — dead code today, fix before enabling.
						for (int temph = region_all.size() - 1; temph >= tempk; temph++) {
							region_all[temph].pt.x = region_all[temph - 1].pt.x;
							region_all[temph].pt.y = region_all[temph - 1].pt.y;
							region_all[temph].escore = region_all[temph - 1].escore;
						}
						seed temp2;
						temp2.pt.x = temp.pt.x; temp2.pt.y = temp.pt.y;
						temp2.escore = temp.escore;
						region_all[tempk] = temp2;// insert
					}
				}

				if (flag.at<double>(imgi, imgj) != 0) {// matched feature point
					seed temp6;
					temp6.pt.x = temp.pt.x; temp6.pt.y = temp.pt.y;
					temp6.escore = temp.escore;

					region_kp_all.push_back(temp6);
					region_kpnum++;

					// NOTE(review): nPeakCount / stdDev / localStdDev / Peak[0]
					// are leftovers from the LAST pixel of pass 1, not
					// recomputed for (imgi, imgj) — confirm this is intended.
					if (nPeakCount == 1 && stdDev > STDTH && localStdDev > LOCALSTDTH && Peak[0].peakVal > CORRTH) {
						// Sub-layer refinement by parabola interpolation.
						tempLayer = PeakMat.at<double>(imgi, imgj);
						PreCorr1 = scores.at<Vec_nb>(imgi, imgj)[tempLayer - 1];
						NextCorr1 = scores.at<Vec_nb>(imgi, imgj)[tempLayer + 1];
						Corr = scores.at<Vec_nb>(imgi, imgj)[tempLayer];
						float subLayer = (NextCorr1 - PreCorr1) / (2 * (NextCorr1 + PreCorr1 - 2 * Corr));
						matDepth.at<double>(imgi, imgj) = tempLayer + subLayer;
						evaluationScores.at<double>(imgi, imgj) *= 2;

						seed temp3;
						temp3.pt.x = temp.pt.x; temp3.pt.y = temp.pt.y;
						temp3.escore = temp.escore;
						SeedPoints.push_back(temp3);
						seedimg.at<double>(temp3.pt.y, temp3.pt.x) = 1;
						SeedsCount++;
						region_seednum++;
					}
				}
			}
		}

		cout << "region_seednum: " << region_seednum << "of" << region_kpnum << endl;
		if (region_kpnum == 0) {
			// No feature point in this tile: promote the highest-escore pixel.
			cout << " 没有特征点 " << endl;
			if (region_all.size() >= 1) {
				for (int k = 0; k < 1; k++) {
					// Fixed: the original assigned imgi twice (pt.y then pt.x)
					// and left imgj stale from the tile scan above.
					imgi = region_all[k].pt.y; imgj = region_all[k].pt.x;
					if (imgMinAreaMask.at<Vec3b>(imgi, imgj)[0] == 0) break;

					tempLayer = PeakMat.at<double>(imgi, imgj);
					PreCorr1 = scores.at<Vec_nb>(imgi, imgj)[tempLayer - 1];
					NextCorr1 = scores.at<Vec_nb>(imgi, imgj)[tempLayer + 1];
					Corr = scores.at<Vec_nb>(imgi, imgj)[tempLayer];
					float subLayer = (NextCorr1 - PreCorr1) / (2 * (NextCorr1 + PreCorr1 - 2 * Corr));
					matDepth.at<double>(imgi, imgj) = tempLayer + subLayer;
					evaluationScores.at<double>(imgi, imgj) *= 2;

					SeedPoints.push_back(region_all[k]);
					seedimg.at<double>(region_all[k].pt.y, region_all[k].pt.x) = 1;
					SeedsCount++;
					region_seednum++;
				}
			}
		}
		if (region_seednum <= kprate * region_kpnum || region_seednum == 0) {
			// Fewer single-peak seeds than the required fraction of feature
			// points: promote the best remaining feature points.
			cout << " 单峰点少于特征点: " << endl;
			SortMaxToMin(region_kp_all, 0, region_kpnum);
			for (int k = region_seednum; k < kprate * region_kpnum; k++) {
				imgi = region_kp_all[k].pt.y; imgj = region_kp_all[k].pt.x;
				if (imgMinAreaMask.at<Vec3b>(imgi, imgj)[0] == 0) continue;

				tempLayer = PeakMat.at<double>(imgi, imgj);
				PreCorr1 = scores.at<Vec_nb>(imgi, imgj)[tempLayer - 1];
				NextCorr1 = scores.at<Vec_nb>(imgi, imgj)[tempLayer + 1];
				Corr = scores.at<Vec_nb>(imgi, imgj)[tempLayer];
				float subLayer = (NextCorr1 - PreCorr1) / (2 * (NextCorr1 + PreCorr1 - 2 * Corr));
				matDepth.at<double>(imgi, imgj) = tempLayer + subLayer;
				evaluationScores.at<double>(imgi, imgj) *= 2;

				seed temp5;
				temp5.pt.x = region_kp_all[k].pt.x; temp5.pt.y = region_kp_all[k].pt.y;
				temp5.escore = region_kp_all[k].escore;
				SeedPoints.push_back(temp5);
				seedimg.at<double>(region_kp_all[k].pt.y, region_kp_all[k].pt.x) = 1;
				SeedsCount++;
			}
		}
		cout << "region_seednum: " << region_seednum << "of" << region_kpnum << endl;
		// Advance to the next tile; wrap to the next row band at the row end.
		countj++;
		if (imgMinAreaMask.cols - countj * N <= 0) {
			countj = 0;
			counti++;
		}
	}

	SortMaxToMin(SeedPoints, 0, SeedsCount);
	Mat matInitialSeeds(height, width, CV_8UC1);
	// NOTE(review): normalize with the default dtype (-1) re-creates the dst
	// with matDepth's type (CV_64F) — confirm the imwrite/imshow below behave
	// as intended.
	normalize(matDepth, matInitialSeeds, 0, 255, NORM_MINMAX);
	imwrite("种子生长前.jpg", matInitialSeeds);// debug dump of the depth map before growing

	imshow("种子生长前", matInitialSeeds);
	waitKey();// debug: blocks until a key is pressed

	cout << "初始种子点数量" << SeedsCount << "of "<< imgMinAreaMask.cols* imgMinAreaMask.rows << endl;
	return SeedsCount;
}


// Seed growing: propagate depth from the sorted seed list into each seed's
// 8-neighbourhood when the neighbour passes uniqueness, curve-difference,
// smoothness and correlation criteria; newly accepted pixels are appended to
// SeedPoints so the front keeps expanding.
//
// imgMinAreaMask : reconstructible-area mask (non-zero = valid pixel).
// scores         : per-pixel ZNCC correlation curve over the depth layers.
// MaxScore/StdScore/imgEvaluationScores/MultiTopScore : per-pixel curve data
//                  produced by the seed-finding stage (see FindInitialSeeds).
// SeedPoints     : in/out, seed list; grows as pixels are accepted.
// matDepth       : in/out, depth-layer map; accepted pixels get depth written.
// nLayers        : number of depth layers.
// seedsCount     : number of seeds currently in SeedPoints.
// BSPFTh         : smoothness window (in layers).
// matFinalSeeds  : out, normalized depth map after propagation.
void SeedPropagation(Mat imgMinAreaMask, Mat& scores, Mat& MaxScore, Mat& StdScore, Mat& imgEvaluationScores, vector<Mat>& MultiTopScore, vector<seed>& SeedPoints, Mat& matDepth, int nLayers, int seedsCount, int BSPFTh, Mat& matFinalSeeds)
{
	int rows = imgMinAreaMask.rows;
	int cols = imgMinAreaMask.cols;
	float Corr, PreCorr, NextCorr;// correlation samples around a peak

	int i, j, k;
	float seedLayer;
	int seedRow, seedCol;
	float fMaxCorrLayer, fMaxCorr, fMaxCorrStd;
	int initialSeedsCount = seedsCount;

	const int NR = 1;// neighbourhood radius (8-neighbourhood)
	const float CORRTH = 0.35;// correlation threshold
	const float StdCorrTh = 0.02;// local curve std-dev threshold
	const float PEAKRATIOTH = 0.7;// multi-peak ratio threshold

	// Check every seed's neighbourhood against the growing criteria.
	// Fixed: the original started at the hard-coded debug index 7176, which
	// silently skipped the first 7176 seeds.
	for (i = 0; i < seedsCount; i++)
	{
		// Both seed generators store pt = (x = column, y = row); the original
		// read them swapped (row from pt.x), which also tripped the asserts
		// below on non-square images.
		seedRow = (int)SeedPoints[i].pt.y;
		seedCol = (int)SeedPoints[i].pt.x;
		_ASSERT(seedRow < matDepth.rows);
		_ASSERT(seedCol < matDepth.cols);
		seedLayer = matDepth.at<double>(seedRow, seedCol);
		for (j = -NR; j <= NR; j++)
		{
			for (k = -NR; k <= NR; k++)
			{
				if (j == 0 && k == 0) continue;// the seed itself
				// Keep the neighbour (and its own future neighbourhood) inside the image.
				if ((seedRow + j - NR) < 0 || (seedRow + j + NR) >= rows || (seedCol + k - NR) < 0 || (seedCol + k + NR) >= cols) continue;
				if (imgMinAreaMask.at<Vec3b>(seedRow + j, seedCol + k)[0] != 0)
				{
					fMaxCorr = MultiTopScore[0].at<double>(seedRow + j, seedCol + k);// best peak value
					fMaxCorrLayer = MultiTopScore[1].at<double>(seedRow + j, seedCol + k);// best peak layer
					fMaxCorrStd = StdScore.at<double>(seedRow + j, seedCol + k);

					// First/last layer cannot be grown (no neighbours for interpolation).
					if (fMaxCorrLayer < 0.0001 || fabs(fMaxCorrLayer - nLayers + 1) < 0.0001) continue;

					// Uniqueness: skip pixels that already have a depth.
					if (matDepth.at<double>(seedRow + j, seedCol + k) > 0.001) continue;

					// Curve difference: flat curves are unreliable.
					if (StdScore.at<double>(seedRow + j, seedCol + k) <= StdCorrTh) continue;

					// Multi-peak selection: among strong-enough peaks, pick the
					// one whose layer is closest to the seed's layer.
					float minDifCorr = fMaxCorr;
					float minDifLayer = fMaxCorrLayer;
					float minDifLayerDif = fabs(minDifLayer - seedLayer);
					for (int p = 1; p < 4; p++)
					{
						// Peaks are stored in descending order; stop at the first weak one.
						if (MultiTopScore[2 * p - 2].at<double>(seedRow + j, seedCol + k) < CORRTH) break;
						if (MultiTopScore[2 * p - 2].at<double>(seedRow + j, seedCol + k) / MultiTopScore[0].at<double>(seedRow + j, seedCol + k) > PEAKRATIOTH && fabs(MultiTopScore[2 * p - 1].at<double>(seedRow + j, seedCol + k) - seedLayer) < minDifLayerDif)
						{
							minDifCorr = MultiTopScore[2 * p - 2].at<double>(seedRow + j, seedCol + k);
							minDifLayer = MultiTopScore[2 * p - 1].at<double>(seedRow + j, seedCol + k);
							minDifLayerDif = fabs(minDifLayer - seedLayer);
						}
					}
					MaxScore.at<double>(seedRow + j, seedCol + k) = minDifCorr;
					MultiTopScore[0].at<double>(seedRow + j, seedCol + k) = minDifCorr;
					MultiTopScore[1].at<double>(seedRow + j, seedCol + k) = minDifLayer;
					// Discard the remaining multi-peak information.
					for (int q = 2; q < 8; q++)	MultiTopScore[q].at<double>(seedRow + j, seedCol + k) = 0;

					// Neighbourhood smoothness: the chosen layer must be close
					// to the seed's layer (exp(-seedsCount) is ~0 for any
					// realistic seed count, so the bound is effectively 1.5).
					if (minDifLayerDif <= 1.5 + 2 * exp(-seedsCount))
					{
						// Photometric similarity: the correlation must be high enough.
						if (minDifCorr > CORRTH)
						{
							// Sub-layer refinement by parabola interpolation.
							// Note the reads: PreCorr is layer+1, NextCorr is
							// layer-1. Fixed the denominator parenthesization
							// (the original computed 2*(Next+Pre) - 2*Corr) to
							// match the interpolation used at seed selection.
							int Layer = minDifLayer;
							PreCorr = scores.at<Vec_nb>(seedRow + j, seedCol + k)[Layer + 1];
							NextCorr = scores.at<Vec_nb>(seedRow + j, seedCol + k)[Layer - 1];
							Corr = scores.at<Vec_nb>(seedRow + j, seedCol + k)[Layer];
							double subLayer = (NextCorr - PreCorr) / (2 * (NextCorr + PreCorr - 2 * Corr));
							matDepth.at<double>(seedRow + j, seedCol + k) = Layer + subLayer;

							// Append the accepted pixel as a new seed (pt = (x = col, y = row)).
							seed seed_temp;
							seed_temp.pt.x = seedCol + k; seed_temp.pt.y = seedRow + j;
							seed_temp.escore = imgEvaluationScores.at<double>(seedRow + j, seedCol + k);
							SeedPoints.push_back(seed_temp);
							seedsCount++;
							// Discard the remaining multi-peak information.
							for (int q = 2; q < 8; q++)	MultiTopScore[q].at<double>(seedRow + j, seedCol + k) = 0;
						}
						// else: fails the photometric similarity criterion
					}
					else// fails the neighbourhood smoothness criterion
					{
						// Search the best correlation within +/-BSPFTh layers
						// of the seed's layer. mCorr is float (the original
						// int silently truncated the correlation values).
						float mCorr = -100; int mLayer = 0;
						for (int x = -BSPFTh; x <= BSPFTh; x++)
						{
							if (seedLayer + x<0 || seedLayer + x>nLayers - 1) continue;
							Corr = scores.at<Vec_nb>(seedRow + j, seedCol + k)[(int)seedLayer + x];
							if (Corr > mCorr)
							{
								mCorr = Corr;
								mLayer = seedLayer + x;
							}
						}
						if (fabs(mLayer - seedLayer + BSPFTh) < 0.0001 || fabs(mLayer - seedLayer - BSPFTh) < 0.0001)
						{
							// Best value sits on the window border: untrusted,
							// inherit the seed's layer instead.
							MaxScore.at<double>(seedRow + j, seedCol + k) = 0;
							matDepth.at<double>(seedRow + j, seedCol + k) = seedLayer;
						}
						else
						{
							MaxScore.at<double>(seedRow + j, seedCol + k) = mCorr;
							matDepth.at<double>(seedRow + j, seedCol + k) = mLayer;
						}
					}
				}
			}
		}

		// When the current batch is exhausted, sort the newly grown seeds so
		// the best ones are processed first in the next batch.
		if (i > initialSeedsCount - 2)
		{
			cout << "initialSeedsCount " << initialSeedsCount << endl;
			SortMaxToMin(SeedPoints, initialSeedsCount, seedsCount);
			initialSeedsCount = seedsCount;
		}
	}
	normalize(matDepth, matFinalSeeds, 0, 255, NORM_MINMAX);
	imwrite("种子生长后.jpg", matFinalSeeds);// debug dump of the depth map after growing
	// Debug output stubs (the actual writes are disabled).
	ofstream depth;
	depth.open("DepthAfterPropagation.txt");
	ofstream nor_depth;
	nor_depth.open("DepthAfterPropagation_normalized.txt");
}
// Edge detection: return every contour point of the not-yet-recovered
// (near-zero) regions of src.
// src: single-channel image with a black (0) background.
// Returns a flat list of all contour points, over all contours.
vector<Point> GetContourPts(Mat src) {
	/*
		Pixels with value <= 1 become 255 in dst, everything else 0, so the
		extracted contours trace exactly the pixels that are still zero /
		near-zero in the input.
	*/
	// The threshold 1 is chosen because matched feature points were tagged in
	// matDepth with 0.123 (their depth was not actually recovered); after the
	// 0-255 mapping that tag lands around 1.6, so any threshold in (1.6, 2)
	// would also separate them, per the original note.
	Mat dst = src <= 1;

	vector<vector<Point>> contours;
	vector<Vec4i> hierarchy;
	findContours(dst, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_NONE);

	// Flatten every contour into a single point list.
	vector<Point> storePt;
	for (size_t i = 0; i < contours.size(); i++)
	{
		storePt.insert(storePt.end(), contours[i].begin(), contours[i].end());
	}

	return storePt;
}

// Mean-value fit: the "fitted" value is simply the mean of matPT over the
// pixels selected by FitMask (all four Scalar channels come from cv::mean).
Scalar FitMean(Mat matPT, Mat FitMask/*=NULL*/) {
	const Scalar patchMean = mean(matPT, FitMask);
	return patchMean;
}
// Plane fit: least-squares fit z = A + B*x + C*y over the masked pixels of
// matPT (CV_64FC1), discard the single worst-fitting point, refit, and return
// the fitted value at the patch centre together with fit-error statistics.
// Returns: val[0] = fitted value at the patch centre,
//          val[1] = max abs residual, val[2] = mean abs residual,
//          val[3] = std-dev of abs residual.
// Falls back to the masked mean when fewer than 5 points are available.
Scalar FitPlane(Mat matPT, Mat FitMask/*=NULL*/)
{
	Size sz = matPT.size();
	if (FitMask.empty())
	{
		// An empty mask means "use every pixel". The previous code built an
		// all-zero mask here, which selected no points at all and forced the
		// degenerate mean() fallback; sibling FitQuadric already used ones.
		FitMask = Mat::ones(sz.height, sz.width, CV_8UC1);
	}

	int ptCount = countNonZero(FitMask); // number of points taking part in the fit
	if (ptCount < 5) return mean(matPT, FitMask); // too few points for a stable fit
	_ASSERT(ptCount >= 5);

	// Build the linear system  [1 x y] * [A B C]^T = z
	Mat matXY(ptCount, 3, CV_64F);
	Mat matZ(ptCount, 1, CV_64F);
	Mat matZ0(ptCount, 1, CV_64F);
	Mat matCoff = Mat::zeros(3, 1, CV_64F);

	int i, j, iCount = 0;
	Scalar sVal;
	for (i = 0; i < sz.height; i++)
	{
		for (j = 0; j < sz.width; j++)
		{
			if (FitMask.at<uchar>(i, j) == 0) continue; // skip unmasked pixels
			matXY.at<double>(iCount, 0) = 1;
			matXY.at<double>(iCount, 1) = j; // x = column
			matXY.at<double>(iCount, 2) = i; // y = row
			sVal = matPT.at<double>(i, j);
			matZ.at<double>(iCount, 0) = sVal.val[0];
			iCount++;
		}
	}
	double maxError;
	// minMaxIdx writes one index per matrix dimension; matZ0 is a 2-D Mat, so
	// a single int (as before) would be overrun by one element.
	int maxIdx[2] = { 0, 0 };
	int maxPosPt;
	Scalar meanError, stdError;

	solve(matXY, matZ, matCoff, CV_SVD); // least-squares solution via SVD
	matZ0 = matXY * matCoff;
	matZ0 = abs(matZ - matZ0);           // per-point absolute residual
	minMaxIdx(matZ0, NULL, &maxError, NULL, maxIdx);
	maxPosPt = maxIdx[0];                // row of the worst-fitting point

	// Drop the single worst point and refit with the remaining ones.
	Mat matLeftXY(ptCount - 1, 3, CV_64F);
	Mat matLeftZ(ptCount - 1, 1, CV_64F);
	Mat matLeftZ0(ptCount - 1, 1, CV_64F);
	for (i = 0; i < maxPosPt; i++)
	{
		matLeftXY.at<double>(i, 0) = 1;
		matLeftXY.at<double>(i, 1) = matXY.at<double>(i, 1);
		matLeftXY.at<double>(i, 2) = matXY.at<double>(i, 2);
		matLeftZ.at<double>(i, 0) = matZ.at<double>(i, 0);
	}
	for (i = maxPosPt + 1; i < ptCount; i++)
	{
		matLeftXY.at<double>(i - 1, 0) = 1;
		matLeftXY.at<double>(i - 1, 1) = matXY.at<double>(i, 1);
		matLeftXY.at<double>(i - 1, 2) = matXY.at<double>(i, 2);
		matLeftZ.at<double>(i - 1, 0) = matZ.at<double>(i, 0);
	}
	solve(matLeftXY, matLeftZ, matCoff, CV_SVD);

	// Error statistics of the final fit (max, mean and std of |residual|).
	matLeftZ0 = matLeftXY * matCoff;
	matLeftZ0 = abs(matLeftZ0 - matLeftZ);
	minMaxIdx(matLeftZ0, NULL, &maxError, NULL, maxIdx);
	meanStdDev(matLeftZ0, meanError, stdError);

	double PlaneParamsA = matCoff.at<double>(0, 0);
	double PlaneParamsB = matCoff.at<double>(1, 0);
	double PlaneParamsC = matCoff.at<double>(2, 0);

	Scalar scalar;
	// Evaluate the fitted plane at the patch centre.
	scalar.val[0] = PlaneParamsA + PlaneParamsB * ((sz.width - 1) / 2.0) + PlaneParamsC * ((sz.height - 1) / 2.0);
	scalar.val[1] = maxError;
	scalar.val[2] = meanError.val[0];
	scalar.val[3] = stdError.val[0];

	return scalar;
}
// Quadric fit: least-squares fit z = A + B*x + C*y + D*x*y + E*x^2 + F*y^2
// over the masked pixels of matPT (CV_64FC1), discard the single worst-fitting
// point, refit, and return the fitted value at the patch centre plus error
// statistics.
// Returns: val[0] = fitted value at the patch centre,
//          val[1] = max abs residual, val[2] = mean abs residual,
//          val[3] = std-dev of abs residual.
// Falls back to the masked mean when fewer than 7 points are available, or
// when the fitted centre value deviates more than 50% from the masked mean.
// Note: FitMask is taken by reference and is replaced by an all-ones mask
// when passed in empty (meaning "use every pixel").
Scalar FitQuadric(Mat matPT, Mat& FitMask/*=NULL*/)
{
	Size sz = matPT.size();
	if (FitMask.empty())
	{
		FitMask = Mat::ones(sz.height, sz.width, CV_8UC1);
	}

	int ptCount = countNonZero(FitMask); // number of points taking part in the fit
	if (ptCount < 7)  return mean(matPT, FitMask); // 6 unknowns need > 6 points
	_ASSERT(ptCount >= 7);

	// Build the linear system  [1 x y xy x^2 y^2] * [A..F]^T = z
	Mat matXY(ptCount, 6, CV_64F);
	Mat matZ(ptCount, 1, CV_64F);
	Mat matZ0(ptCount, 1, CV_64F);
	Mat matCoff = Mat::zeros(6, 1, CV_64F);

	int i, j, iCount = 0;
	Scalar sVal;
	for (i = 0; i < sz.height; i++)
	{
		for (j = 0; j < sz.width; j++)
		{
			if (FitMask.at<uchar>(i, j) == 0) continue; // skip unmasked pixels
			matXY.at<double>(iCount, 0) = 1;
			matXY.at<double>(iCount, 1) = j;     // x = column
			matXY.at<double>(iCount, 2) = i;     // y = row
			matXY.at<double>(iCount, 3) = i * j; // x*y
			matXY.at<double>(iCount, 4) = j * j; // x^2
			matXY.at<double>(iCount, 5) = i * i; // y^2
			sVal = matPT.at<double>(i, j);
			matZ.at<double>(iCount, 0) = sVal.val[0];
			iCount++;
		}
	}
	double maxError;
	// minMaxIdx writes one index per matrix dimension; matZ0 is a 2-D Mat, so
	// a single int (as before) would be overrun by one element.
	int maxIdx[2] = { 0, 0 };
	int maxPosPt;
	Scalar meanError, stdError;

	bool bResult = solve(matXY, matZ, matCoff, CV_SVD); // least-squares via SVD
	_ASSERT(bResult != 0);

	matZ0 = matXY * matCoff;
	matZ0 = abs(matZ - matZ0); // per-point absolute residual
	minMaxIdx(matZ0, NULL, &maxError, NULL, maxIdx);
	maxPosPt = maxIdx[0];      // row of the worst-fitting point

	// Drop the single worst point and refit with the remaining ones.
	Mat matLeftXY(ptCount - 1, 6, CV_64F);
	Mat matLeftZ(ptCount - 1, 1, CV_64F);
	for (i = 0; i < maxPosPt; i++)
	{
		matLeftXY.at<double>(i, 0) = 1;
		matLeftXY.at<double>(i, 1) = matXY.at<double>(i, 1);
		matLeftXY.at<double>(i, 2) = matXY.at<double>(i, 2);
		matLeftXY.at<double>(i, 3) = matXY.at<double>(i, 3);
		matLeftXY.at<double>(i, 4) = matXY.at<double>(i, 4);
		matLeftXY.at<double>(i, 5) = matXY.at<double>(i, 5);
		matLeftZ.at<double>(i, 0) = matZ.at<double>(i, 0);
	}
	for (i = maxPosPt + 1; i < ptCount; i++)
	{
		matLeftXY.at<double>(i - 1, 0) = 1;
		matLeftXY.at<double>(i - 1, 1) = matXY.at<double>(i, 1);
		matLeftXY.at<double>(i - 1, 2) = matXY.at<double>(i, 2);
		matLeftXY.at<double>(i - 1, 3) = matXY.at<double>(i, 3);
		matLeftXY.at<double>(i - 1, 4) = matXY.at<double>(i, 4);
		matLeftXY.at<double>(i - 1, 5) = matXY.at<double>(i, 5);
		matLeftZ.at<double>(i - 1, 0) = matZ.at<double>(i, 0);
	}
	solve(matLeftXY, matLeftZ, matCoff, CV_SVD);

	// Error statistics of the final fit, evaluated over ALL points (including
	// the discarded one), as in the original implementation.
	matZ0 = matXY * matCoff;
	matZ0 = abs(matZ - matZ0);
	minMaxIdx(matZ0, NULL, &maxError, NULL, maxIdx);
	meanStdDev(matZ0, meanError, stdError);

	double QuadricParamsA = matCoff.at<double>(0, 0);
	double QuadricParamsB = matCoff.at<double>(1, 0);
	double QuadricParamsC = matCoff.at<double>(2, 0);
	double QuadricParamsD = matCoff.at<double>(3, 0);
	double QuadricParamsE = matCoff.at<double>(4, 0);
	double QuadricParamsF = matCoff.at<double>(5, 0);

	Scalar scalar;
	// Evaluate the quadric at the patch centre. Use floating-point division:
	// the previous integer "/ 2" truncated the centre for even-sized patches
	// (FitPlane already used / 2.0).
	double x = (sz.width - 1) / 2.0, y = (sz.height - 1) / 2.0;
	scalar.val[0] = QuadricParamsA + QuadricParamsB * x + QuadricParamsC * y + QuadricParamsD * x * y + QuadricParamsE * x * x + QuadricParamsF * y * y;
	scalar.val[1] = maxError;
	scalar.val[2] = meanError.val[0];
	scalar.val[3] = stdError.val[0];

	// Reject implausible fits: fall back to the masked mean when the fitted
	// centre value deviates by more than 50% from it.
	Scalar scalar_mean = mean(matPT, FitMask);
	if (scalar.val[0] > 1.5 * scalar_mean[0] || scalar.val[0] < 0.5 * scalar_mean[0]) {
		cout << "scalar_mean" << scalar_mean.val[0] << endl;
		return scalar_mean;
	}
	cout << "scalar" << scalar.val[0] << endl;
	return scalar;
}
// Post-processing surface fitting: fill the remaining holes in the depth map
// by repeatedly fitting a local surface (currently FitMean over a
// (2*NR+1)x(2*NR+1) window of already-recovered pixels) at every contour
// point of the unrecovered region, until no holes remain.
// imgMinAreaMask: minimal-area image; its black border is folded into the
//                 seed mask so the border is not treated as a hole.
// matDepth:       per-pixel depth (CV_64FC1), updated in place.
// matFinalSeeds:  on entry the normalised depth; replaced by the
//                 recovered-pixel mask (CV_8UC1) and updated in place.
// Shows progress windows; dumps the final depth to "matDepth.jpg"/"fitdata.txt".
void PostProcessing(Mat& imgMinAreaMask, Mat& matDepth, Mat& matFinalSeeds)
{
	// Mark the black border of the minimal-area mask as already "done".
	Mat mask0 = Mat::zeros(imgMinAreaMask.rows, imgMinAreaMask.cols, imgMinAreaMask.type());
	Mat mask_black;
	compare(imgMinAreaMask, mask0, mask_black, CMP_EQ); // black-border mask
	matFinalSeeds = matFinalSeeds > 0; // recovered-pixel mask, CV_8UC1
	std::vector<cv::Mat> mask_black_channels(3);
	cv::split(mask_black, mask_black_channels);
	add(matFinalSeeds, mask_black_channels[0], matFinalSeeds); // fill the black border

	Mat imgDepth = matDepth;      // alias: per-pixel depth (CV_64FC1)
	Mat imgSeeds = matFinalSeeds; // alias: recovered-pixel mask (CV_8UC1)

	int NR = 7; // fitting-neighbourhood radius -> (2*NR+1)^2 window
	Mat matFitWin;
	Rect rectFitWin = Rect(0, 0, 2 * NR + 1, 2 * NR + 1);
	Mat winmask(2 * NR + 1, 2 * NR + 1, CV_8UC1);
	vector<Point> contourPts = GetContourPts(imgSeeds);
	Point pt;
	Scalar sResult;
	Mat imgtmp(matFinalSeeds.rows, matFinalSeeds.cols, CV_8UC3);
	while (!contourPts.empty())
	{
		cout << "边缘点个数: " << contourPts.size() << endl;
		for (size_t i = 0; i < contourPts.size(); i++)
		{
			pt = contourPts[i];
			rectFitWin.x = pt.x - NR;
			rectFitWin.y = pt.y - NR;
			// The window would leave the image: mark the point as done without
			// fitting. (The original fell through here without `continue`,
			// fitting a stale window and writing the previous point's —
			// on the first iteration an uninitialised — result into matDepth.)
			if (rectFitWin.x < 0 || rectFitWin.x + rectFitWin.width > imgDepth.cols ||
				rectFitWin.y < 0 || rectFitWin.y + rectFitWin.height > imgDepth.rows)
			{
				cout << pt << endl;
				imgSeeds.at<uchar>(pt.y, pt.x) = 255;
				continue;
			}
			matFitWin = imgDepth(rectFitWin); // fitting window around pt
			// Only already-recovered pixels (depth > 0.001) take part in the fit.
			winmask = matFitWin > 0.001;

			//sResult = FitPlane(matFitWin, winmask);
			//sResult = FitQuadric(matFitWin, winmask);
			sResult = FitMean(matFitWin, winmask);
			matDepth.at<double>(pt.y, pt.x) = sResult.val[0];
			imgSeeds.at<uchar>(pt.y, pt.x) = 255; // point is now recovered
		}

		// Progress visualisation: current contour in red on a black canvas.
		imgtmp.setTo(Scalar(0, 0, 0));
		for (size_t i = 0; i < contourPts.size(); i++) {
			circle(imgtmp, contourPts[i], 1, Scalar(0, 0, 255), -1);
		}
		imshow("曲面拟合", imgSeeds);
		imshow("边缘", imgtmp);
		waitKey(50);
		contourPts = GetContourPts(imgSeeds); // the hole region shrinks each pass
	}

	// Save the final depth map and its raw data.
	Mat imgdep;
	normalize(matDepth, imgdep, 0, 255, NORM_MINMAX);
	imshow("matDepth", matDepth);
	waitKey();
	imwrite("matDepth.jpg", imgdep);
	ofstream fout_fit;
	fout_fit.open("fitdata.txt");
	fout_fit << matDepth << endl;
}

// Entry point: two-view depth recovery from a vertically displaced image pair.
// Pipeline: SIFT extraction/matching -> relative pose (R, T) -> depth-swept
// induced homography images -> per-pixel ZNCC scores -> initial seed points ->
// seed propagation -> surface-fitting post-processing -> export of the
// recovered 3-D coordinates to "XYZ.txt".
int main()
{
	cv::utils::logging::setLogLevel(utils::logging::LOG_LEVEL_SILENT); // silence OpenCV internal logging
	string img1 = "110.bmp"; // upper-layer image
	string img2 = "55.bmp";  // lower-layer image (camera lowered)
	vector<string> img_names = { img1, img2 };
	vector<vector<KeyPoint>> key_points_for_all;
	vector<Mat> descriptor_for_all;
	vector<vector<Vec3b>> colors_for_all;
	vector<DMatch> matches;

	// Camera intrinsic matrix.
	Mat K(Matx33d(
		1908.10, 0, 352.94,
		0, 1904.91, 287.51,
		0, 0, 1));
	Mat img_1 = imread(img_names[0]); // upper-layer image
	Mat img_2 = imread(img_names[1]); // lower-layer image

	/*--------------------- feature extraction and matching ---------------------*/
	extract_features(img_names, key_points_for_all, descriptor_for_all, colors_for_all);

	// RANSAC-filtered keypoints and matches (mismatches removed).
	vector<KeyPoint> RR_KP1, RR_KP2;
	vector<DMatch> RR_matches;
	Mat H = match_features(img_1, img_2, descriptor_for_all[0], descriptor_for_all[1], matches, key_points_for_all[0], key_points_for_all[1], RR_KP1, RR_KP2, RR_matches);

	// Relative pose (extrinsics) from the matched points.
	vector<Point2f> p1, p2;
	Mat R, T; // rotation matrix and translation vector
	Mat mask; // >0: inlier match, ==0: outlier
	get_matched_points(key_points_for_all[0], key_points_for_all[1], matches, p1, p2);
	find_transform(K, p1, p2, R, T, mask);
	cout << "R:" << endl << R << endl;
	cout << "T:" << endl << T << endl;

	/*--------------------- depth recovery via maximal ZNCC ---------------------*/
	// Common region of the two views.
	Rect rect;
	CvPoint ori; // top-left corner of the common region in the upper image
	FindSameRect(img_1, img_2, H, rect, ori);
	cout << "rect " << rect << endl;
	cout << "ori " << ori.x << " " << ori.y << endl;

	// Upper-layer sub-image: keep the full frame size, zero outside the ROI.
	Mat resize_top;
	Mat mask_ = Mat::zeros(img_1.size(), CV_8UC1);
	mask_(rect).setTo(255);
	img_1.copyTo(resize_top, mask_);

	// Lower-layer sub-image resized to the common region.
	Mat ImageDown;
	resize(img_2, ImageDown, Size(rect.width, rect.height));

	// Transform parameters between the two layers.
	Point2f Scale; // lower-image scale (identity: ImageDown is already resized)
	Scale.x = Scale.y = 1.0;
	Point2f Tcenter; // upper-image translation, from H applied to the origin
	double t[3] = { 0,0,1 };
	Mat t_ = Mat(1, 3, CV_64FC1, t);
	Mat temp_t = H * (t_.t());
	Tcenter.x = temp_t.at<double>(0, 0) / temp_t.at<double>(2, 0);
	Tcenter.y = temp_t.at<double>(1, 0) / temp_t.at<double>(2, 0);
	cout << "Tcenter:" << Tcenter << endl;

	// Estimate the ground relief (depth range) from the matched features...
	double zMin, zMax;
	EstimateDepthRange(K, R, T, RR_KP1, RR_KP2, RR_matches, zMin, zMax);
	cout << "zmin：" << zMin << endl;
	cout << "zmax：" << zMax << endl;

	// ...then override it from the known real-world heights.
	double h_max, h_min, h_down;
	printf("输入 地面与上层图像最大距离 最小距离 相机下降高度：");
	//scanf("%lf %lf %lf",&h_max,&h_min,&h_down);
	h_max = 110, h_min = 100, h_down = 55; // fixed values instead of stdin input
	double s = fabs(T.at<double>(2, 0)) / h_down; // estimated-to-metric scale
	zMax = h_max * s;
	zMin = h_min * s;
	cout << "\n" << endl;
	cout << "zmin：" << zMin << endl;
	cout << "zmax：" << zMax << endl;

	// Induced (homography-warped) images, one per depth layer.
	vector<Mat> HomeMat;
	ComputerHomeMat(Scale, Point2f(rect.x, rect.y), R, T, K, zMax, zMin, depth_level, HomeMat);
	vector<Mat> Image_out;
	ComputerHomeImages(img_2, HomeMat, depth_level, Size(img_1.cols, img_1.rows), Image_out);

	/*--------------------- depth recovery via seed growing ---------------------*/
	// Per-pixel, per-layer ZNCC scores inside the ROI.
	Mat NccMat;
	ComputerNccMat(resize_top, rect, ori, Image_out, 8, NccMat);
	NccMat = NccMat(rect); // only the ROI data is needed

	// Minimal region for depth recovery.
	Mat imgMinAreaMask = Image_out[depth_level - 1](rect);

	/*--------------------- initial seed points ---------------------*/
	Mat MaxScore, StdScore, evaluationScores, matDepth; // max ZNCC, ZNCC-curve variance, evaluation score, depth
	vector<Mat> MultiTopScore; // positions/scores of the top-4 ZNCC peaks (multi-peak case)
	vector<seed> SeedPoints;   // seed points
	int seedCount;             // number of seed points
	int BSPFTh = 3; // similarity threshold: larger -> weaker similarity constraint

	// RANSAC-filtered feature points of image 1, shifted into ROI coordinates.
	vector<KeyPoint> KP1_roi(RR_KP1.size());
	for (size_t i = 0; i < RR_KP1.size(); i++) {
		int x = RR_KP1[i].pt.x - ori.x;
		int y = RR_KP1[i].pt.y - ori.y;
		if (x >= rect.width || x < 0 || y >= rect.height || y < 0) continue;
		KP1_roi[i].pt.x = x;
		KP1_roi[i].pt.y = y;
	}

	// All feature points of image 1, shifted into ROI coordinates.
	vector<KeyPoint> KP1_roi_all;
	KeyPoint kp_temp;
	cout << "key_points_for_all[0] " << key_points_for_all[0].size() << endl;
	cout << "rect " << rect.size() << endl;
	for (size_t i = 0; i < key_points_for_all[0].size(); i++) {
		int x = key_points_for_all[0][i].pt.x - ori.x;
		int y = key_points_for_all[0][i].pt.y - ori.y;
		if (x >= rect.width || x < 0 || y >= rect.height || y < 0) continue;
		// Skip consecutive duplicate keypoints. Guard i > 0: the original
		// indexed [i - 1] unconditionally and read out of bounds when i == 0.
		if (i > 0
			&& key_points_for_all[0][i].pt.x == key_points_for_all[0][i - 1].pt.x
			&& key_points_for_all[0][i].pt.y == key_points_for_all[0][i - 1].pt.y) continue;
		kp_temp.pt.x = x; kp_temp.pt.y = y;
		KP1_roi_all.push_back(kp_temp);
	}
	cout << "KP1_roi_all " << KP1_roi_all.size() << endl;

	// Find the initial seed points (matched points are passed in as well).
	matDepth = Mat::zeros(imgMinAreaMask.rows, imgMinAreaMask.cols, CV_64FC1);
	cout << "求初始种子点 " << endl;
	seedCount = MyFindInitialSeeds_region(imgMinAreaMask, NccMat, MaxScore, StdScore, evaluationScores, MultiTopScore, SeedPoints, KP1_roi_all, matDepth, depth_level, BSPFTh);
	cout << "seedcount:" << seedCount << endl;

	/*--------------------- seed propagation ---------------------*/
	Mat matFinalSeeds; // matDepth mapped to 0-255
	SeedPropagation(imgMinAreaMask, NccMat, MaxScore, StdScore, evaluationScores, MultiTopScore, SeedPoints, matDepth, depth_level, seedCount, BSPFTh, matFinalSeeds);

	/*--------------------- surface-fitting post-processing ---------------------*/
	PostProcessing(imgMinAreaMask, matDepth, matFinalSeeds);

	/*--------------------- compute and export real 3-D coordinates ---------------------*/
	ofstream fout_xyz;
	fout_xyz.open("XYZ.txt");

	Mat K_inv;
	cv::invert(K, K_inv);
	Mat imageCoor(3, 1, CV_64FC1);
	Mat tep;
	double z, x, y;

	printf("开始计算xyz：\n");
	for (int i = 0; i < matDepth.rows; i++) {
		for (int j = 0; j < matDepth.cols; j++) {
			// NOTE(review): the pixel is back-projected as (i, j, 1); the usual
			// convention is (x, y, 1) = (col, row, 1) = (j, i, 1). Confirm
			// whether the swap is intentional elsewhere in the project.
			imageCoor = (Mat_<double>(3, 1) << i, j, 1);
			tep = K_inv * imageCoor;

			// Map the recovered layer index to a metric height.
			// NOTE(review): the divisor 40 does not match depth_level (30) —
			// confirm the intended normalisation range.
			z = h_min + ((double)matDepth.at<double>(i, j) / 40) * (h_max - h_min);
			x = z * (tep.at<double>(0, 0));
			y = z * (tep.at<double>(1, 0));

			fout_xyz << x << " " << y << " " << z << "\n";
		}
	}
	printf("计算结束\n");
	return 0;
}