// C standard library
#include <stdio.h>
#include <cassert>
#include <cmath>
#include <cstdlib>
#include <cstring>
#include <ctime>
// C++ standard library
#include <algorithm>
#include <iostream>
#include <limits>
#include <utility>
#include <vector>
// Third-party: OpenCV, Boost, Qt
#include "opencv2/core/core.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/nonfree/nonfree.hpp"
#include <opencv/cv.h>
#include <boost/typeof/typeof.hpp>
#include <boost/tuple/tuple.hpp>
#include <QString>

using namespace cv;

// Approximate float equality: true when the two values differ by
// strictly less than 1e-4 in either direction.
bool floatEqualTo(float num1, float num2)
{
    if (num2 <= num1 - 1e-4)    // num2 too far below num1
        return false;
    return num2 < num1 + 1e-4;  // num2 not too far above num1
}

// "Fake-delete" the keypoint at index `pos` by setting its coordinates to
// (-1, -1) instead of physically removing it, so other indices into `vec`
// stay valid.
void deleteElementFake(vector<KeyPoint> &vec, int pos)
{
    // Bounds check: the original walked an empty-bodied loop up to `pos`
    // and then indexed without verifying pos < size (out-of-bounds write).
    if (pos < 0 || pos >= (int)vec.size())
        return;
    vec[pos].pt.x = -1;
    vec[pos].pt.y = -1;
}

// Physically erase the keypoint at index `pos`; out-of-range positions are
// ignored (matching the original's behaviour of scanning to the end and
// doing nothing).
void deleteElement(vector<KeyPoint> &vec, int pos)
{
    // A vector is random-access: erase directly instead of the original
    // O(n) iterator walk.
    if (pos < 0 || pos >= (int)vec.size())
        return;
    vec.erase(vec.begin() + pos);
}

// Draw index-paired keypoints (img1KP[i] <-> img2KP[i]) onto a side-by-side
// canvas `matchedImg` (left image occupying the first `left.cols` columns),
// one random colour per pair. `matchedImg` is converted GRAY->BGR in place.
void myDrawMatches(cv::Mat &left,vector<KeyPoint> &img1KP,
                   vector<KeyPoint> &img2KP,
                   cv::Mat &matchedImg)
{
    assert((int)img1KP.size() == (int)img2KP.size());
    // Generate a random colour for each match pair.
    // Seed ONCE: the original reseeded inside the loop — srand(i)/srand(i*10)
    // made r and g deterministic per index, and srand(time(NULL)) gave every
    // pair drawn in the same second the identical blue value.
    srand((unsigned)time(NULL));
    cv::cvtColor(matchedImg,matchedImg,CV_GRAY2BGR);
    for(int i = 0; i < (int)img1KP.size(); ++i) {
        int r = rand() % 256;
        int g = rand() % 256;
        int b = rand() % 256;
        cv::Point2f leftPt,rightPt;
        leftPt = img1KP[i].pt;
        rightPt.x = img2KP[i].pt.x + left.cols;  // shift into the right half
        rightPt.y = img2KP[i].pt.y;
        cv::circle(matchedImg,leftPt,3,cv::Scalar(r,g,b));
        cv::circle(matchedImg,rightPt,3,cv::Scalar(r,g,b));
        cv::line(matchedImg,leftPt,rightPt,cv::Scalar(r,g,b));
    }
}
// Check whether the matched point trainPt falls inside the
// blockWidth x blockHeight search window centred on queryPt.
bool isInSearchBlock(KeyPoint &queryPt, KeyPoint &trainPt,int blockWidth, int blockHeight) {
    const double dx = fabs(queryPt.pt.x - trainPt.pt.x);
    const double dy = fabs(queryPt.pt.y - trainPt.pt.y);
    // Half-extents use integer division, as in the surrounding code.
    return dx < blockWidth / 2 && dy < blockHeight / 2;
}

// Compare the areaSize x areaSize neighbourhoods around two keypoints by
// building a coarse grey-level histogram of each and measuring the cosine
// of the angle between the two histogram vectors.
// Returns true when the patches are similar (cosine > 0.96).
bool isAreaSimilar(cv::Mat &queryImg,KeyPoint &queryPt,
                   cv::Mat &trainImg, KeyPoint &trainPt,
                   int areaSize)
{
    int radius = (areaSize - 1) / 2;    // neighbourhood half-extent
    // Reject the pair when EITHER point lacks a full neighbourhood. The
    // original required BOTH points to be at a border (&&) — letting the
    // sampling loop read out of bounds when only one was — and it checked a
    // fixed 3x3 margin regardless of areaSize.
    if(queryPt.pt.x < radius || queryPt.pt.y < radius
            || queryPt.pt.x >= queryImg.cols - radius
            || queryPt.pt.y >= queryImg.rows - radius
            || trainPt.pt.x < radius || trainPt.pt.y < radius
            || trainPt.pt.x >= trainImg.cols - radius
            || trainPt.pt.y >= trainImg.rows - radius) {
        std::cout<<"Insufficient Area."<<std::endl;
        return false;
    }
    int level = areaSize * areaSize;                                   // number of histogram bins
    int pixPrelevel = static_cast<int>((255.0 / (float)level + 0.5));  // width of each bin
    std::vector<int> queryStatisticHist(level,0),trainStatisticHist(level,0);

    // Sample the full areaSize x areaSize window; the original loop used `<`
    // and missed the last row and column.
    for(int i = -radius; i <= radius; ++i) {
        for(int j = -radius; j <= radius; ++j) {
            ++queryStatisticHist[queryImg.at<uchar>(queryPt.pt.y + i,queryPt.pt.x + j) / (pixPrelevel + 1)];
            ++trainStatisticHist[trainImg.at<uchar>(trainPt.pt.y + i,trainPt.pt.x + j) / (pixPrelevel + 1)];
        }
    }
    // Cosine similarity between the two histogram vectors.
    float nominator = 0;
    float denominator_query = 0.0, denominator_train = 0.0;
    for(int i = 0; i < level; ++i) {
        nominator += queryStatisticHist[i] * trainStatisticHist[i];
        denominator_query += queryStatisticHist[i] * queryStatisticHist[i];
        denominator_train += trainStatisticHist[i] * trainStatisticHist[i];
    }
    float denominator = sqrt(denominator_query) * sqrt(denominator_train);
    if(denominator < 1e-6f)    // degenerate (empty) histograms — cannot compare
        return false;
    float cosine = nominator / denominator;
    return cosine - 0.96 > 0.00001;
}
// Original (uniform) LBP: replace every interior pixel with its 8-bit local
// binary pattern when the pattern is "uniform" (at most two 0/1 transitions
// around the circle), or with the shared label 59 otherwise.
void getTextureMap(cv::Mat &queryImg)
{
    // Read neighbours from an untouched copy: the original wrote LBP codes
    // back into queryImg while still scanning it, so pixels in already
    // processed rows were read back as LBP values, not grey levels.
    cv::Mat src = queryImg.clone();
    // Neighbour offsets in the same order as the original tmp[0..7]
    // assignments: (i-1,j-1),(i,j-1),(i+1,j-1),(i+1,j),(i+1,j+1),
    // (i,j+1),(i-1,j+1),(i-1,j).
    static const int dy[8] = {-1, 0, 1, 1, 1, 0, -1, -1};
    static const int dx[8] = {-1,-1,-1, 0, 1, 1,  1,  0};
    for(int i = 1; i < src.rows - 1; ++i) {
        for(int j = 1; j < src.cols - 1; ++j) {
            uchar center = src.at<uchar>(i,j);
            int tmp[8];
            for(int k = 0; k < 8; ++k)
                tmp[k] = src.at<uchar>(i + dy[k], j + dx[k]) > center ? 1 : 0;
            // Count 0/1 transitions around the closed circle.
            int sum = 0;
            for(int k = 0; k < 8; ++k)
                sum += abs(tmp[k] - tmp[(k + 1) % 8]);
            if (sum <= 2)   // uniform pattern: keep its 8-bit code
                queryImg.at<uchar>(i,j) = (uchar)(tmp[0]*128 + tmp[1]*64 +
                        tmp[2]*32 + tmp[3]*16 +
                        tmp[4]*8 + tmp[5]*4 + tmp[6]*2 + tmp[7]);
            else            // non-uniform patterns share one label
                queryImg.at<uchar>(i,j) = 59;
        }
    }
}

// Texture-based similarity test: compares the LBP texture maps (as produced
// by getTextureMap) around the two keypoints using the same histogram-cosine
// criterion as isAreaSimilar, over an areaSize x areaSize window.
bool isTextureSimilar(cv::Mat &queryTexture, KeyPoint &queryPt,
                      cv::Mat &trainTexture,KeyPoint &trainPt,int areaSize)
{
    return isAreaSimilar(queryTexture,queryPt,trainTexture,trainPt,areaSize);
}

// Filter raw SIFT matches in place: drop any match whose neighbourhoods are
// not similar, and — when most matches stay inside the local search block,
// i.e. camera motion looks small — also drop matches that jump outside it.
void cleanMatchesProposed(cv::Mat &img1, cv::Mat &img2, vector<DMatch> &match,
                          vector<KeyPoint> &queryKeypoints, vector<KeyPoint> &trainKeypoints)
{
    assert(img1.cols == img2.cols && img1.rows == img2.rows);
    // First pass: estimate camera motion by counting how many matches fall
    // inside the half-image search block around their query point.
    int inBlockNum = 0, outBlockNum = 0;
    for(int i = 0; i < (int)match.size(); ++i) {
        KeyPoint &qPt = queryKeypoints[match[i].queryIdx];
        KeyPoint &tPt = trainKeypoints[match[i].trainIdx];
        if(isInSearchBlock(qPt, tPt, img1.cols / 2, img1.rows / 2))
            ++inBlockNum;
        else
            ++outBlockNum;
    }
    std::cout<<"inBlockNum="<<inBlockNum<<" outBlockNum="<<outBlockNum<<std::endl;

    // Second pass: erase matches failing the tests (iterator advances in
    // exactly one place per element).
    BOOST_AUTO(it, match.begin());
    while(it != match.end()) {
        bool drop = !isAreaSimilar(img1, queryKeypoints[it->queryIdx],   // area-similarity test
                                   img2, trainKeypoints[it->trainIdx],
                                   5);
        if(inBlockNum > outBlockNum)    // camera moved only a little
            drop = drop || !isInSearchBlock(queryKeypoints[it->queryIdx],
                                            trainKeypoints[it->trainIdx],
                                            img1.cols / 2, img1.rows / 2);
        if(drop)
            it = match.erase(it);
        else
            ++it;
    }
}

// Euclidean distance between the locations of two keypoints.
float distance(cv::KeyPoint &p1, cv::KeyPoint &p2)
{
    float dx = p1.pt.x - p2.pt.x;
    float dy = p1.pt.y - p2.pt.y;
    return sqrt(dx * dx + dy * dy);
}

// Assign every keypoint to the cluster whose centre — the matched keypoint
// referenced by the corresponding entry of `match` — is closest.
// `label` selects which side of the match indexes into `keypoints`:
// "query" -> match[j].queryIdx, anything else -> match[j].trainIdx.
// keypointsSet must have match.size() entries.
void cluster(vector<vector<KeyPoint> > &keypointsSet,vector<KeyPoint> &keypoints,vector<DMatch> &match,QString label)
{
    // With no cluster centres there is nothing to assign; the original then
    // indexed keypointsSet with an uninitialized `pos` (UB).
    if(match.empty())
        return;
    const bool useQuery = (label == "query");   // hoisted out of both loops
    for(int i = 0; i < (int)keypoints.size(); ++i) {
        float min = std::numeric_limits<float>::max();
        int pos = 0;
        for(int j = 0; j < (int)match.size(); ++j) {
            int tempIdx = useQuery ? match[j].queryIdx : match[j].trainIdx;
            float dist = distance(keypoints[i],keypoints[tempIdx]);
            if(dist < min) {
                pos = j;
                min = dist;
            }
        }
        keypointsSet[pos].push_back(keypoints[i]);
    }
}

// True when the segments centre->point in the query and the train image
// have nearly the same orientation (within 10 degrees).
bool isAngleNear(KeyPoint &queryPt,KeyPoint queryCenter,
                 KeyPoint &trainPt,KeyPoint trainCenter)
{
    // The original multiplied the SLOPE dy/dx by 180/pi as if the slope were
    // an angle in radians, and divided by zero for vertical segments.
    // atan2 gives the true orientation and handles dx == 0.
    float angle1 = atan2(queryCenter.pt.y - queryPt.pt.y,
                         queryCenter.pt.x - queryPt.pt.x) * 180.0f / 3.14159265f;
    float angle2 = atan2(trainCenter.pt.y - trainPt.pt.y,
                         trainCenter.pt.x - trainPt.pt.x) * 180.0f / 3.14159265f;

    float diff = fabs(angle1 - angle2);
    if(diff > 180.0f)           // wrap around the +/-180 degree seam
        diff = 360.0f - diff;
    return diff < 10.0f;
}

// Try to "revive" extra matches: for each clustered query keypoint, look in
// the corresponding train cluster for a point whose distance to the cluster
// centre (an affine invariant), local appearance and orientation relative to
// the centre all agree. Accepted train points are removed from their cluster
// so they cannot be matched twice.
std::vector<std::pair<KeyPoint,KeyPoint> > revive(vector<vector<KeyPoint> > &queryKeypointsSet, vector<vector<KeyPoint> > &trainKeypointsSet,
                                                  vector<KeyPoint> &queryClassCenter,vector<KeyPoint> &trainClassCenter,Mat &img1,Mat &img2)
{
    // Build LBP texture maps of both images (saved to disk for inspection;
    // NOTE(review): the maps are written out but not used below — confirm).
    cv::Mat queryTexture,trainTexture;
    img1.copyTo(queryTexture);
    img2.copyTo(trainTexture);
    getTextureMap(queryTexture);
    getTextureMap(trainTexture);
    cv::imwrite("queryTexture.jpg",queryTexture);
    cv::imwrite("trainTexture.jpg",trainTexture);
    std::vector<std::pair<KeyPoint,KeyPoint> > revivedMatch;
    // For every query point, search its cluster's train side for a
    // potential match.
    for(int i = 0; i < (int)queryKeypointsSet.size(); ++i) {
        for(int j = 0; j < (int)queryKeypointsSet[i].size(); ++j) {
            float distQ = distance(queryKeypointsSet[i][j], queryClassCenter[i]);
            int pos = -1;   // index of the accepted train point; -1 = none
                            // (original kept an unused `max` and an
                            // uninitialized `pos` guarded by a bool)
            for(int k = 0;k < (int)trainKeypointsSet[i].size(); ++k) {  // distance to the centre is affine-invariant
                float distT = distance(trainKeypointsSet[i][k],trainClassCenter[i]);
                float minDist = distQ > distT ? distT : distQ;
                // relative distance within 10%, similar appearance,
                // similar orientation w.r.t. the cluster centre
                if(fabs(distQ - distT) / minDist < 0.1 &&
                        isAreaSimilar(img1,queryKeypointsSet[i][j],img2,trainKeypointsSet[i][k],3)
                        &&
                        isAngleNear(queryKeypointsSet[i][j],queryClassCenter[i],
                                    trainKeypointsSet[i][k],trainClassCenter[i])) {
                    pos = k;
                    break;
                }
            }
            if(pos >= 0) {
                revivedMatch.push_back(std::make_pair(queryKeypointsSet[i][j],trainKeypointsSet[i][pos]));
                // consume the train point so it is matched at most once
                deleteElement(trainKeypointsSet[i],pos);
            }
        }
    }
    return revivedMatch;
}

// Cluster both keypoint sets around the surviving stable matches, then try
// to revive additional correspondences inside the paired clusters.
std::vector<std::pair<KeyPoint,KeyPoint> > reviveSomeMatches(cv::Mat &img1, cv::Mat &img2, vector<DMatch> &match,
                                                             vector<KeyPoint> &queryKeypoints, vector<KeyPoint> &trainKeypoints)
{
    const int numClusters = (int)match.size();
    // One cluster per stable match, on each side.
    vector<vector<KeyPoint> > queryKeypointsSet(numClusters), trainKeypointsSet(numClusters);
    cluster(queryKeypointsSet,queryKeypoints,match,"query");
    cluster(trainKeypointsSet,trainKeypoints,match,"train");
    // The matched keypoints themselves serve as the cluster centres.
    vector<KeyPoint> queryClassCenter,trainClassCenter;
    queryClassCenter.reserve(numClusters);
    trainClassCenter.reserve(numClusters);
    for(int i = 0; i < numClusters; ++i) {
        queryClassCenter.push_back(queryKeypoints[match[i].queryIdx]);
        trainClassCenter.push_back(trainKeypoints[match[i].trainIdx]);
    }
    return revive(queryKeypointsSet,trainKeypointsSet,queryClassCenter,trainClassCenter,img1,img2);
}

// Zero-mean normalized cross-correlation over the 3x3 neighbourhoods
// centred at the given coordinates. queryInfo / trainInfo carry
// (x, y, neighbourhood mean). Returns a value in [-1, 1]; 1 means the
// patches are perfectly correlated.
float calcZNCC(cv::Mat &query,boost::tuples::tuple<int,int,int> queryInfo,  //x,y,avg
               cv::Mat &train,boost::tuples::tuple<int,int,int> trainInfo)
{
    // Fixes vs the original: (1) the numerator kept the query row fixed
    // while varying the train row, so mismatched pixels were paired;
    // (2) abs() was applied to the centred terms, though ZNCC terms are
    // signed; (3) the denominator used |p^2 - mean^2| per pixel instead of
    // the variance term (p - mean)^2; (4) a zero denominator was unguarded.
    float nominator = 0.f;
    float denominator_query = 0.f, denominator_train = 0.f;
    for(int dy = -1; dy <= 1; ++dy) {
        for(int dx = -1; dx <= 1; ++dx) {
            float dq = (float)query.at<uchar>(queryInfo.get<1>() + dy, queryInfo.get<0>() + dx) - queryInfo.get<2>();
            float dt = (float)train.at<uchar>(trainInfo.get<1>() + dy, trainInfo.get<0>() + dx) - trainInfo.get<2>();
            nominator += dq * dt;
            denominator_query += dq * dq;
            denominator_train += dt * dt;
        }
    }
    float denominator = sqrt(denominator_query * denominator_train);
    if(denominator < 1e-6f)   // flat patch: correlation undefined
        return 0.f;
    return nominator / denominator;
}
// Normalized cross-correlation over the 3x3 neighbourhoods (no mean
// subtraction needed). queryInfo / trainInfo carry (x, y).
float calcNCC(cv::Mat &query,boost::tuples::tuple<int,int> queryInfo,
              cv::Mat &train,boost::tuples::tuple<int,int> trainInfo)
{
    // The original varied the query row in the numerator while keeping the
    // train row fixed; both patches must walk the same 3x3 offsets.
    // double accumulators avoid int overflow in the denominator product.
    double sum_nominator = 0, sum_denominator_query = 0, sum_denominator_train = 0;
    for(int dy = -1; dy <= 1; ++dy) {
        for(int dx = -1; dx <= 1; ++dx) {
            double q = query.at<uchar>(queryInfo.get<1>() + dy, queryInfo.get<0>() + dx);
            double t = train.at<uchar>(trainInfo.get<1>() + dy, trainInfo.get<0>() + dx);
            sum_nominator += q * t;
            sum_denominator_query += q * q;
            sum_denominator_train += t * t;
        }
    }
    double denominator = sqrt(sum_denominator_query * sum_denominator_train);
    if(denominator < 1e-9)    // both patches all-zero
        return 0.f;
    return static_cast<float>(sum_nominator / denominator);
}

// Zero-mean sum of squared differences over the 3x3 neighbourhoods.
// queryInfo / trainInfo carry (x, y, neighbourhood mean).
int calcZSSD(cv::Mat &query,boost::tuples::tuple<int,int,int> queryInfo,  //x,y,avg
               cv::Mat &train,boost::tuples::tuple<int,int,int> trainInfo)
{
    // The original never applied the loop's column offset, accumulated the
    // +1 row twice and skipped the -1 row entirely.
    int sum = 0;
    for(int dy = -1; dy <= 1; ++dy) {
        for(int dx = -1; dx <= 1; ++dx) {
            int dq = query.at<uchar>(queryInfo.get<1>() + dy, queryInfo.get<0>() + dx) - queryInfo.get<2>();
            int dt = train.at<uchar>(trainInfo.get<1>() + dy, trainInfo.get<0>() + dx) - trainInfo.get<2>();
            sum += (dq - dt) * (dq - dt);
        }
    }
    return sum;
}

// Sum of squared differences over the 3x3 neighbourhoods.
// queryInfo / trainInfo carry (x, y).
int calcSSD(cv::Mat &query,boost::tuples::tuple<int,int> queryInfo,
            cv::Mat &train,boost::tuples::tuple<int,int> trainInfo)
{
    // The original dropped the column offset `i` and — worse — fell off the
    // end without returning `sum` (undefined behaviour).
    int sum = 0;
    for(int dy = -1; dy <= 1; ++dy) {
        for(int dx = -1; dx <= 1; ++dx) {
            int d = query.at<uchar>(queryInfo.get<1>() + dy, queryInfo.get<0>() + dx) -
                    train.at<uchar>(trainInfo.get<1>() + dy, trainInfo.get<0>() + dx);
            sum += d * d;
        }
    }
    return sum;
}

// Zero-mean sum of absolute differences over the 3x3 neighbourhoods.
// queryInfo / trainInfo carry (x, y, neighbourhood mean).
int calcZSAD(cv::Mat &query,boost::tuples::tuple<int,int,int> queryInfo,  //x,y,avg
             cv::Mat &train,boost::tuples::tuple<int,int,int> trainInfo)
{
    // The original ignored the column offset `i` and had no return
    // statement (undefined behaviour).
    int sum = 0;
    for(int dy = -1; dy <= 1; ++dy) {
        for(int dx = -1; dx <= 1; ++dx) {
            sum += abs((query.at<uchar>(queryInfo.get<1>() + dy, queryInfo.get<0>() + dx) - queryInfo.get<2>()) -
                       (train.at<uchar>(trainInfo.get<1>() + dy, trainInfo.get<0>() + dx) - trainInfo.get<2>()));
        }
    }
    return sum;
}

// Sum of absolute differences over the 3x3 neighbourhoods.
// queryInfo / trainInfo carry (x, y).
int calcSAD(cv::Mat &query,boost::tuples::tuple<int,int> queryInfo,
            cv::Mat &train,boost::tuples::tuple<int,int> trainInfo)
{
    // The original ignored the column offset `i` and had no return
    // statement (undefined behaviour).
    int sum = 0;
    for(int dy = -1; dy <= 1; ++dy) {
        for(int dx = -1; dx <= 1; ++dx) {
            sum += abs(query.at<uchar>(queryInfo.get<1>() + dy, queryInfo.get<0>() + dx) -
                       train.at<uchar>(trainInfo.get<1>() + dy, trainInfo.get<0>() + dx));
        }
    }
    return sum;
}

// Score a candidate correspondence with ZNCC on the 3x3 neighbourhoods in
// the two depth images. Returns the ZNCC value; returns -1.0 when both
// points sit on their image borders (insufficient neighbourhood) — per the
// original design the caller treats that case as a direct match.
float isTrueMatches(cv::Mat dep_1, cv::Mat dep_2,cv::KeyPoint &queryIdx,cv::KeyPoint &trainIdx)
{
    // For every candidate point trainIdx compute the energy (ZNCC here) of
    // its 3x3 neighbourhood; the caller picks the best-scoring candidate.
    // If both points are on a border, accept the pair directly (-1.0).
    // FIX: the train point is now checked against dep_2's dimensions — the
    // original used dep_1's size for both points.
    // NOTE(review): with only ONE point on a border this still proceeds and
    // calcZNCC reads outside the image; kept to preserve the documented
    // both-on-border contract — confirm intent.
    if((queryIdx.pt.x - dep_1.cols + 2 > 0.0001 || queryIdx.pt.y - dep_1.rows + 2 > 0.0001  // right/bottom border
        || queryIdx.pt.x - 1 < 0.0001 || queryIdx.pt.y - 1 < 0.0001 ) &&    // left/top border
            (trainIdx.pt.x - dep_2.cols + 2 > 0.0001 || trainIdx.pt.y - dep_2.rows + 2 > 0.0001
             || trainIdx.pt.x - 1 < 0.0001 || trainIdx.pt.y - 1 < 0.0001 ))
        return -1.0;
    // Round keypoint coordinates to the nearest integer pixel.
    int query_x = static_cast<int>(queryIdx.pt.x + 0.5);
    int query_y = static_cast<int>(queryIdx.pt.y + 0.5);
    int train_x = static_cast<int>(trainIdx.pt.x + 0.5);
    int train_y = static_cast<int>(trainIdx.pt.y + 0.5);
    // 3x3 neighbourhood means, consumed by the zero-mean correlation.
    int avg_query = (dep_1.at<uchar>(query_y,query_x) + dep_1.at<uchar>(query_y,query_x + 1)
                     + dep_1.at<uchar>(query_y,query_x - 1) + dep_1.at<uchar>(query_y + 1,query_x)
                     + dep_1.at<uchar>(query_y - 1,query_x) + dep_1.at<uchar>(query_y - 1,query_x + 1)
                     + dep_1.at<uchar>(query_y + 1,query_x + 1) + dep_1.at<uchar>(query_y + 1,query_x - 1)
                     + dep_1.at<uchar>(query_y - 1,query_x -1)) / 9; //3*3 window
    int avg_train = (dep_2.at<uchar>(train_y,train_x) + dep_2.at<uchar>(train_y,train_x + 1)
                     + dep_2.at<uchar>(train_y,train_x - 1) + dep_2.at<uchar>(train_y + 1,train_x)
                     + dep_2.at<uchar>(train_y - 1,train_x) + dep_2.at<uchar>(train_y - 1,train_x + 1)
                     + dep_2.at<uchar>(train_y + 1,train_x + 1) + dep_2.at<uchar>(train_y + 1,train_x - 1)
                     + dep_2.at<uchar>(train_y - 1,train_x - 1)) / 9; //3*3 window
    return calcZNCC(dep_1,boost::tuples::make_tuple(query_x,query_y,avg_query),
                    dep_2,boost::tuples::make_tuple(train_x,train_y,avg_train));
}

// Depth-information-only variant: keep a match only when the depth-image
// neighbourhoods of its two keypoints are similar.
void cleanMatchesZSIFT(vector<KeyPoint> &queryKeypoints,
                       vector<KeyPoint> &trainKeypoints,
                       vector<DMatch> &match)
{
    // Load as grayscale: isAreaSimilar samples single-channel uchar pixels,
    // but the original loaded 3-channel BGR images (default imread flag).
    cv::Mat dep_1 = cv::imread("Z:/dep_1.jpg", CV_LOAD_IMAGE_GRAYSCALE);
    cv::Mat dep_2 = cv::imread("Z:/dep_2.jpg", CV_LOAD_IMAGE_GRAYSCALE);

    // Erase-loop: advance the iterator in exactly one place per element.
    // The original also incremented in the for-header, which skipped the
    // element after every erase and could increment past end() (UB).
    BOOST_AUTO(m_pos,match.begin());
    while(m_pos != match.end()) {
        if(!isAreaSimilar(dep_1,queryKeypoints[(*m_pos).queryIdx], // area-similarity test
                          dep_2,trainKeypoints[(*m_pos).trainIdx],
                          3))
            m_pos = match.erase(m_pos);
        else
            ++m_pos;
    }
}
// Affine-invariant SIFT (ASIFT) match filtering.
// NOTE(review): not implemented yet — matches pass through unchanged.
// TODO: implement ASIFT-based filtering or remove this stub.
void cleanMatchesASIFT(cv::Mat &img1, cv::Mat &img2, vector<DMatch> &match,
                       vector<KeyPoint> &queryKeypoints, vector<KeyPoint> &trainKeypoints)
{

}

// Print command-line usage to stdout.
static void help()
{
    const char *usage =
            "\nThis program implement a novel method to improve SIFT matching results.\n"
            "Usage:\n improved-z-sift-matching.exe <method-type> <image1> <image2>\n"
            "<method-type>: proposed || zsift || asift || defaultsift\n";
    printf("%s", usage);
}

int main(int argc, char **argv)
{
    //调试用
//    Mat img1 = imread("Z:/im2.png", CV_LOAD_IMAGE_GRAYSCALE);
//    Mat img2 = imread("Z:/im6.png", CV_LOAD_IMAGE_GRAYSCALE);
    Mat img1 = imread("Z:/cat_1.bmp", CV_LOAD_IMAGE_GRAYSCALE);
    Mat img2 = imread("Z:/cat_2.bmp", CV_LOAD_IMAGE_GRAYSCALE);

    //    if(argc != 4)
    //    {
    //        help();
    //        return -1;
    //    }
    QString fileName = "result_" + QString(argv[1]) + ".jpg";

    //    Mat img1 = imread(argv[2], CV_LOAD_IMAGE_GRAYSCALE);
    //    Mat img2 = imread(argv[3], CV_LOAD_IMAGE_GRAYSCALE);

    // detecting keypoints
    SiftFeatureDetector detector(400);
    vector<KeyPoint> queryKeypoints, trainKeypoints;
    detector.detect(img1, queryKeypoints);
    detector.detect(img2, trainKeypoints);

    printf("Found %d and %d keypoints.\n", queryKeypoints.size(), trainKeypoints.size());
    // computing descriptors
    SiftFeatureDetector extractor;
    Mat descriptors1, descriptors2;
    extractor.compute(img1, queryKeypoints, descriptors1);
    extractor.compute(img2, trainKeypoints, descriptors2);
    // matching descriptors
    BFMatcher matcher(NORM_L2);
    vector<DMatch> matches;
    matcher.match(descriptors1, descriptors2, matches);
    //    std::cout<<"argv[1]="<<argv[1]<<std::endl;
    //    if(!strcmp(argv[1],"proposed"))
    cleanMatchesProposed(img1,img2,matches,queryKeypoints,trainKeypoints);
    //    else if(!strcmp(argv[1],"zsift"))
    //        cleanMatchesZSIFT(queryKeypoints,trainKeypoints,matches);
    //    else if(!strcmp(argv[1],"asift"))
    //        cleanMatchesASIFT(img1,img2,matches,queryKeypoints,trainKeypoints);
    //    else
    //        std::cout<<"Using default SIFT method.\n";
    //利用得到的稳定特征点匹配进行聚类，复活一些正确的匹配
    std::vector<std::pair<KeyPoint,KeyPoint> > revivedPoints = reviveSomeMatches(img1,img2,matches,queryKeypoints,trainKeypoints);
    std::cout<<"Revive "<<(int)revivedPoints.size()<<" matches."<<std::endl;
    std::vector<KeyPoint> query,train;
    BOOST_AUTO(pos,revivedPoints.begin());
    for(; pos != revivedPoints.end(); ++pos) {
        query.push_back((*pos).first);
        train.push_back((*pos).second);
    }
    //要想不显示没有匹配关系的点，只能使用myDrawMatches()
    // drawing the results
    Mat img_matches;
    std::cout<<"Found "<<(int)matches.size()<<" matches"<<std::endl;
    drawMatches(img1, queryKeypoints, img2, trainKeypoints, matches, img_matches);

    Mat rv_matches(img1.rows,img1.cols * 2,CV_8UC1);
    for(int i = 0; i < img1.rows; ++i)
        for(int j = 0; j < img1.cols; ++j)
            rv_matches.at<uchar>(i,j) = img1.at<uchar>(i,j);
    for(int i = 0; i < img2.rows; ++i)
        for(int j = 0; j < img2.cols; ++j)
            rv_matches.at<uchar>(i,img1.cols + j) = img2.at<uchar>(i,j);
    myDrawMatches(img1,query,train,rv_matches);
    imshow("matches", img_matches);
    imwrite(fileName.toLatin1().constData(),img_matches);
    imshow("rv_matches",rv_matches);
    imwrite("rv_matches.jpg",rv_matches);
    waitKey(0);
    //    //找出特征点数目较少的集合
    //    vector<KeyPoint> pointsLessNum;
    //    vector<KeyPoint> pointsLargerNum;
    //    cv::Mat leftImg;
    //    int flag = 0;
    //    if((int)queryKeypoints.size() >= (int)trainKeypoints.size()) {
    //        pointsLessNum.resize((int)trainKeypoints.size());
    //        pointsLargerNum.resize((int)queryKeypoints.size());
    //        copy(trainKeypoints.begin(),trainKeypoints.end(),pointsLessNum.begin());
    //        copy(queryKeypoints.begin(),queryKeypoints.end(),pointsLargerNum.begin());
    //        flag = 1;
    //        img1.copyTo(leftImg);
    //    }
    //    else {
    //        pointsLessNum.resize((int)queryKeypoints.size());
    //        pointsLargerNum.resize((int)trainKeypoints.size());
    //        copy(queryKeypoints.begin(),queryKeypoints.end(),pointsLessNum.begin());
    //        copy(trainKeypoints.begin(),trainKeypoints.end(),pointsLargerNum.begin());
    //        flag = 2;
    //        img2.copyTo(leftImg);
    //    }
    //    //利用深度信息改善匹配结果

    //    // computing descriptors
    //    namedWindow("matches", 1);
    //    Mat img_matches(img1.rows,img1.cols * 2,CV_8UC1);
    //    for(int i = 0; i < img1.rows; ++i)
    //        for(int j = 0; j < img1.cols; ++j)
    //            img_matches.at<uchar>(i,j) = img1.at<uchar>(i,j);
    //    for(int i = 0; i < img2.rows; ++i)
    //        for(int j = 0; j < img2.cols; ++j)
    //            img_matches.at<uchar>(i,img1.cols + j) = img2.at<uchar>(i,j);
    //    if(flag == 1)
    //        myDrawMatches(leftImg,pointsLessNum,matched,img_matches);
    //    else
    //        myDrawMatches(leftImg,matched,pointsLessNum,img_matches);
    //    imshow("matches", img_matches);
    //    waitKey(0);

    return 0;
}
