#include "tracking.h"
#include "complement.h"
#include "Convert.hpp"
#include "Algorithm.hpp"
#include "PoseOptimiser.hpp"
#include "Config.hpp"
#include "global.h"
#include <fstream>
#include "prosac.hpp"
#include <glog/logging.h>


using namespace google;
namespace markerAR
{
    extern Map* map;
    extern Ptr<cv::Size> imgSize;
    extern Ptr<ORB> orb;
    extern Ptr<cv::DescriptorMatcher> binaryMatcher;
    extern Ptr<ORB> myorb;
    extern Ptr<IntrinsicMatrix> K;
    extern Ptr<Keyframe> last;
    extern Ptr<ORB> orb_no;
    extern Mat debugimg;
    extern future<bool> keyframeThread;
    extern Ptr<BRIEF> brief;
    extern std::mutex mutex_keyframe_global;
    extern std::mutex mutex_marker_global;
    
#define TICK
    
#ifdef TICK
#define __TICKSTART__ {tm.reset();tm.start();}
#define __TICKSTOP__  tm.stop();
#define __TICKPRINT__ cout<<tm.getTimeSec()<<endl;
#else
    
#define __TICKSTART__
#define __TICKSTOP__
#define __TICKPRINT__
#endif
    
    
    // Background worker loop: continuously turns the latest `candidate`
    // frame into a new Keyframe whenever the tracker requests one by
    // setting addKeyframeFinished = false. Never returns; runs on the
    // detached thread started from the Tracking constructor.
    bool Tracking::tryAddKeyframe()
    {
        //®return true;
        while (true)
        {
            // No insertion requested yet: sleep 10 ms and poll again.
            if(addKeyframeFinished == true)
            {
                usleep(10000);
                continue;
            }
            // Snapshot the candidate frame into a fresh keyframe.
            Keyframe* now = new Keyframe();
            
            now->img = candidate.img.clone();
            now->setSmallBlurImage();
            now->markers = candidate.markers;
            
            // Extract ORB keypoints/descriptors on the full image (no mask).
            orb->operator()(now->img, Mat(), now->feature.kpts, now->feature.desp);

            // Query the bag-of-words index for visible markers and try to
            // register any markers not already tracked.
            auto result = bow.query(now->feature.kpts, now->feature.desp);
            cout<<"result count"<<result.size()<<endl;
            int size = result.size();   // NOTE(review): unused
            tryAddMarker(now->img, result);
            
            
            now->Hkm = candidate.Hkm;
            
            if (map->getKeyFrameSize()!=0) {
                // Match against the current keyframe to decide whether this
                // frame is different enough to be worth keeping.
                vector<vector<DMatch>> dmatches;
                binaryMatcher->knnMatch(now->feature.desp, currentKeyFrame->feature.desp,dmatches,2);
                
                static vector<Point2f> nowP2D, kfP2D;
                vector < int > idx;
                GetMatch::GetInliers(dmatches, idx, 0.6, 90);
                // Fewer than 20 inliers: too dissimilar / unreliable -- discard.
                if(idx.size()<20)
                {
                    addKeyframeFinished = true;
                    delete now;
                    continue;
                }
                // 20..69 inliers: sufficiently novel -- keep as a keyframe.
                if (idx.size() < 70)
                {
                    map->addKeyFrame(now);
                    currentKeyFrame = now;
                    addKeyframeFinished = true;
                    continue;
                }
                else
                {
                    // 70+ inliers: too similar to the current keyframe -- discard.
                    addKeyframeFinished = true;
                    delete now;
                    continue;
                }
            }
            // Map is empty: always accept the very first keyframe.
            map->addKeyFrame(now);
            currentKeyFrame = now;
            addKeyframeFinished = true;
//                    vector<DMatch> dmatch;
//                    GetMatch::GetDMatch(dmatches, idx, dmatch);
//                    GetMatch::preparePoint2D(now->feature.kpts, kf->feature.kpts, dmatch, nowP2D, kfP2D);
//                    Mat debugimg;
//            
//                    Mat inlier;
//                    findHomography(nowP2D, kfP2D, CV_RANSAC, 3.0, inlier);
//            
//                    int inliers = norm(inlier, NORM_L1);
            
        }
    }
    
    
    
    // Choose the marker pyramid level matching the marker's apparent size:
    // warp the four image corners through Hnm, compare the resulting quad
    // area with the full image area, and convert the area ratio into a
    // level via log base scaleFactor^2 (area scales quadratically with the
    // per-level linear scale factor).
    int computeScale(const cv::Size &imgSize, const Eigen::Matrix3d &Hnm,float scaleFactor)
    {
        float origin = imgSize.area();
        // The four image corners, clockwise from the top-left.
        vector<Point2f> vertex(4);
        vertex[0].x = 0;
        vertex[0].y = 0;   // FIX: was 1 -- the top-left corner is (0,0)
        vertex[1].x = imgSize.width-1;
        vertex[1].y = 0;
        vertex[2].x = imgSize.width-1;
        vertex[2].y = imgSize.height-1;
        vertex[3].x = 0;
        vertex[3].y = imgSize.height - 1;
        transformPointsForward(vertex, Converter::Eigen2CV(Hnm));
        float r = computeAreaQuad(vertex);
        float s = origin / r;
#ifdef _DEBUG
        cout <<"Area scaling to the whole size marker:"<< s << endl;
#endif
        // +0.5 rounds the fractional level to the nearest integer.
        return  log(s) / log(scaleFactor*scaleFactor)+0.5;
        
    }
    
    // Read the area scaling directly off a homography: the determinant of
    // its upper-left 2x2 block approximates the local area change of the
    // mapping; divide by scaleFactor^2 to express it in pyramid levels.
    int computeScale(const Matx33d &H, float scaleFactor)
    {
        // FIX: the 2x2 determinant is H00*H11 - H01*H10. The previous code
        // used H(1,2)*H(2,1), which mixes in the translation/projective
        // entries and is not the determinant of any 2x2 sub-block.
        double det = H(0, 0)*H(1, 1) - H(0, 1)*H(1, 0);
        double s = det / scaleFactor / scaleFactor;
        // +0.5 rounds to the nearest integer.
        return s + 0.5;
    }
    
    // Rescale a homography between two pyramid levels: H' = A * H * inv(B),
    // where A = diag(xscale, xscale, 1) scales the output side and
    // B = diag(yscale, yscale, 1) scales the input side. The right factor
    // below is built directly as inv(B), i.e. with 1/yscale on the diagonal.
    Matx33d computeScaleH(const Matx33d&H, const float xscale, const float yscale)
    {
        Matx33d left = Matx33d::eye();
        left(0, 0) = xscale;
        left(1, 1) = xscale;
        Matx33d rightInv = Matx33d::eye();
        rightInv(0, 0) = 1 / yscale;
        rightInv(1, 1) = 1 / yscale;
        return left * H * rightInv;
    }
    
    
    // Write the recorded trajectory to `fileName` as tab-separated text:
    // one line per frame (timestamp, translation x/y/z, Euler angles),
    // followed by the corresponding filtered state vector from `kfOutput`.
    void Tracking::OutputTrajectory(const std::string &fileName)
    {
        std::ofstream fout(fileName);
        for (size_t i=0; i<trajectory.size(); ++i) {
            auto &a = trajectory[i];
            auto r = a.second.EulerAngle();
            fout<<a.first<<"\t\t"<<a.second.t[0]<<"\t\t"<<a.second.t[1]<<"\t\t"<<a.second.t[2]<<"\t\t"<<r[0]<<"\t\t"<<r[1]<<"\t\t"<<r[2]<<endl;
            // FIX: guard against kfOutput being shorter than trajectory
            // (previously an unchecked out-of-bounds read if the two
            // containers ever get out of sync).
            if (i < kfOutput.size())
                fout<<kfOutput[i].transpose()<<endl;
        }
    }
    
    
    
    bool Tracking::tryAddMarker(const Mat& _img,const QueryResult &result)
    {
        Mat gray;
        if (_img.channels() != 1)
        {
            cvtColor(_img, gray, CV_BGR2GRAY);
        }
        else
            gray = _img.clone();
        
        for (int i=0; i<result.size(); ++i) {
            Marker* pm = map->getMarker(result[i].id);
            if (find(now->markers.begin(), now->markers.end(), pm)== now->markers.end()) {
                cv::Mat warpImg;
                std::vector<cv::KeyPoint> kpts;
                cv::Mat desp;
                std::vector<std::vector<DMatch>>dmatches;
                std::vector<DMatch> dmatch;
                std::vector<int> idx;
                warpPerspective(gray, warpImg, result[i].Htq, gray.size());
                orb->detect(warpImg, kpts);
                orb->compute(warpImg, kpts, desp);
                binaryMatcher->knnMatch(desp,pm->orbStructure->desp, dmatches, 2);
                GetMatch::GetInliersRadius(kpts, pm->orbStructure->kpts, dmatches, idx, 0.6, 90, 30);
                GetMatch::GetDMatch(dmatches, idx, dmatch);
                vector<Point3f> marker3D;
                vector<Point2f> now2d2;
                GetMatch::prepare3D_2D(pm->orbStructure->p3d, kpts, dmatch, marker3D, now2d2);
                transformPointsInverse(now2d2,result[i].Htq);
                Matx33d R;
                Matx31d t;
                Solve3D::getCameraRT(K->K_, marker3D, now2d2, R, t, false);
                mutex_tracking.lock();
                now->markers.push_back(pm);
                now->C.push_back(Pose(Converter::CV2Eigen(R),Converter::CV2Eigen(t)));
                now->Hnm.push_back(Converter::CV2Eigen(result[i].Htq).inverse());
                cout<<now->markers.size()<<now->C.size()<<now->Hnm.size()<<endl;
                candidate.markers.push_back(pm);
                candidate.C.push_back(Pose(Converter::CV2Eigen(R),Converter::CV2Eigen(t)));
                candidate.Hkm.push_back(Converter::CV2Eigen(result[i].Htq).inverse());
                cout<<candidate.markers.size()<<candidate.C.size()<<candidate.Hkm.size()<<endl;
                mutex_tracking.unlock();
            }
        }
        
        return true;
        
    }
    
    //TODO: implement tracking function
    /*
     * The main hypothesis is frame-to-frame coherence: the current frame is
     * similar to the previous frame. Under this hypothesis, if we warp the
     * current frame onto the marker using the previous frame's H matrix,
     * features can be matched within a small search radius.
     * The skeleton of this function is:
     * 1. warp the current image onto the marker with the previous H matrix
     * 2. match the warped image against the marker
     * 3. compute the camera pose from the resulting 3D-2D matching pairs
     * 4. add a keyframe when certain criteria are met
     */
    bool Tracking::tracking(const Mat&_img,double timestamp)
    {
        
        mutex_tracking.lock();
        
        TickMeter tm5;
        tm5.start();
        
        TickMeter tm;
        
        //#define isDebug
        Mat gray;
        if (_img.channels() != 1)
        {
            cvtColor(_img, gray, CV_BGR2GRAY);
        }
        else
            gray = _img.clone();
        
        
        //compute every marker mask in now img
        std::vector<Mat> mask(now->markers.size());
        Mat maskSum = Mat::zeros(gray.size(), CV_8U);//the sum of all marker location
        for (int i=0; i<now->markers.size(); ++i) {
            misc::warpMask(mask[i], gray.size(), now->Hnm[i]);
            maskSum += mask[i];
        }
        
        now->clear();
        //detect now img
        brief->operator()(gray, maskSum, now->fts);
        
        __TICKSTART__
        std::vector<int> levels;//This vector is just for test which will be removed soon.
        double err;
        for (int i=0; i<now->markers.size(); ++i) {
            //warp img directly using H matrix of last frame
            const Marker* nowMarker = now->markers[i];
            Mat warpImg;
            int level = computeScale(*imgSize, now->Hnm[i], nowMarker->multiBrief->scale);
            if (level<0)
                level = 0;
            else if (level>=nowMarker->multiBrief->levelNum)
                level = nowMarker->multiBrief->levelNum-1;
            levels.push_back(level);
            
            float scale = pow(nowMarker->multiBrief->scale, level);
//            Mat mask;
//            //    warpPerspective(Identity, mask, Converter::Eigen2CV(now->H), Identity.size());
//            misc::warpMask(mask, gray.size(), now->Hnm[i]);
//            
            //warpH : from marker (level n) to current img
            Eigen::Matrix3d scaleHnm = nowMarker->warpScale(now->Hnm[i], mask[i], warpImg, level);
            
            vector<vector<DMatch>> dmatches;
            

            // match warpImg with nowMarker
            /* transform points in marker to current warp img
             * compute point descriptors in warp img
             * match descriptors between warp img and now img
             */
            transformPointsForward(nowMarker->multiBrief->allFts[level], transformPoints, scaleHnm, gray.size());
            
            brief->compute(warpImg,transformPoints);
            
            //matching points
            
            
            vector<DMatch> dmatch;
            misc::matchRadius(now->fts,transformPoints, dmatch,gray.size());
            vector<int> idx;
            GetMatch::GetInliers(dmatch, idx,90);
            if (idx.size()<10) {
                now->markers.erase(now->markers.begin()+i);
                now->C.erase(now->C.begin()+i);
                now->Hnm.erase(now->Hnm.begin()+i);
                continue;
            }
            GetMatch::GetMaskObj(dmatch, idx);
            
            
            vector<Point2f> query,train;
            vector<Eigen::Vector2d> qq,tt;
            GetMatch::preparePoint2D(now->fts, nowMarker->multiBrief->allFts[level], dmatch, query, train);
            Mat mask;
            Matx33d scale_Hnm = findHomographyRHO(train, query, mask, 3);
            Eigen::Matrix3d s =Eigen::Matrix3d::Identity();
            now->Hnm[i] = Converter::CV2Eigen(scale_Hnm);
            s(0, 0) = scale;
            s(1, 1) = scale;
            s = s*now->Hnm[i].inverse();
            now->Hnm[i] = s.inverse();

            GetMatch::GetMaskObj(dmatch, mask);
            
//            
//            
//            
//            //    cout<<idx.size()<<endl;
//#define NDEBUG
//#ifndef NDEBUG
//            Mat inverseWarp;
//            warpPerspective(gray, inverseWarp, Converter::Eigen2CV(now->Hnm[i]).inv(), gray.size());
//            
//            inverseWarp = drawInfo(inverseWarp, "matches:"+to_string(dmatch.size())+"\tLevel:"+to_string(level), Point2i(10,warpImg.rows-30));
//            
//            drawPoints(warpImg, transformPoints, warpImg);
//            imshow("warp", warpImg);
//            imshow("inverseWarp", inverseWarp );
//            imshow("marker", nowMarker->img);
//            imshow("now", gray);
//            cvWaitKey(0);
//#endif
            //
            GetMatch::setXYZ(now->fts,transformPoints,dmatch);
            
            std::vector<Feature*> fts;
            
            std::vector<Point3f> markerP3D;
            std::vector<Point2f> nowP2d;
            markerP3D.reserve(dmatch.size());
            fts.reserve(fts.size());
            nowP2d.reserve(dmatch.size());
            Point3f a;
            Point2f b;
            for (int i=0; i<dmatch.size(); ++i) {
                Converter::Eigen2CV(now->fts[dmatch[i].queryIdx]->pointXYZ->pointXYZ, a);
                Converter::Eigen2CV(now->fts[dmatch[i].queryIdx]->_pos,b);
                fts.push_back(now->fts[dmatch[i].queryIdx]);
                markerP3D.push_back(a);
                nowP2d.push_back(b);
            }
            Matx33d R;
            Matx31d t;
            R = Converter::Eigen2CV<double,3,3>(now->C[i].R);
            t = Converter::Eigen2CV(now->C[i].t);

            Solve3D::getCameraRT(Converter::Eigen2CV(K->K), markerP3D, nowP2d,  R,  t, true);
            now->C[i].R = Converter::CV2Eigen(R);
            now->C[i].t = Converter::CV2Eigen(t);
            
            
            err = misc::computeMeanReprojectEror(*K,now->C[0], fts);
            
        }
        trajectory.push_back(make_pair(timestamp,now->C[0]));
//
        cout<<err<<endl;
        auto p = kf.addMotion(mm.addPose(now->C[0], timestamp),timestamp, K->fx, pow(2, levels[0]), err);
        kfOutput.push_back(p);
 //       cout<<p.topRows(3)<<now->C[0].t<<endl;
       now->C[0].t = p.topRows(3);
        Matrix3d R;
        auto RR = Eigen::AngleAxisd(p(3), Eigen::Vector3d::UnitX())
        *Eigen::AngleAxisd(p(4), Eigen::Vector3d::UnitY())
        *Eigen::AngleAxisd(p(5), Eigen::Vector3d::UnitZ());
//        ceres::EulerAnglesToRotationMatrix(p.data()+3, 3,R.data());
//        now->C[0].R.transposeInPlace();
    
        now->C[0].R = RR.toRotationMatrix();

//        __TICKSTOP__
//        cout<<"Marker tracking time:"<<now->C.size()<<endl;
//        __TICKPRINT__
        mutex_tracking.unlock();
        if(now->markers.empty())
        {
            mutex_state.lock();
            state = Tracking_Lost;
            mutex_state.unlock();
            return false;
        }
//        if(addKeyframeFinished)
//        {
//            candidate.C = now->C;
//            candidate.Hkm = now->Hnm;
//            candidate.img = gray.clone();
//            candidate.markers = now->markers;
//            maskCandidate = maskSum.clone();
//            addKeyframeFinished = false;
//        }
        
        
        return true;
    }
    
    // Seed the tracker with an initial pose, marker, keyframe, and
    // marker->image homography; switches the state machine to
    // Tracking_Success and primes the motion model / Kalman filter.
    void Tracking::init(const Pose&_C,const Marker* marker,const Keyframe* k,const Eigen::Matrix3d &Hnm,double timeStamp)
    {
        LOG(INFO)<<"Tracking Init"<<endl;
        LOG(INFO)<<"Tracking Init H"<<Hnm<<endl;

        currentKeyFrame = k;
        now->K = K;

        // Reset the per-frame state, then register the single initial marker.
        now->C.clear();
        now->Hnm.clear();
        now->markers.clear();
        now->C.push_back(_C);
        now->Hnm.push_back(Hnm);
        now->markers.push_back(marker);

        state = Tracking_Success;
        kf.initState(mm.addPose(_C, timeStamp),timeStamp);
    }
    
    
    Tracking::Tracking()
    {
        // No keyframe insertion pending at start-up.
        addKeyframeFinished = true;
        now = new Frame();
        // Full-image mask filled with 254.
        // NOTE(review): presumably any non-zero value counts as "enabled";
        // confirm why 254 rather than 255.
        identity = Mat(imgSize->height, imgSize->width, CV_8U, 254);
        state = Not_Initialise;
        mm = Motion_Model(10);
        
        // Background keyframe-insertion worker. It loops forever in
        // tryAddKeyframe() and is detached, so it can never be joined and
        // keeps referencing `this` for the lifetime of the process.
        thread_addKeyframe = new std::thread(&Tracking::tryAddKeyframe,this);
        thread_addKeyframe->detach();
    }
    
    Tracking::~Tracking()
    {
        // Intentionally empty. NOTE(review): `now` and `thread_addKeyframe`
        // are never freed; the detached worker thread still references this
        // object, so releasing them here would race with it. This is only
        // tolerable if Tracking lives for the whole process -- confirm.
    }
    
    
    const std::vector<Eigen::Matrix4d>& Tracking::getRenderP()
    {
        std::lock_guard<std::mutex> lock(mutex_tracking);
        P.resize(now->C.size());
        for (int i=0; i<now->C.size(); ++i){
            
            P[i]=Solve3D::getP(now->C[i].R, now->C[i].t);
            P[i](0,3) += now->markers[i]->center.x;
            P[i](1,3) += now->markers[i]->center.y;
            P[i](2,3) += now->markers[i]->center.z;
        }
        return P;
        
    }
}
