/***************************************************************************
 创建者: 华磊
 开始时间: 2019.11.29
 copyright: (C) 华友高科
 ***************************************************************************/
#include "lasercameracalibrate.h"
#include <QDebug>
#include <iostream>
#ifdef D_USE_OPENCV
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
using namespace cv;
#endif
#include <iostream>
#include <vector>
#include <stdio.h>
#include "calculatethreepointmatrixtransform.h"

//#include <pcl/io/pcd_io.h>
//#include <pcl/point_types.h>
//#include <pcl/registration/icp.h>
//#include <pcl/registration/gicp.h>
//#include <pcl/visualization/cloud_viewer.h>
//#include <pcl/visualization/pcl_visualizer.h>

//#include "jly_goicp.h"
//#include "ConfigMap.hpp"
//#include "pclicptransfer.h"


using namespace std;

LaserCameraCalibrate::LaserCameraCalibrate()
{
    // PCL ICP support is currently compiled out; the member construction
    // below is kept for reference, matching the pclicptransfer-based stubs
    // further down in this file.
//    pclicptransfer=new PclIcpTransfer;
}

int LaserCameraCalibrate::updatePclIcpTransferDebugWindow()
{
    // Stub: PCL ICP support is compiled out (see the commented construction
    // in the constructor); kept so callers continue to link.
//     return pclicptransfer->updatePclIcpTransferDebugWindow();
    // Bug fix: the function previously fell off the end of a non-void
    // function, which is undefined behavior. Report success like the other
    // disabled stubs in this file (1 == success).
    return 1;
}

int LaserCameraCalibrate::showPclIcpTransferDebugWindow(bool isShowBaseMap, bool isShowBaseCompareMap,
                                                        bool isShowLocalMap, bool isShowLocalCompareMap)
{
//    return pclicptransfer->showPclIcpTransferDebugWindow(isShowBaseMap,isShowBaseCompareMap,isShowLocalMap,isShowLocalCompareMap);
}

int LaserCameraCalibrate::calculateLaserCameraFrame(std::vector<PointPro> tcpPointList,
                                                    std::vector<PointPro> cameraPointList,
                           std::vector<double> &cameraTransferOut, double &maxErrorOut)
{
    //要求４个点的姿态任意

    //第一个为固定焊缝点的坐标。其余４个点都在激光扫描平面

    //最新方法．有相机坐标系(使用xy代替ｘｚ)．TCP坐标系．
    //第一个点为焊点的世界坐标值．求其余４点（焊点）在TCP坐标的值．
    //其余４点（焊点）在相机坐标系的值．使用PNP算法，求有相机坐标系与TCP坐标系的转移矩阵．

    //----------------------------------------------
    if(5!=tcpPointList.size())
    {
        qDebug()<<"error, 5!=pointList.size()";
        return -1;
    }
    if(5!=cameraPointList.size())
    {
        qDebug()<<"error, 5!=cameraPointList.size()";
        return -2;
    }

    //删除第一个无用点
    std::vector<PointPro> cameraPointList_new;
    cameraPointList_new.push_back(cameraPointList[1]);
    cameraPointList_new.push_back(cameraPointList[2]);
    cameraPointList_new.push_back(cameraPointList[3]);
    cameraPointList_new.push_back(cameraPointList[4]);

    //第一个点为焊点的世界坐标值．求其余４点（焊点）在TCP坐标的值．
    std::vector<PointPro> weldInTcpList;
    weldInTcpList.resize(tcpPointList.size()-1);
    for(int i=0;i<tcpPointList.size()-1;i++)
    {
        if(1!=calculatePositionInTcp(tcpPointList[0],tcpPointList[1+i],weldInTcpList[i]))
        {
            return -1;
        }
    }
    
    //其余４点（焊点）在相机坐标系的值．求有相机坐标系与TCP坐标系的转移矩阵．
    if(1!=calculateTransformMatrix_geometry(weldInTcpList,cameraPointList_new,cameraTransferOut))
    {
        qDebug()<<"矩阵不可逆，请重新取点";
        return -2;
    }

    qDebug()<<"111cameraTransferOut"<<cameraTransferOut[0]<<cameraTransferOut[1]<<cameraTransferOut[2]
          <<cameraTransferOut[3]<<cameraTransferOut[4]<<cameraTransferOut[5];
//    calculateTransformMatrix_pciicp(weldInTcpList,cameraPointList_new,cameraTransferOut);
//    calculateTransformMatrix_goicp(weldInTcpList,cameraPointList_new,cameraTransferOut);


     //使用转移矩阵，计算４个点的世界坐标位置，求得误差．
     std::vector<PointPro> resultPoint;
     std::vector<PointPro> transPoint;
     resultPoint.resize(cameraPointList_new.size());
     transPoint.resize(cameraPointList_new.size());
     for(int i=0;i<cameraPointList_new.size();i++)
     {
         testTransfer(weldInTcpList[i],cameraTransferOut,transPoint[i]);
         calculateWeldPositionInWorldByCamera(tcpPointList[i+1],cameraPointList_new[i],
                                                cameraTransferOut,resultPoint[i]);
     }

     maxErrorOut=0;
     for(int i=0;i<cameraPointList_new.size();i++)
     {
        double tmpError=sqrt((resultPoint[i].positionValue[0]-tcpPointList[0].positionValue[0])*
                (resultPoint[i].positionValue[0]-tcpPointList[0].positionValue[0])+
                (resultPoint[i].positionValue[1]-tcpPointList[0].positionValue[1])*
                (resultPoint[i].positionValue[1]-tcpPointList[0].positionValue[1])+
                (resultPoint[i].positionValue[2]-tcpPointList[0].positionValue[2])*
                (resultPoint[i].positionValue[2]-tcpPointList[0].positionValue[2]));
        if(tmpError>maxErrorOut)
        {
            maxErrorOut=tmpError;
        }
        qDebug()<<"tmpError"<<tmpError
               <<resultPoint[i].positionValue[0]-tcpPointList[0].positionValue[0]
                <<resultPoint[i].positionValue[1]-tcpPointList[0].positionValue[1]
                <<resultPoint[i].positionValue[2]-tcpPointList[0].positionValue[2];
        qDebug()<<"pose"<<resultPoint[i].positionValue[0]
                <<resultPoint[i].positionValue[1]
                <<resultPoint[i].positionValue[2];
     }



     return 1;
}



int LaserCameraCalibrate::calculateWeldPositionInWorldByCamera(PointPro tcpInWorld, PointPro cameraPoint,
                                          std::vector<double> cameraTransfer, PointPro &weldInWorld)
{
    // Chain world<-TCP, TCP<-camera and camera<-weld to obtain the weld
    // point's world position. Pose angles arrive in degrees (indices 3..5);
    // only the x/y/z of the result are written back (indices 0..2).
    const double deg2rad = M_PI/180.0;

    KDL::Frame tcpFrame(createEulerMatrix(tcpInWorld.positionValue[3]*deg2rad,
                                          tcpInWorld.positionValue[4]*deg2rad,
                                          tcpInWorld.positionValue[5]*deg2rad),
                        Vector(tcpInWorld.positionValue[0],
                               tcpInWorld.positionValue[1],
                               tcpInWorld.positionValue[2]));

    KDL::Frame cameraFrame(createEulerMatrix(cameraTransfer[3]*deg2rad,
                                             cameraTransfer[4]*deg2rad,
                                             cameraTransfer[5]*deg2rad),
                           Vector(cameraTransfer[0],
                                  cameraTransfer[1],
                                  cameraTransfer[2]));

    // The weld point has no orientation of its own: identity rotation,
    // translation taken from the camera measurement.
    KDL::Frame weldFrame(KDL::Vector(cameraPoint.positionValue[0],
                                     cameraPoint.positionValue[1],
                                     cameraPoint.positionValue[2]));

    const KDL::Frame weldInWorldFrame = tcpFrame*cameraFrame*weldFrame;

    weldInWorld.positionValue.resize(6);
    for(int axis=0;axis<3;axis++)
    {
        weldInWorld.positionValue[axis]=weldInWorldFrame.p[axis];
    }
    return 1;
}

int LaserCameraCalibrate::testTransfer(PointPro pointIn, std::vector<double> cameraTransfer,
                                       PointPro &pointOut)
{
    // Apply the camera transform (x/y/z plus w/p/r in degrees) to a single
    // position: pointOut = cameraTransfer * pointIn. Only indices 0..2 of
    // the output are meaningful.
    const double deg2rad = M_PI/180.0;

    KDL::Frame transform(createEulerMatrix(cameraTransfer[3]*deg2rad,
                                           cameraTransfer[4]*deg2rad,
                                           cameraTransfer[5]*deg2rad),
                         Vector(cameraTransfer[0],
                                cameraTransfer[1],
                                cameraTransfer[2]));

    KDL::Frame inputFrame(KDL::Vector(pointIn.positionValue[0],
                                      pointIn.positionValue[1],
                                      pointIn.positionValue[2]));

    // Left-multiplication (transform first) was chosen over the other
    // orderings that were tried during development.
    const KDL::Frame outputFrame = transform*inputFrame;

    pointOut.positionValue.resize(6);
    for(int axis=0;axis<3;axis++)
    {
        pointOut.positionValue[axis]=outputFrame.p[axis];
    }
    return 1;
}

int LaserCameraCalibrate::calculatePositionInTcp(PointPro weldPointInWorld, PointPro tcpInWorld, 
                                                 PointPro &weldInTcp)
{
    // Express a world-coordinate weld point in the TCP frame:
    //   weldInTcp = inverse(T_world_tcp) * weldPointInWorld
    // tcpInWorld carries x/y/z plus w/p/r in degrees; weldPointInWorld only
    // needs x/y/z. Returns 1 on success, -1 when either input is too short.
    if(3>weldPointInWorld.positionValue.size() || 3>tcpInWorld.positionValue.size())
    {
        // Bug fix: the old message referenced "weldPointInWorld.tcpInWorld",
        // a member that does not exist; report the real failing condition.
        qDebug()<<"error,,,3>weldPointInWorld.positionValue.size() || 3>tcpInWorld.positionValue.size()";
        return -1;
    }

    KDL::Frame frame_tcp_in_world= Frame(createEulerMatrix(tcpInWorld.positionValue[3]*M_PI/180.0,
                                         tcpInWorld.positionValue[4]*M_PI/180.0,
                                         tcpInWorld.positionValue[5]*M_PI/180.0),
                                         Vector (tcpInWorld.positionValue[0],
                                             tcpInWorld.positionValue[1],
                                             tcpInWorld.positionValue[2]));

    // Pure translation: the weld point has no orientation of its own.
    KDL::Vector tmpVector2(weldPointInWorld.positionValue[0],weldPointInWorld.positionValue[1],weldPointInWorld.positionValue[2]);
    KDL::Frame frame_weld_in_world(tmpVector2);

    KDL::Frame frame_weld_in_tcp=frame_tcp_in_world.Inverse()*frame_weld_in_world;

    weldInTcp.positionValue.resize(3);
    weldInTcp.positionValue[0]=frame_weld_in_tcp.p[0];
    weldInTcp.positionValue[1]=frame_weld_in_tcp.p[1];
    weldInTcp.positionValue[2]=frame_weld_in_tcp.p[2];

    return 1;
}

int LaserCameraCalibrate::getRotationBy3Points(const Matrix3f& rf3, Rotation& rotMat)
{

    MatrixXf Rf1(3, 3), vo(3, 3);
    RowVector3f ColNorm;
    Vector3f v1, v2, v3, nv1, nv2, nv3;

    v1 = rf3.col(1) - rf3.col(0);
    v2 = rf3.col(2) - rf3.col(0);
    v3 = v1.cross(v2);

    vo.col(0) = v1;
    vo.col(1) = v2;
    vo.col(2) = v3;

    if (fabs(vo.determinant()) < 1e-7){
        return -1;
    }
    Rf1 = getSchmidtMatrix(vo);
    ColNorm = Rf1.colwise().norm();
    nv1 = Rf1.col(0) / ColNorm[0];
    nv2 = Rf1.col(1) / ColNorm[1];
    nv3 = Rf1.col(2) / ColNorm[2];

    rotMat =Rotation(nv1(0), nv2(0), nv3(0),
                                   nv1(1), nv2(1), nv3(1),
                                   nv1(2), nv2(2), nv3(2));

    return 1;
}


MatrixXf LaserCameraCalibrate::getSchmidtMatrix(const MatrixXf& Sd)
{
    // Classic Gram-Schmidt orthogonalization, column by column. The result
    // spans the same space as Sd; columns are mutually orthogonal but are
    // NOT normalized (callers divide by the column norms themselves).
    const int rows = Sd.rows();
    const int cols = Sd.cols();

    MatrixXf orth(rows, cols);
    orth.col(0) = Sd.col(0);

    for (int i = 1; i < cols; ++i)
    {
        // Accumulate the projection of column i onto all previously
        // orthogonalized columns, then subtract it.
        MatrixXf projection = MatrixXf::Zero(rows, 1);
        for (int j = 0; j < i; ++j)
        {
            float num = orth.col(j).transpose()*Sd.col(i);
            float den = orth.col(j).transpose()*orth.col(j);
            projection.col(0) += (num / den)*orth.col(j);
        }
        orth.col(i) = Sd.col(i) - projection.col(0);
    }
    return orth;
}

Rotation LaserCameraCalibrate::createEulerMatrix(double psi, double sita, double fai)
{
    // Build a rotation from roll/pitch/yaw angles in radians. Delegates to
    // KDL's Rotation::RPY, which composes Rot(Z,fai)*Rot(Y,sita)*Rot(X,psi);
    // extractEulerMatrixAngle() below is its exact inverse.
    return Rotation::RPY(psi, sita, fai);
}

void LaserCameraCalibrate::extractEulerMatrixAngle(const Rotation &rotMat, double &psi, double &sita, double &fai)
{
    // Inverse of createEulerMatrix(): recover roll/pitch/yaw (radians) from
    // a rotation matrix via KDL's GetRPY.
    rotMat.GetRPY(psi, sita, fai);
}

int LaserCameraCalibrate::calculateTransformMatrix_opencv(std::vector<PointPro> weldInTcpList,
                                                   std::vector<PointPro> cameraPointList,
                          std::vector<float> &cameraTransferOut)
{
    // Stub: an earlier attempt to solve the camera-in-TCP transform with
    // OpenCV's solvePnP, kept below (commented out) for reference. The
    // active solver is calculateTransformMatrix_geometry(). Inputs are
    // ignored and cameraTransferOut is left untouched; always returns 1.
//    vector<cv::Point3f> cornersPoint3d;
//    vector<cv::Point2f> cornersPoint2d;
//    Mat camMatrix;//内参
//    Mat distCoeff;//畸变
//    for(int i=0;i<4;i++)
//    {
//        cornersPoint3d.push_back(cv::Point3f(weldInTcpList[i].positionValue[0], weldInTcpList[i].positionValue[1]
//                ,weldInTcpList[i].positionValue[2]));
//    }
//    for(int i=0;i<4;i++)
//    {
//        cornersPoint2d.push_back(cv::Point2f(cameraPointList[i].positionValue[0],
//                                 cameraPointList[i].positionValue[1]));
////        cornersPoint2d.push_back(cv::Point2f(cameraPointList[i].positionValue[0],
////                                 cameraPointList[i].positionValue[2]));
//    }


//     float fx = 1;
//     float fy = 1;
//     float u0 = 0;
//     float v0 = 0;

//     camMatrix = (Mat_<double>(3, 3) << fx, 0., u0, 0., fy, v0, 0., 0., 1.);
//     distCoeff=(Mat_<double>(5, 1) << 0,0,0,0,0);

//     Mat rotMat,transMat;
//     solvePnP(cornersPoint3d, cornersPoint2d, camMatrix, distCoeff, rotMat, transMat);

//     qDebug()<<"rotMat"<<rotMat.at<double>(0)<<rotMat.at<double>(1)<<rotMat.at<double>(2);
//     qDebug()<<"transMat"<<transMat.data[0]<<transMat.data[1]<<transMat.data[2];
//     std::cout << "pnp rotMat : \n"  << rotMat << std::endl;
//     std::cout << "pnp transMat : \n"  << transMat << std::endl;
//     rotMat.convertTo(rotMat, CV_64F);    //旋转向量
//     Rodrigues(rotMat, rotMat);
//     std::cout << "Rodrigues rotMat : \n"  << rotMat << std::endl;

//     //旋转矩阵转换为wpr
//     Rotation tmpRotation(rotMat.at<double>(0),rotMat.at<double>(1),rotMat.at<double>(2),
//             rotMat.at<double>(3),rotMat.at<double>(4),rotMat.at<double>(5),
//             rotMat.at<double>(6),rotMat.at<double>(7),rotMat.at<double>(8));
//     double tmpw,tmpp,tmpr;

//     tmpRotation.GetRPY(tmpw,tmpp,tmpr);
//     Rotation tmpRotation2=tmpRotation.RPY(tmpw,tmpp,tmpr);

//     getTaitBryan(tmpRotation2,tmpw,tmpp,tmpr);
//     cameraTransferOut.resize(6);
//     cameraTransferOut[0]=transMat.data[0];
//     cameraTransferOut[1]=transMat.data[1];
//     cameraTransferOut[2]=transMat.data[2];
//     cameraTransferOut[3]=tmpw;
//     cameraTransferOut[4]=tmpp;
//     cameraTransferOut[5]=tmpr;


    return 1;
}

int LaserCameraCalibrate::test()
{
    // Self-test for calculateTransformMatrix_geometry(): the "camera" point
    // set is the "TCP" point set translated by +10 along x, so the solver
    // should recover a pure translation.
    // Returns 1 on success, -2 when the solver rejects the point set.
    std::vector<PointPro> weldInTcpList;
    std::vector<PointPro> cameraPointList;
    std::vector<double> cameraTransferOut;
    PointPro tmpPoint;
    tmpPoint.positionValue.resize(6);
    tmpPoint.positionValue[0]=0;
    tmpPoint.positionValue[1]=0;
    tmpPoint.positionValue[2]=0;
    weldInTcpList.push_back(tmpPoint);

    tmpPoint.positionValue[0]=1;
    tmpPoint.positionValue[1]=0;
    tmpPoint.positionValue[2]=0;
    weldInTcpList.push_back(tmpPoint);

    tmpPoint.positionValue[0]=5;
    tmpPoint.positionValue[1]=8;
    tmpPoint.positionValue[2]=0;
    weldInTcpList.push_back(tmpPoint);

    // Same triangle shifted by +10 on x.
    tmpPoint.positionValue[0]=10;
    tmpPoint.positionValue[1]=0;
    tmpPoint.positionValue[2]=0;
    cameraPointList.push_back(tmpPoint);

    tmpPoint.positionValue[0]=11;
    tmpPoint.positionValue[1]=0;
    tmpPoint.positionValue[2]=0;
    cameraPointList.push_back(tmpPoint);

    tmpPoint.positionValue[0]=15;
    tmpPoint.positionValue[1]=8;
    tmpPoint.positionValue[2]=0;
    cameraPointList.push_back(tmpPoint);

    if(1!=calculateTransformMatrix_geometry(weldInTcpList,cameraPointList,cameraTransferOut))
    {
        qDebug()<<"矩阵不可逆，请重新取点";
        return -2;
    }

    // Bug fix: the success path previously fell off the end of a non-void
    // function (undefined behavior); report success explicitly.
    return 1;
}

int LaserCameraCalibrate::calculateTransformMatrix_goicp(std::vector<PointPro> weldInTcpList,
                                   std::vector<PointPro> cameraPointList, std::vector<float> &cameraTransferOut)
{
    // Stub: a GO-ICP based solver, kept below (commented out) for reference.
    // The active solver is calculateTransformMatrix_geometry(). Inputs are
    // ignored and cameraTransferOut is left untouched; always returns 1.
//    int  NdDownsampled;
//    NdDownsampled = 0; // No downsampling
//    clock_t  clockBegin, clockEnd;

//    POINT3D * pModel, * pData;
//    GoICP goicp;

////	parseInput(argc, argv, modelFName, dataFName, NdDownsampled, configFName, outputFname);
////	readConfig(configFName, goicp);
//    goicp.MSEThresh = 1;
//    goicp.initNodeRot.a = -3.1416;
//    goicp.initNodeRot.b = -3.1416;
//    goicp.initNodeRot.c = -3.1416;
//    goicp.initNodeRot.w = 6.2832;
//    goicp.initNodeTrans.x = -500;
//    goicp.initNodeTrans.y = -500;
//    goicp.initNodeTrans.z = -500;
//    goicp.initNodeTrans.w = 100;
//    goicp.trimFraction = 0.0;
//    // If < 0.1% trimming specified, do no trimming
//    if(goicp.trimFraction < 0.001)
//    {
//        goicp.doTrim = false;
//    }
//    goicp.dt.SIZE = 4;
//    goicp.dt.expandFactor = 1.0;

//    // Load model and data point clouds
//    pModel = (POINT3D *)malloc(sizeof(POINT3D) * 4);
//    for(int i = 0; i < 4; i++)
//    {
//        (pModel)[i].x=weldInTcpList[i].positionValue[0];
//        (pModel)[i].y=weldInTcpList[i].positionValue[1];
//        (pModel)[i].z=weldInTcpList[i].positionValue[2];
//    }


//    pData = (POINT3D *)malloc(sizeof(POINT3D) * 4);
//    for(int i = 0; i < 4; i++)
//    {
//        (pData)[i].x=cameraPointList[i].positionValue[0];
//        (pData)[i].y=cameraPointList[i].positionValue[1];
//        (pData)[i].z=cameraPointList[i].positionValue[2];
//    }

//    goicp.pModel = pModel;
//    goicp.Nm = 4;
//    goicp.pData = pData;
//    goicp.Nd = 4;

//    // Build Distance Transform
//    cout << "Building Distance Transform..." << flush;
//    clockBegin = clock();
//    goicp.BuildDT();
//    clockEnd = clock();
//    cout << (double)(clockEnd - clockBegin)/CLOCKS_PER_SEC << "s (CPU)" << endl;

//    // Run GO-ICP
//    if(NdDownsampled > 0)
//    {
//        goicp.Nd = NdDownsampled; // Only use first NdDownsampled data points (assumes data points are randomly ordered)
//    }
//    cout << "Registering..." << endl;
//    clockBegin = clock();
//    goicp.Register();
//    clockEnd = clock();
//    double time = (double)(clockEnd - clockBegin)/CLOCKS_PER_SEC;
//    cout << "Optimal Rotation Matrix:" << endl;
//    cout << goicp.optR << endl;
//    cout << "Optimal Translation Vector:" << endl;
//    cout << goicp.optT << endl;
//    cout << "Finished in " << time << endl;


//    // Convert the rotation matrix to w/p/r.
//    Rotation tmpRotation(goicp.optR.val[0][0],goicp.optR.val[0][1],goicp.optR.val[0][2],
//            goicp.optR.val[1][0],goicp.optR.val[1][1],goicp.optR.val[1][2],
//            goicp.optR.val[2][0],goicp.optR.val[2][1],goicp.optR.val[2][2]);
//    double tmpw,tmpp,tmpr;
//    tmpRotation.GetRPY(tmpw,tmpp,tmpr);
//    Rotation tmpRotation2=tmpRotation.RPY(tmpw,tmpp,tmpr);
//    getTaitBryan(tmpRotation2,tmpw,tmpp,tmpr);

//    cameraTransferOut.resize(6);
//    cameraTransferOut[0]=goicp.optT.val[0][0];
//    cameraTransferOut[1]=goicp.optT.val[1][0];
//    cameraTransferOut[2]=goicp.optT.val[2][0];
//    cameraTransferOut[3]=tmpw;
//    cameraTransferOut[4]=tmpp;
//    cameraTransferOut[5]=tmpr;


////	ofstream ofile;
////	ofile.open(outputFname.c_str(), ofstream::out);
////	ofile << time << endl;
////	ofile << goicp.optR << endl;
////	ofile << goicp.optT << endl;
////	ofile.close();

//    // NOTE(review): if re-enabled, these must be free(), not delete, to
//    // match the malloc() above.
//    delete(pModel);
//    delete(pData);

    // Bug fix: this non-void function previously had no return statement at
    // all (undefined behavior). Report success like the other stubs.
    return 1;
}

int LaserCameraCalibrate::initialGoicp()
{
    // Stub: GO-ICP support is disabled (see calculateTransformMatrix_goicp).
    // Bug fix: previously fell off the end of a non-void function, which is
    // undefined behavior; report success explicitly.
    return 1;
}

int LaserCameraCalibrate::calculateTransformMatrix_pciicp(std::vector<PointPro> weldInTcpList,
                                      std::vector<PointPro> cameraPointList, std::vector<double> &cameraTransferOut)
{
    // Stub: the PCL-ICP based solver is compiled out (pclicptransfer member
    // is disabled in the constructor). Inputs are ignored and
    // cameraTransferOut is left untouched; always returns 1.
//    pclicptransfer->calculateTransformMatrix_pciicp(weldInTcpList,cameraPointList,cameraTransferOut);

    return 1;
}

int LaserCameraCalibrate::calculateTransformMatrix_geometry(std::vector<PointPro> weldInTcpList,
              std::vector<PointPro> cameraPointList, std::vector<double> &cameraTransferOut)
{
    // Solves the rigid transform between the TCP-frame point set (A) and the
    // camera-frame point set (B) using the first three point pairs, then
    // writes the INVERSE of that transform (camera-in-TCP) into
    // cameraTransferOut as x, y, z followed by w/p/r in degrees.
    // Returns 1 on success, -1 when the three-point solver rejects the
    // point set (degenerate geometry).

    CalculateThreePointMatrixTransform tmpThree;
    vector<double> point1onA, point2onA,point3onA,point1onB,point2onB,point3onB;
    // Only the first three correspondences feed the solver; additional
    // points (if any) are used for error checking by the caller.
    point1onA=weldInTcpList[0].positionValue;
    point2onA=weldInTcpList[1].positionValue;
    point3onA=weldInTcpList[2].positionValue;

    point1onB=cameraPointList[0].positionValue;
    point2onB=cameraPointList[1].positionValue;
    point3onB=cameraPointList[2].positionValue;


    Eigen::Matrix3f rotationOut;
    Eigen::Vector3f translationOut;
    float ratioOut;  // scale factor reported by the solver; unused below
    if(1!=tmpThree.calculateTransformNew2(point1onA, point2onA, point3onA,point1onB, point2onB, point3onB,
                                rotationOut,translationOut,ratioOut))
    {
        return -1;
    }

     // Convert the rotation matrix to KDL and extract w/p/r.
     // NOTE(review): rotationOut(0)..rotationOut(8) use Eigen's linear
     // indexing, which is column-major by default, while KDL::Rotation's
     // constructor takes row-major entries — so this effectively feeds the
     // TRANSPOSE of rotationOut into KDL before the explicit Inverse()
     // below. Presumably the two effects are intended to combine; confirm
     // against calculateTransformNew2's output convention before changing.
     Rotation tmpRotation(rotationOut(0),rotationOut(1),rotationOut(2),
             rotationOut(3),rotationOut(4),rotationOut(5),
             rotationOut(6),rotationOut(7),rotationOut(8));
     double tmpw,tmpp,tmpr;

     // Round-trip through RPY; rebuilding from the extracted angles
     // guarantees tmpRotation2 is a proper rotation even if the float input
     // was slightly off-orthonormal.
     tmpRotation.GetRPY(tmpw,tmpp,tmpr);
     Rotation tmpRotation2=tmpRotation.RPY(tmpw,tmpp,tmpr);
     // Invert to obtain the camera-in-TCP transform.
     Frame tmpFrame=Frame(tmpRotation2, Vector (translationOut(0),translationOut(1),translationOut(2)));
     Frame  tmpInverse=tmpFrame.Inverse();

     extractEulerMatrixAngle(tmpInverse.M,tmpw,tmpp,tmpr);
     cameraTransferOut.resize(6);
     cameraTransferOut[0]=tmpInverse.p[0];
     cameraTransferOut[1]=tmpInverse.p[1];
     cameraTransferOut[2]=tmpInverse.p[2];
     // Angles are returned in degrees to match the rest of this file.
     cameraTransferOut[3]=tmpw/M_PI*180;
     cameraTransferOut[4]=tmpp/M_PI*180;
     cameraTransferOut[5]=tmpr/M_PI*180;


     return 1;
}
