// Third-party libraries.
#include <opencv2/opencv.hpp>
#include "yaml-cpp/yaml.h"
#include <yaml-cpp/node/parse.h>
#include <GeographicLib/Geodesic.hpp>
#include <GeographicLib/Constants.hpp>

// C / C++ standard library.
#include <math.h>
#include <iomanip>   // std::setprecision — used below but previously pulled in only transitively
#include <iostream>
#include <string>

using namespace std;
using namespace cv;
using namespace GeographicLib;

// Camera position in the world frame produced by getWorldPoints():
// the calibration places the camera at the world origin, so planar
// distances/bearings are measured from (0, 0).  Only x/y are used.
Point3f original_camera_point = Point3f(0,0,0);   //xy

// Convert a flat vector of doubles into a cv::Mat with the given channel
// count and row count (e.g. a 9-element vector -> 3x3 matrix with
// channels=1, rows=3).
//
// The vector is taken by const reference: the original passed it by
// value, but the temporary Mat header only wraps the data and the result
// is deep-copied by clone(), so no owning copy of the vector is needed.
cv::Mat convertvector2Mat(const std::vector<double> &v, int channels, int rows)
{
  cv::Mat header(v);                                   // non-owning view of v's data
  cv::Mat dest = header.reshape(channels, rows).clone(); // deep copy: independent of v's lifetime
  return dest;
}

double get_distance(Point3f & p,Point3f & original_camera_point)
{
  double x1 = p.x;
  double y1 = p.y;
  double x2 = original_camera_point.x;
  double y2 = original_camera_point.y;
  
  double distance = sqrt(pow((x1-x2), 2) + pow((y1-y2), 2));
  
  return distance;
}

double get_lat_lon(double lat0,double lon0,double azi2 ,double dis)
{
  double obj_lat,obj_lon;
  Geodesic geod(Constants::WGS84_a(), Constants::WGS84_f());
  geod.Direct(lat0, lon0, azi2, dis, obj_lat, obj_lon);            //calculate lat,lon;
  cout << setprecision(10) << "obj_lat: "<<obj_lat << " " << setprecision(10) <<"obj_lon: "<< obj_lon << "\n";    
  return obj_lat ,obj_lon;
}

// Bearing of the world point as seen from the camera origin, in degrees.
//
// Note the swapped atan2 arguments: atan2(x, y) measures the angle
// clockwise from the +Y axis, i.e. a compass-style azimuth suitable for
// feeding into the geodesic direct problem (get_lat_lon).
//
// `dis` is not used by the computation; the parameter is kept so the
// public signature (and every existing call site) is unchanged.
// M_PI replaces the truncated literal 3.1415926 for full precision.
double get_direction(Point3f &world_xy, double &dis)
{
  (void)dis;  // retained only for signature compatibility
  const double bearing_rad = atan2(static_cast<double>(world_xy.x),
                                   static_cast<double>(world_xy.y));
  const double angle = bearing_rad * 180.0 / M_PI;
  cout << "angle: " << angle << endl;
  return angle;
}

// Back-project a 2D pixel onto the world plane z = zConst (here 0) using
// the camera intrinsics (cameraMatrix) and the solvePnP extrinsics
// (rvec, tvec).  Pinhole model: s * [u v 1]^T = K * (R * Pw + t), solved
// for Pw given the plane constraint.
cv::Point3f getWorldPoints(Point2f &inPoints, Mat &rvec, Mat &tvec, Mat &cameraMatrix)
{
	// Rotation vector -> 3x3 rotation matrix.
	Mat rotationMatrix;
	Rodrigues(rvec, rotationMatrix);
	const double zConst = 0;  // height of the working plane in world coordinates

	// Homogeneous pixel coordinates [u, v, 1]^T.
	cv::Mat imagePoint = (Mat_<double>(3,1) << double(inPoints.x), double(inPoints.y), 1);

	// Hoist the matrix inversions: the original recomputed
	// rotationMatrix.inv() three times and cameraMatrix.inv() twice.
	Mat invR = rotationMatrix.inv();
	Mat invK = cameraMatrix.inv();

	// Scale factor s chosen so the back-projected ray intersects z = zConst.
	Mat tempMat  = invR * invK * imagePoint;
	Mat tempMat2 = invR * tvec;
	double s = (zConst + tempMat2.at<double>(2, 0)) / tempMat.at<double>(2, 0);

	// World coordinates: Pw = R^-1 * (s * K^-1 * p - t).
	Mat wcPoint = invR * (s * invK * imagePoint - tvec);
	Point3f worldPoint(wcPoint.at<double>(0, 0), wcPoint.at<double>(1, 0), wcPoint.at<double>(2, 0));
	return worldPoint;
}

int main(int argc,char *argv[])
{ 
  Point3f out;
  double dis;
  Point2f inPoints;
  string x = string(argv[1]);
  string y = string(argv[2]);
  inPoints.x = atof(x.c_str()),inPoints.y=atof(y.c_str());
  cv::Mat imagePoint = (Mat_<double>(3,1)<<double(inPoints.x),double(inPoints.y),1);

  YAML::Node config,config1;
  config = YAML::LoadFile("./camera.yml");
  config1 = YAML::LoadFile("../config/pos.yml");

  std::vector<double>v_cam = config["camera_matrix"].as<vector<double>>();
  std::vector<double>v_tvec  = config["tvec"].as<vector<double>>();
  std::vector<double>v_rvec  = config["rvec"].as<vector<double>>();
 
  Mat cameraMatrix = convertvector2Mat(v_cam,1,3);
  Mat    tvec      = convertvector2Mat(v_tvec,1,3);
  Mat    rvec      = convertvector2Mat(v_rvec,1,3);
  
  Geodesic geod(Constants::WGS84_a(), Constants::WGS84_f());
  vector<double> gps_pos = config1["gps"].as<std::vector<double>>();  
  double lat0 = gps_pos[0] , lon0 = gps_pos[1];        //get camera lat , lon;
  cout<<"camera_lat: "<<setprecision(10)<<lat0<<" camera_lon: "<<lon0<<endl;

  out = getWorldPoints(inPoints, rvec, tvec, cameraMatrix);    //calculate x y;
  dis = get_distance(out,original_camera_point);               //calculate distance;
  double azi1= get_direction(out,dis);    //calculate dierction
  double out_lat,out_lon = get_lat_lon(lat0,lon0,azi1,dis);  //calculate object lat , lon;

  cout<<"out x&y: "<<out<<endl;
  cout<<"out distance: "<<dis<<endl;

  return 0;
}
