#include "camerammind.h"
#include <QCoreApplication>
#include <QDateTime>
#include <QDir>
#include <QMap>
#include <QTime>
#include "common_log.h"


using namespace mmind::api;

// Cache of enumerated Mech-Mind devices, keyed by their IP address string.
// Filled lazily by LoadMMindCameraDevInfo() on the first CameraInit() call
// and shared by every CameraMMind instance in the process.
// NOTE(review): not synchronized — confirm cameras are initialized from a
// single thread before relying on this cache concurrently.
static QMap<QString, MechEyeDeviceInfo> g_devInfo;

// Start with a null device pointer so the destructor / re-init checks are
// well defined even when CameraInit() was never called.
CameraMMind::CameraMMind()
	: m_mmind(nullptr)
{
}

// Releases the MechEyeDevice allocated in CameraInit(); the original code
// never freed it, leaking the device object (and its connection) on every
// CameraMMind destruction.
CameraMMind::~CameraMMind()
{
	if (m_mmind != nullptr) {
		m_mmind->disconnect();
		delete m_mmind;
		m_mmind = nullptr;
	}
}

/*相机初始化接口*/
int CameraMMind::CameraInit(const CameraParam& param)
{
	m_ip = param.ip;
	m_id = param.id;
	if (m_ip.isEmpty() || m_id.isEmpty()) {
		log_error("ip error: {}, {}", m_ip, m_id);
		return -1;
	}

	if (g_devInfo.size() <= 0) {
		LoadMMindCameraDevInfo();
	}
	if (g_devInfo.find(m_ip) == g_devInfo.end()) {
		log_error("相机列表中未查找到{}", m_ip);
		return -1;
	}

	auto info = g_devInfo.find(m_ip).value();
	if (info.ipAddress != m_ip.toStdString()) {
		log_error("Not find camera ip = {}", m_ip);
		return -2;
	}

	m_mmind = new MechEyeDevice();
	auto ret = m_mmind->connect(info);
	if (ret.isOK()) {
		log_trace("camera {} connect successed", m_id);
	} else {
		log_error("camera {} connect failed, ip = {}", m_id, m_ip);
		return -1;
	}

	/*获取相机参数*/
	if (!SetImagAligmentParam()) {
		log_error("camera {} Get param failed", m_id);
		return -1;
	}
	/*创建图片存储文件*/
	if (!CreatePicturePath(param.path)) {
		log_error("{} create picture save path failed", m_id);
		return -1;
	}
	return 0;
}
/* Trigger one capture: grabs a color frame and a depth frame, writes both to
 * PNG files under m_savePath and returns the file paths via the out-params.
 * On failure the corresponding out-param carries an error message instead of
 * a path and false is returned.
 * rgb   [out] saved color image path, or error text on failure.
 * depth [out] saved depth image path (aligned to the color frame), or error
 *             text on failure.
 * NOTE(review): the timestamp has second resolution, so two snaps within the
 * same second overwrite each other — confirm that is acceptable. */
bool CameraMMind::SnapPicture(QString& rgb, QString& depth)
{
	log_trace("camera {} start Snap", m_id);  // was missing the {} placeholder
	ColorMap cmap = {};
	DepthMap dmap = {};

	QString timestamp = QDateTime::currentDateTime().toString("hh-mm-ss");
	QString rgbpath = m_savePath + QString("color-%1.png").arg(timestamp);
	QString deppath = m_savePath + QString("depth-%1.png").arg(timestamp);

	/* Grab the color frame from the camera. */
	auto ret = m_mmind->captureColorMap(cmap);
	if (!ret.isOK()) {
		log_error("Get RGB imagMap failed");
		rgb = "获取彩色图失败";
		return false;
	}
	cv::Mat color8UC3 = cv::Mat(cmap.height(), cmap.width(), CV_8UC3, cmap.data());
	cv::imwrite(rgbpath.toStdString().data(), color8UC3);
	rgb = rgbpath;

	/* Grab the depth frame. */
	ret = m_mmind->captureDepthMap(dmap);
	if (!ret.isOK()) {
		/* Fixed copy-paste bugs: the log said "RGB" and the error text was
		 * written into `rgb`, clobbering the good color path. */
		log_error("Get depth imagMap failed");
		depth = "获取深度图失败";
		return false;
	}
	cv::Mat depth32F = cv::Mat(dmap.height(), dmap.width(), CV_32FC1, dmap.data());
	/* Reproject the depth map into the color camera's pixel frame, then
	 * convert to 16-bit for PNG storage. */
	cv::Mat depthTemp = ProjectionDepth2Color(depth32F, color8UC3.rows, color8UC3.cols);
	cv::Mat depth32FDst;
	depthTemp.convertTo(depth32FDst, CV_16U);
	cv::imwrite(deppath.toStdString().data(), depth32FDst);
	depth = deppath;

	return true;
}

/* Enumerates every Mech-Mind device reachable on the network and rebuilds
 * the ip -> device-info cache consumed by CameraInit(). */
void CameraMMind::LoadMMindCameraDevInfo()
{
	g_devInfo.clear();
	const auto discovered = MechEyeDevice::enumerateMechEyeDeviceList();
	for (const auto& dev : discovered) {
		log_trace("Get MMIND camera ip : {}", dev.ipAddress);
		g_devInfo.insert(QString::fromStdString(dev.ipAddress), dev);
	}
}
bool CameraMMind::SetImagAligmentParam()
{
	DeviceIntri intri = {};
	auto ret = m_mmind->getDeviceIntri(intri);
	if (!ret.isOK()) {
		log_error("Get intri param filed, id = {}", m_id);
		log_error("msg = {}", ret.errorDescription);
		return false;
	}

	double* cameraMatrix = intri.depthCameraIntri.cameraMatrix;//[fx, fy, cx, cy]
	double* pdistortion = intri.depthCameraIntri.distortion;
	m_depthIntrinsic = cv::Mat::eye(3, 3, CV_32F);
	m_depthIntrinsic.at<float>(0, 0) = cameraMatrix[0];
	m_depthIntrinsic.at<float>(0, 2) = cameraMatrix[2];
	m_depthIntrinsic.at<float>(1, 1) = cameraMatrix[1];
	m_depthIntrinsic.at<float>(1, 2) = cameraMatrix[3];

	m_depthDistortion = cv::Mat::eye(1, 5, CV_32F);
	m_depthDistortion.at<float>(0, 0) = pdistortion[0];
	m_depthDistortion.at<float>(0, 1) = pdistortion[1];
	m_depthDistortion.at<float>(0, 2) = pdistortion[2];
	m_depthDistortion.at<float>(0, 3) = pdistortion[3];
	m_depthDistortion.at<float>(0, 4) = pdistortion[4];

	double* pcameraMatrix = intri.textureCameraIntri.cameraMatrix;//[fx, fy, cx, cy]
	double* rgbpdistortion = intri.textureCameraIntri.distortion;
	m_rgbIntrinsic = cv::Mat::eye(3, 3, CV_32F);//数组转Mat
	m_rgbIntrinsic.at<float>(0, 0) = pcameraMatrix[0];
	m_rgbIntrinsic.at<float>(0, 2) = pcameraMatrix[2];
	m_rgbIntrinsic.at<float>(1, 1) = pcameraMatrix[1];
	m_rgbIntrinsic.at<float>(1, 2) = pcameraMatrix[3];


	m_rgbExtrinsic = cv::Mat::eye(4, 4, CV_32F);
	m_rgbExtrinsic.at<float>(0, 0) = intri.depthToTexture.rotation[0][0];
	m_rgbExtrinsic.at<float>(0, 1) = intri.depthToTexture.rotation[0][1];
	m_rgbExtrinsic.at<float>(0, 2) = intri.depthToTexture.rotation[0][2];
	m_rgbExtrinsic.at<float>(0, 3) = intri.depthToTexture.translation[0];

	m_rgbExtrinsic.at<float>(1, 0) = intri.depthToTexture.rotation[1][0];
	m_rgbExtrinsic.at<float>(1, 1) = intri.depthToTexture.rotation[1][1];
	m_rgbExtrinsic.at<float>(1, 2) = intri.depthToTexture.rotation[1][2];
	m_rgbExtrinsic.at<float>(1, 3) = intri.depthToTexture.translation[1];

	m_rgbExtrinsic.at<float>(2, 0) = intri.depthToTexture.rotation[2][0];
	m_rgbExtrinsic.at<float>(2, 1) = intri.depthToTexture.rotation[2][1];
	m_rgbExtrinsic.at<float>(2, 2) = intri.depthToTexture.rotation[2][2];
	m_rgbExtrinsic.at<float>(2, 3) = intri.depthToTexture.translation[2];

	m_rgbDistortion = cv::Mat::zeros(1, 5, CV_32F);
	m_rgbDistortion.at<float>(0, 0) = rgbpdistortion[0];
	m_rgbDistortion.at<float>(0, 1) = rgbpdistortion[1];
	m_rgbDistortion.at<float>(0, 2) = rgbpdistortion[2];
	m_rgbDistortion.at<float>(0, 3) = rgbpdistortion[3];
	m_rgbDistortion.at<float>(0, 4) = rgbpdistortion[4];
	log_error("camera {} get param successed", m_id);
	return true;
}
bool CameraMMind::CreatePicturePath(const QString& path)
{
	QString time = QDateTime::currentDateTime().toString("yyyy-MM-dd");
	if (path.isEmpty()) {
		m_savePath = QCoreApplication::applicationDirPath() + "/pic/" + m_ip + "/" + time + "/";
	} else {
		m_savePath = QCoreApplication::applicationDirPath() + path + m_ip + "/" + time + "/";
	}

	QDir dir;
	if (!dir.exists(m_savePath)) {
		bool ret = dir.mkpath(m_savePath);
		if (ret) {
			log_trace("camera parh [{}] create successed", m_savePath);
			return true;
		} else {
			log_trace("camera parh [{}] create failed", m_savePath);
			return false;
		}
	}
	return true;
}

// Reprojects a depth image into the color camera's image plane so depth and
// color pixels line up. Returns a CV_32FC1 map of size iImageRows x iImageClos
// in which each pixel holds the depth value (z, same units as the input) of
// the 3D point that projects there; pixels hit by no point remain 0.
cv::Mat CameraMMind::ProjectionDepth2Color(const cv::Mat& depth, const int& iImageRows, const int& iImageClos)
{
	cv::Mat depthImage = depth.clone();
	// Undistort the depth pixels first when distortion coefficients exist
	// (i.e. the distortion member is not a plain empty cv::Mat()).
	if (!m_depthDistortion.empty()) {
		undistort(depth, depthImage, m_depthIntrinsic, m_depthDistortion);
	} else {
		depthImage = depth.clone();
	}

	// Extract fx/fy/cx/cy from the depth intrinsic, whichever precision it is
	// stored in.
	double dfx, dfy, dcx, dcy;
	if (m_depthIntrinsic.type() == CV_32F) {
		dfx = m_depthIntrinsic.at<float>(0, 0);
		dfy = m_depthIntrinsic.at<float>(1, 1);
		dcx = m_depthIntrinsic.at<float>(0, 2);
		dcy = m_depthIntrinsic.at<float>(1, 2);
	} else {
		dfx = m_depthIntrinsic.at<double>(0, 0);
		dfy = m_depthIntrinsic.at<double>(1, 1);
		dcx = m_depthIntrinsic.at<double>(0, 2);
		dcy = m_depthIntrinsic.at<double>(1, 2);
	}
	// NOTE(review): permanently-enabled `if (true)` swap. The loops below
	// iterate (row i, col j) but feed them in as (x, y), so fx/fy and cx/cy
	// are exchanged to compensate — presumably a deliberate axis-convention
	// fix left always-on; confirm against the camera's coordinate convention.
	if (true) {
		std::swap(dfx, dfy);// swap
		std::swap(dcx, dcy);// swap
	}
	std::vector<cv::Point3f> Points;// back-projected 3D points (depth camera frame)
	int type = depth.type();
	for (int i = 0; i < depth.rows; i++) {
		for (int j = 0; j < depth.cols; j++) {
			float fZ = 0;
			if (type == CV_16U) {
				fZ = depthImage.at<ushort>(i, j);// typical depth maps are 16-bit integers, in mm
			} else if (type == CV_32F) {
				fZ = depthImage.at<float>(i, j);
			}
			if (fZ < 0.0001) {
				continue;// skip zero-distance pixels (no measurement) — optimization
			}

			int Px = i;
			int Py = j;
			// Pinhole back-projection: pixel coordinate + depth -> 3D point.
			float fXc = (Px - dcx) * fZ / dfx;
			float fYc = (Py - dcy) * fZ / dfy;
			if (true) {
				std::swap(fXc, fYc);// swap X/Y back to the conventional order (see note above the first swap)
			}
			Points.push_back(cv::Point3f(fXc, fYc, fZ));
		}
	}

	// Project the 3D points onto the color camera's 2D plane using the
	// depth->texture extrinsic [R|t] plus the RGB intrinsics/distortion.
	cv::Mat Rvec, Tvec;
	cv::Mat RMatrix;
	cv::Mat SubMat = m_rgbExtrinsic(cv::Range(0, 3), cv::Range(0, 3));
	SubMat.copyTo(RMatrix);
	cv::Mat SubMat2 = m_rgbExtrinsic(cv::Range(0, 3), cv::Range(3, 4));
	SubMat2.copyTo(Tvec);
	Rodrigues(RMatrix, Rvec);// projectPoints expects a rotation vector, not a matrix

	std::vector<cv::Point2f> ImagePoints;// projected 2D pixel coordinates
	cv::projectPoints(Points, Rvec, Tvec, m_rgbIntrinsic, m_rgbDistortion, ImagePoints);

	// Scatter each point's depth into the output map at its projected pixel.
	cv::Mat pdepthImage = cv::Mat::zeros(iImageRows, iImageClos, CV_32FC1);// organized depth map
	for (int i = 0; i < ImagePoints.size(); i++) {
		const cv::Point2f& point = ImagePoints[i];
		int px = round(point.x);
		int py = round(point.y);

		// NOTE(review): `px > 0` / `py > 0` silently drops row/column 0 —
		// looks like it should be `>= 0`; confirm before changing.
		if (px > 0 && px < iImageClos && py>0 && py < iImageRows) {
			pdepthImage.at<float>(py, px) = Points.at(i).z;
		}
	}
	//cv::imwrite("./image/depth/a.png", pdepthImage);
	return pdepthImage;
}
