#include <limits>
#include <cassert>
#include <cmath>
#include "../common/common.hpp"
#include <TYImageProc.h>

// Registration direction switch between the depth and color images:
// 1 = map the depth image into the color camera coordinate frame,
// 0 = map the color image into the depth camera coordinate frame.
#define MAP_DEPTH_TO_COLOR  1
// NOTE: enabling the depth-rendering viewers below reduces the frame rate.
DepthViewer depthViewer0("OrgDepth");// renders the original depth image
DepthViewer depthViewer1("FillHoleDepth");// renders the depth image after hole filling
DepthViewer depthViewer2("SpeckleFilterDepth"); // renders the depth image after speckle filtering
DepthViewer depthViewer3("MappedDepth"); // renders the depth image mapped into the color coordinate frame

// Event callback: logs device-offline and license-error notifications
// reported by the SDK. `userdata` is unused.
void eventCallback(TY_EVENT_INFO *event_info, void *userdata)
{
	switch (event_info->eventId) {
	case TY_EVENT_DEVICE_OFFLINE:
		LOGD("=== Event Callback: Device Offline!");
		// Note: 
		//     Please set TY_BOOL_KEEP_ALIVE_ON OFF feature to false if you need to debug with breakpoint!
		break;
	case TY_EVENT_LICENSE_ERROR:
		LOGD("=== Event Callback: License Error!");
		break;
	default:
		break;
	}
}

// Format conversion helper:
// translate an OpenCV pixel-format code into the matching TY_PIXEL_FORMAT.
// Formats this sample does not use map to TY_PIXEL_FORMAT_UNDEFINED.
static int cvpf2typf(int cvpf)
{
    if (cvpf == CV_8U)    return TY_PIXEL_FORMAT_MONO;
    if (cvpf == CV_8UC3)  return TY_PIXEL_FORMAT_RGB;
    if (cvpf == CV_16UC1) return TY_PIXEL_FORMAT_DEPTH16;
    return TY_PIXEL_FORMAT_UNDEFINED;
}

// Format conversion helper:
// fill a TY_IMAGE_DATA descriptor so it aliases the pixel buffer of `mat`.
// No pixels are copied — `data.buffer` points into `mat`, so `mat` must
// outlive `data`. `comp` is the TY component ID the image belongs to.
static void mat2TY_IMAGE_DATA(int comp, const cv::Mat& mat, TY_IMAGE_DATA& data)
{
    data.width       = mat.cols;
    data.height      = mat.rows;
    data.size        = mat.total() * mat.elemSize();
    data.pixelFormat = cvpf2typf(mat.type());
    data.buffer      = mat.data;
    data.componentID = comp;
    data.status      = 0;
}

// Per-frame callback context shared between main() and frameHandler().
struct CallbackData {
    int             index;    // running frame counter, incremented per frame
    TY_DEV_HANDLE   hDevice;  // opened device handle, used to re-enqueue buffers
    TY_CAMERA_INTRINSIC* intri_depth;  // depth camera intrinsics (points at a local in main)
	TY_CAMERA_INTRINSIC* intri_color;  // color camera intrinsics (points at a local in main)
	TY_CAMERA_CALIB_INFO depth_calib;  // depth camera calibration data
	TY_CAMERA_CALIB_INFO color_calib;  // color camera calibration data
	float           scale_unit;  // depth scale read from TY_FLOAT_SCALE_UNIT
	TY_ISP_HANDLE   isp_handle;  // software ISP handle
    bool saveOneFramePoint3d;  // set when 's' is pressed; save one point cloud then reset
    int  fileIndex;            // suffix counter for saved point-cloud/image files
};
CallbackData cb_data;             // global callback context passed to frameHandler
TY_ISP_HANDLE isp_handle = NULL;  // global software ISP handle, released at shutdown

// Depth image to point cloud.
// Convert a 16-bit depth image into an organized point cloud (CV_32FC3, one
// 3D point per pixel, in the depth camera frame).
//
// intr       : 3x3 pinhole intrinsics in row-major order
//              (fx = intr[0], fy = intr[4], cx = intr[2], cy = intr[5]).
// depth      : CV_16UC1 depth image; a raw value of 0 marks an invalid pixel.
// scale_unit : depth scale factor applied to raw depth values.
//
// Invalid pixels are emitted as (NAN, NAN, NAN) so downstream code can skip them.
cv::Mat depthToWorld(float* intr, const cv::Mat &depth, float scale_unit)
{
    cv::Mat world(depth.rows, depth.cols, CV_32FC3);
    const float cx = intr[2];
    const float cy = intr[5];
    // Fold scale_unit into the inverse focal lengths so the per-pixel work is
    // two multiplies instead of a multiply plus a division.
    const float inv_fx = scale_unit / intr[0];
    const float inv_fy = scale_unit / intr[4];
    for (int r = 0; r < depth.rows; r++)
    {
        // ptr<>() honors the row stride, so this is correct even for
        // non-continuous Mats (e.g. ROI views), unlike the previous raw
        // pointer arithmetic on Mat::data.
        const uint16_t* pSrc = depth.ptr<uint16_t>(r);
        cv::Vec3f* pDst = world.ptr<cv::Vec3f>(r);
        for (int c = 0; c < depth.cols; c++)
        {
            const uint16_t z = pSrc[c];
            if (z == 0) {
                pDst[c] = cv::Vec3f(NAN, NAN, NAN);
            } else {
                pDst[c][0] = (c - cx) * z * inv_fx;
                pDst[c][1] = (r - cy) * z * inv_fy;
                pDst[c][2] = z * scale_unit;
            }
        }
    }
    return world;
}

// Undistort the color image, then register one stream into the other camera's
// coordinate frame.
//
// depth_calib / color_calib : calibration data of the two cameras.
// depth           : CV_16UC1 depth image.
// f_scale_unit    : depth scale factor.
// color           : distorted RGB color image (CV_8UC3).
// undistort_color : out — undistorted color image (CV_8UC3).
// out             : out — if map_depth_to_color, a CV_16U depth image in the
//                   color frame; otherwise a CV_8UC3 color image in the depth frame.
// map_depth_to_color : selects the registration direction (see MAP_DEPTH_TO_COLOR).
static void doRegister(const TY_CAMERA_CALIB_INFO& depth_calib
	, const TY_CAMERA_CALIB_INFO& color_calib
	, const cv::Mat& depth
	, const float f_scale_unit
	, const cv::Mat& color
	, cv::Mat& undistort_color
	, cv::Mat& out
	, bool map_depth_to_color
)
{
	// Undistort the color image. The descriptors are value-initialized so that
	// no field (status, componentID, ...) reaches the SDK with indeterminate
	// content — the original left those fields uninitialized.
	TY_IMAGE_DATA src{};
	src.width = color.cols;
	src.height = color.rows;
	src.size = color.size().area() * 3;
	src.pixelFormat = TY_PIXEL_FORMAT_RGB;
	src.buffer = color.data;

	undistort_color = cv::Mat(color.size(), CV_8UC3);
	TY_IMAGE_DATA dst{};
	dst.width = color.cols;
	dst.height = color.rows;
	dst.size = undistort_color.size().area() * 3;
	dst.buffer = undistort_color.data;
	dst.pixelFormat = TY_PIXEL_FORMAT_RGB;
	ASSERT_OK(TYUndistortImage(&color_calib, &src, NULL, &dst));

	// Register the streams in the requested direction.
	if (map_depth_to_color) {
		// Depth -> color frame: output is a depth image sized like the color image.
		out = cv::Mat::zeros(undistort_color.size(), CV_16U);
		ASSERT_OK(
			TYMapDepthImageToColorCoordinate(
				&depth_calib,
				depth.cols, depth.rows, depth.ptr<uint16_t>(),
				&color_calib,
				out.cols, out.rows, out.ptr<uint16_t>(), f_scale_unit)
		);
	}
	else {
		// Color -> depth frame: output is a color image sized like the depth image.
		out = cv::Mat::zeros(depth.size(), CV_8UC3);
		ASSERT_OK(
			TYMapRGBImageToDepthCoordinate(
				&depth_calib,
				depth.cols, depth.rows, depth.ptr<uint16_t>(),
				&color_calib,
				undistort_color.cols, undistort_color.rows, undistort_color.ptr<uint8_t>(),
				out.ptr<uint8_t>(), f_scale_unit)
		);
	}
}


// Frame handler: parses one captured frame, optionally filters the depth
// image, registers depth/color into a common frame, saves point clouds on
// request, and finally re-enqueues the frame buffer to the SDK.
void frameHandler(TY_FRAME_DATA* frame, void* userdata)
{
    CallbackData* pData = (CallbackData*) userdata;
    LOGD("=== Get frame %d", ++pData->index);

	std::vector<TY_VECT_3F> P3dtoColor;// point cloud registered to the color frame
    cv::Mat depth, color, p3d, newP3d;
	// Parse the frame without the software ISP.
	parseFrame(*frame, &depth, 0, 0, &color);// fetch the depth and color images
	//parseFrame(*frame, &depth, 0, 0, 0);// fetch the depth image only
	// Parse the frame with the software ISP enabled (reduces the frame rate):
	//parseFrame(*frame, &depth, 0, 0, &color, pData->isp_handle);

	// Hole-filling switch.
	bool FillHole = 0;
	// Speckle-filter switch: removes isolated noise points from the depth image.
	bool SpeckleFilter = 1;

	// Depth image processing.
	if (!depth.empty())
	{
		if (FillHole)
		{
			// Fill holes in the depth image (in place).
			DepthInpainter inpainter;
			inpainter._kernelSize = 10;
			inpainter._maxInternalHoleToBeFilled = 1800;
			inpainter._fillAll = false;
			inpainter.inpaint(depth, depth, cv::Mat());
			depthViewer1.show(depth);
		}
		if (SpeckleFilter)
		{
			// Apply the speckle filter on a copy of the depth image.
			TY_IMAGE_DATA tyFilteredDepth;
			cv::Mat filteredDepth(depth.size(), depth.type());
			filteredDepth = depth.clone();
			mat2TY_IMAGE_DATA(TY_COMPONENT_DEPTH_CAM, filteredDepth, tyFilteredDepth);
			struct DepthSpeckleFilterParameters sfparam = DepthSpeckleFilterParameters_Initializer;
			sfparam.max_speckle_size = 300;// speckles smaller than this area are removed
			sfparam.max_speckle_diff = 12;// neighbors differing by more than this are treated as noise
			TYDepthSpeckleFilter(&tyFilteredDepth, &sfparam);
			// Render the speckle-filtered depth image.
			depthViewer2.show(filteredDepth);
			// Point cloud from the filtered depth, CV_32FC3 format.
			newP3d = depthToWorld(pData->intri_depth->data, filteredDepth, pData->scale_unit);
			depth = filteredDepth.clone();

			//// Save the filtered depth image:
			//char file[32];
			//sprintf(file, "depth-%d.png", pData->fileIndex++);
			//cv::imwrite(file, filteredDepth);
		}
		else if (!FillHole&&!SpeckleFilter)
		{
			// Render the original depth image.
			depthViewer0.show(depth);
			// Point cloud from the unfiltered depth.
			p3d = depthToWorld(pData->intri_depth->data, depth, pData->scale_unit);
		}
	}
	
	// Color image processing.
	cv::Mat color_data_mat,p3dtocolorMat;
	if (!color.empty()) {
		// Show the original color image.
		imshow("orgColor", color);
		cv::Mat undistort_color, MappedDepth;
		if (MAP_DEPTH_TO_COLOR)
		{
			// Undistort the color image and map the depth image into the color frame.
			doRegister(pData->depth_calib, pData->color_calib, depth, pData->scale_unit, color,undistort_color, MappedDepth, MAP_DEPTH_TO_COLOR);
			// Show the undistorted color image.
			imshow("undistort_color", undistort_color);
			// Show the depth image registered to the color frame.
			depthViewer3.show(MappedDepth);
			// Pixel format conversion for the saved/combined output.
			cv::cvtColor(undistort_color, color_data_mat, CV_BGR2RGB);
			
			// Build the color-registered point cloud; two alternatives:
			// Option 1: store it as TY_VECT_3F in P3dtoColor:
			/*P3dtoColor.resize(MappedDepth.size().area());
			ASSERT_OK(TYMapDepthImageToPoint3d(&pData->color_calib, MappedDepth.cols, MappedDepth.rows
				, (uint16_t*)MappedDepth.data, &P3dtoColor[0]));*/
			// Option 2: store it as a CV_32FC3 Mat in p3dtocolorMat:
			p3dtocolorMat = depthToWorld(pData->intri_color->data, MappedDepth, pData->scale_unit);
		}
		else
		{
			// Undistort the color image and map it into the depth frame.
		    doRegister(pData->depth_calib, pData->color_calib, depth, pData->scale_unit, color, undistort_color, color_data_mat, MAP_DEPTH_TO_COLOR);
			// Show the undistorted color image.
			imshow("undistort_color", undistort_color);
			// Show the color image registered to the depth frame.
			imshow("MappedColor", color_data_mat);
			// Pixel format conversion.
			cv::cvtColor(color_data_mat, color_data_mat, CV_BGR2RGB);
		}

		// Save the registered color image:
		/*char file[32];
		sprintf(file, "color-%d.png", pData->fileIndex++);
		cv::imwrite(file, color_data_mat);*/
	}
	// Save the point cloud when requested (press 's' in a display window).
	if (pData->saveOneFramePoint3d) {
		char file[32];
		if (MAP_DEPTH_TO_COLOR)
		{
			LOGD("Save p3dtocolor now!!!");
			// Save the color-registered point cloud in XYZ format.
			sprintf(file, "p3dtocolor-%d.xyz", pData->fileIndex++);
			writePointCloud((cv::Point3f*)p3dtocolorMat.data, (const cv::Vec3b*)color_data_mat.data, p3dtocolorMat.total(), file, PC_FILE_FORMAT_XYZ);
		}
		else
		{
			LOGD("Save point3d now!!!");
			// Save the point cloud in XYZ format.
			// NOTE(review): newP3d is only populated when SpeckleFilter is on —
			// with both switches off this writes an empty cloud; confirm intent.
			sprintf(file, "points-%d.xyz", pData->fileIndex++);
			writePointCloud((cv::Point3f*)newP3d.data, (const cv::Vec3b*)color_data_mat.data, newP3d.total(), file, PC_FILE_FORMAT_XYZ);
			// Save the point cloud in PLY format:
			//sprintf(file, "points-%d.ply", pData->fileIndex++);
			//writePointCloud((cv::Point3f*)newP3d.data, (const cv::Vec3b*)color_data_mat.data, newP3d.total(), file, PC_FILE_FORMAT_PLY);
		}
		pData->saveOneFramePoint3d = false;
		
	}
	// Return the frame buffer to the SDK queue.
    LOGD("=== Re-enqueue buffer(%p, %d)", frame->userBuffer, frame->bufferSize);
    ASSERT_OK( TYEnqueueBuffer(pData->hDevice, frame->userBuffer, frame->bufferSize) );
	TYISPUpdateDevice(pData->isp_handle);
}

int main(int argc, char* argv[])
{
    std::string ID, IP;
    TY_INTERFACE_HANDLE hIface = NULL;
    TY_DEV_HANDLE hDevice = NULL;
    TY_CAMERA_INTRINSIC intri_depth;
	TY_CAMERA_INTRINSIC intri_color;
	int32_t resend = 1;

    for(int i = 1; i < argc; i++){
        if(strcmp(argv[i], "-id") == 0){
            ID = argv[++i];
        } else if(strcmp(argv[i], "-ip") == 0) {
            IP = argv[++i];
        } else if(strcmp(argv[i], "-h") == 0){
            LOGI("Usage: SimpleView_Callback [-h] [-id <ID>]");
            return 0;
        }
    }

    LOGD("=== Init lib");
    ASSERT_OK( TYInitLib() );
    TY_VERSION_INFO ver;
    ASSERT_OK( TYLibVersion(&ver) );
    LOGD("     - lib version: %d.%d.%d", ver.major, ver.minor, ver.patch);

    std::vector<TY_DEVICE_BASE_INFO> selected;
	//选择相机
    ASSERT_OK( selectDevice(TY_INTERFACE_ALL, ID, IP, 1, selected) );
    ASSERT(selected.size() > 0);
	//默认加载第一个相机
    TY_DEVICE_BASE_INFO& selectedDev = selected[0];
	//打开接口和设备
    ASSERT_OK( TYOpenInterface(selectedDev.iface.id, &hIface) );
    ASSERT_OK( TYOpenDevice(hIface, selectedDev.id, &hDevice) );

	//使能彩色相机
	//try to enable color camera
	LOGD("Has RGB camera, open RGB cam");
	ASSERT_OK(TYEnableComponents(hDevice, TY_COMPONENT_RGB_CAM));

	//设置彩色相机像素格式和分辨率
	LOGD("=== Configure feature, set RGB resolution");
	//方法一:直接设置像素格式和分辨率
	//ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_BAYER8GB_640x480));
	// ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_YUYV_640x360));

	//不同型号图漾相机的彩色像素格式和分辨率不同，可参考下面;
	//For different percipio 3d camera,choose the image mode below.
	//YUYV_1920x1080,YUYV_1280x720,YUYV_640x360 for FS820
	//YUYV_1280x960,YUYV_640x480 for FM811-GIX-E1,FM830-45RI
	//YUYV_1280x960,YUYV_640x480or JPEG_2592x1944 for PM801/PM802
	//BAYER8GB_1280x960,BAYER8GB_640x480,for FM811-E2/FM851-E2/FS830-E2

	//方法二:通过枚举相机支持的图像模式，结合图像宽度选定分辨率，不关注像素格式
	TY_STATUS status = TY_STATUS_OK;
	if (TY_COMPONENT_RGB_CAM) {
		std::vector<TY_ENUM_ENTRY> image_mode_list;
		status = get_feature_enum_list(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, image_mode_list);
		for (int idx = 0; idx < image_mode_list.size(); idx++) {
			TY_ENUM_ENTRY &entry = image_mode_list[idx];
			//try to select a vga resolution
			if (TYImageWidth(entry.value) == 1280) {
				LOGD("Select RGB Image Mode: %s", entry.description);
				int err = TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, entry.value);
				ASSERT(err == TY_STATUS_OK || err == TY_STATUS_NOT_PERMITTED);
				break;
			}
		}
	}
	
	//读取彩色相机标定数据
	LOGD("=== Get color intrinsic");
	ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_INTRINSIC, &intri_color, sizeof(intri_color)));
	LOGD("=== Read color calib data");
	ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_CALIB_DATA
		, &cb_data.color_calib, sizeof(cb_data.color_calib)));

	//软ISP功能，仅针对BAYER8GB格式彩色相机，开启后可实现一定程度的自动白平衡和自动曝光，会降低帧率。
	//create a default isp handle for bayer rgb images
	ASSERT_OK(TYISPCreate(&isp_handle)); 
	cb_data.isp_handle = isp_handle;
	ASSERT_OK(ColorIspInitSetting(isp_handle, hDevice));
	//You can turn on auto exposure function as follow ,but frame rate may reduce .
	//Device also may be casually stucked  1~2 seconds when software trying to adjust device exposure time value
#if 0 //置1则开启软ISP
	ASSERT_OK(ColorIspInitAutoExposure(isp_handle, hDevice));
#endif

	//目前图漾相机无法直接保存用户设置的相机参数，因此需要在软件初始化时完成相机参数设置
	//不同型号相机具备不同的参数属性，可以使用PercipioViewer看图软件确认相机支持的参数属性和参数取值范围

	//硬ISP功能,仅部分相机的RGB支持硬ISP。
	//3A Control only for hardware ISP
	//获取RGB是否支持自动曝光，自动增益，自动白平衡属性
	bool hasAUTOEXPOSURE, hasAUTOGAIN, hasAUTOAWB;
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_EXPOSURE, &hasAUTOEXPOSURE));
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_GAIN, &hasAUTOGAIN));
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_AWB, &hasAUTOAWB));
	if (hasAUTOEXPOSURE)
	{
		ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_EXPOSURE, true));//turn on AEC 
	}
	if (hasAUTOGAIN)
	{
		ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_GAIN, true));//turn on AGC
	}
	if (hasAUTOAWB)
	{
		ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_AWB, true));//turn on AWB
	}


	//获取RGB支持的属性
	bool hasRGB_ANALOG_GAIN, hasRGB_R_GAIN, hasRGB_G_GAIN, hasRGB_B_GAIN, hasRGB_EXPOSURE_TIME;
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_ANALOG_GAIN, &hasRGB_ANALOG_GAIN));
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_R_GAIN, &hasRGB_R_GAIN));
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_G_GAIN, &hasRGB_G_GAIN));
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_B_GAIN, &hasRGB_B_GAIN));
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_EXPOSURE_TIME, &hasRGB_EXPOSURE_TIME));

	if (hasRGB_ANALOG_GAIN)
	{
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_ANALOG_GAIN, 2));//设置RGB模拟增益
	}
	if (hasRGB_R_GAIN)
	{
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_R_GAIN, 150));//设置RGB数字增益R通道
	}
	if (hasRGB_G_GAIN)
	{
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_G_GAIN, 80));//设置RGB数字增益G通道
	}
	if (hasRGB_B_GAIN)
	{
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_B_GAIN, 150));//设置RGB数字增益B通道
	}
	if (hasRGB_EXPOSURE_TIME)
	{
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_EXPOSURE_TIME, 1000));//设置RGB曝光时间
	}
	
	
	//使能深度相机
	//try to enable depth cam
    LOGD("=== Configure components, open depth cam");
    int32_t componentIDs = TY_COMPONENT_DEPTH_CAM;
    ASSERT_OK( TYEnableComponents(hDevice, componentIDs) );

	//设置深度图分辨率
	LOGD("=== Configure feature, set depth resolution");
	//方法一:直接设置分辨率
	//ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_DEPTH16_640x480));

	//不同型号相机分辨率不同，下面仅供参考;
	//choose different mode for the percipio 3D camera
	//DEPTH16_1280x800,DEPTH16_640x400,DEPTH16_320x200 for FS820
	//DEPTH16_1280x960,DEPTH16_640x480,DEPTH16_320x240 for FM811/FM851/PM801

	//方法二:通过枚举相机支持的图像模式，结合图像宽度选定分辨率，不关注具体分辨率
	if (TY_COMPONENT_DEPTH_CAM) {
		std::vector<TY_ENUM_ENTRY> image_mode_list;
		status = get_feature_enum_list(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, image_mode_list);
		for (int idx = 0; idx < image_mode_list.size(); idx++) {
			TY_ENUM_ENTRY &entry = image_mode_list[idx];
			//try to select a vga resolution
			if (TYImageWidth(entry.value) == 1280) {
				LOGD("Select Depth Image Mode: %s", entry.description);
				int err = TYSetEnum(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, entry.value);
				ASSERT(err == TY_STATUS_OK || err == TY_STATUS_NOT_PERMITTED);
				status = TYEnableComponents(hDevice, TY_COMPONENT_DEPTH_CAM);
				break;
			}
		}
	}

	//读取深度相机内参和深度相机标定数据
	LOGD("=== Get depth intrinsic");
	ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_INTRINSIC, &intri_depth, sizeof(intri_depth)));
	LOGD("=== Read depth calib data");
	ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_CALIB_DATA
		, &cb_data.depth_calib, sizeof(cb_data.depth_calib)));

	//设置左右IR的模拟增益，数字增益和曝光
	//adjust the gain and exposure of Left&Right IR camera
	//获取左右IR支持的属性
	bool hasIR_ANALOG_GAIN, hasIR_GAIN, hasIR_EXPOSURE_TIME;
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_ANALOG_GAIN, &hasIR_ANALOG_GAIN));
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_GAIN, &hasIR_GAIN));
	ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_EXPOSURE_TIME, &hasIR_EXPOSURE_TIME));
	if (hasIR_ANALOG_GAIN)
	{
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_ANALOG_GAIN, 2));
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_INT_ANALOG_GAIN, 2));//设置左右IR模拟增益
	}
	if (hasIR_GAIN)
	{
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_GAIN, 32));
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_INT_GAIN, 32));//设置左右IR数字增益
	}
	if (hasIR_EXPOSURE_TIME)
	{
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_EXPOSURE_TIME, 500));
		ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_INT_EXPOSURE_TIME, 500)); //设置左右IR曝光时间
	}

	//设置激光器亮度，默认不用设置，除非深度图过曝
	//adjust the laser power
	ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_LASER, TY_INT_LASER_POWER, 100));// range（0，100）

	//左右IR CMOS同步开关，默认开启，若关闭则可能提升帧率但会影响图像质量
	//Cmos sync switch,Closing the cmos sync can increase fps in 640*480 resolution,but may cause poor depth image.
	TYSetBool(hDevice, TY_COMPONENT_DEVICE, TY_BOOL_CMOS_SYNC, false);

	//数据流同步模式，默认RGB和Depth同步输出
	//stream async switch, see TY_STREAM_ASYNC_MODE
	//TYSetEnum(hDevice, TY_COMPONENT_DEVICE, TY_ENUM_STREAM_ASYNC, TY_STREAM_ASYNC_OFF);

	
	//获取所需Buffer大小
    LOGD("=== Prepare image buffer");
    uint32_t frameSize;
    ASSERT_OK( TYGetFrameBufferSize(hDevice, &frameSize) );
    LOGD("     - Get size of framebuffer, %d", frameSize);
	//分配两个Buffer，并压入队列
    LOGD("     - Allocate & enqueue buffers");
    char* frameBuffer[2];
    frameBuffer[0] = new char[frameSize];
    frameBuffer[1] = new char[frameSize];
    LOGD("     - Enqueue buffer (%p, %d)", frameBuffer[0], frameSize);
    ASSERT_OK( TYEnqueueBuffer(hDevice, frameBuffer[0], frameSize) );
    LOGD("     - Enqueue buffer (%p, %d)", frameBuffer[1], frameSize);
    ASSERT_OK( TYEnqueueBuffer(hDevice, frameBuffer[1], frameSize) );

	//注册事件回调
	LOGD("Register event callback");
	ASSERT_OK(TYRegisterEventCallback(hDevice, eventCallback, NULL));

	//触发模式设置
    LOGD("=== enable trigger mode");
    TY_TRIGGER_PARAM trigger;
		//连续采集模式			 
    trigger.mode = TY_TRIGGER_MODE_OFF;
		//软触发和硬触发模式					  
	//trigger.mode = TY_TRIGGER_MODE_SLAVE;
    ASSERT_OK(TYSetStruct(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &trigger, sizeof(trigger)));

	//网口相机，启用丢包重传功能
	//for network only
	LOGD("=== resend: %d", resend);
	if (resend) {
		bool hasResend;
		ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEVICE, TY_BOOL_GVSP_RESEND, &hasResend));
		if (hasResend) {
			LOGD("=== Open resend");
			ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_DEVICE, TY_BOOL_GVSP_RESEND, true));
		}
		else {
			LOGD("=== Not support feature TY_BOOL_GVSP_RESEND");
		}
	}
	//开始采集
    LOGD("=== Start capture");
    ASSERT_OK( TYStartCapture(hDevice) );

	//回调数据初始化
    cb_data.index = 0;
    cb_data.hDevice = hDevice;
    cb_data.saveOneFramePoint3d = false;
    cb_data.fileIndex = 0;
    cb_data.intri_depth = &intri_depth;
	cb_data.intri_color = &intri_color;

	float scale_unit = 1.;
	TYGetFloat(hDevice, TY_COMPONENT_DEPTH_CAM, TY_FLOAT_SCALE_UNIT, &scale_unit);
	cb_data.scale_unit = scale_unit;

	//循环取图
    LOGD("=== While loop to fetch frame");
    TY_FRAME_DATA frame;
    bool exit_main = false;
	int index = 0;
    while(!exit_main){
        int key = cv::waitKey(1);
        switch(key & 0xff){
            case 0xff:
                break;
            case 'q':
                exit_main = true;
                break;
            case 's':
                cb_data.saveOneFramePoint3d = true;//图片显示窗口上按s键则存一张点云图
                break;
            default:
                LOGD("Pressed key %d", key);
        }
		//发送一次软触发
		//ASSERT_OK(TYSendSoftTrigger(hDevice));
		//获取帧，默认超时设置为20s
        int err = TYFetchFrame(hDevice, &frame, 20000);
        if( err != TY_STATUS_OK ){
            LOGD("... Drop one frame");
            continue;
        }
		if (err == TY_STATUS_OK) {
			LOGD("Get frame %d", ++index);
			int fps = get_fps();
			if (fps > 0) {
				LOGI("fps: %d", fps);
			}
		}
        frameHandler(&frame, &cb_data);
    }

    ASSERT_OK( TYStopCapture(hDevice) );
    ASSERT_OK( TYCloseDevice(hDevice) );
    ASSERT_OK( TYCloseInterface(hIface) );
    ASSERT_OK( TYDeinitLib() );
    delete frameBuffer[0];
    delete frameBuffer[1];
	if (isp_handle) {
		TYISPRelease(&isp_handle);
	}
    LOGD("=== Main done!");
    return 0;
}
