#include "ros/ros.h"
#include "fyt_msg/CameraParam.h"
#include "sensor_msgs/Image.h"
#include "cv_bridge/cv_bridge.h"
#include "image_transport/image_transport.h"
#include "opencv2/opencv.hpp"
#include "std_msgs/String.h"
#include "../inc/GxIAPI.h"
#include "../inc/DxImageProc.h"
#include "../inc/CircularQueue.h"
#include <boost/thread.hpp>
#define BYTE unsigned char

static cv::VideoCapture video_capture;      // fallback source when reading frames from a file/URL instead of the camera

GX_DEV_HANDLE       m_hDevice;              ///< Daheng camera device handle
unsigned char       *m_pBufferRaw;          ///< raw (Bayer) frame buffer, sized from GX_INT_PAYLOAD_SIZE
unsigned char       *m_pBufferRGB;          ///< RGB24 frame buffer used for publishing / saving bmp images
// int64_t           m_nImageHeight;         ///< original image height (superseded by resolution_height)
// int64_t           m_nImageWidth;          ///< original image width  (superseded by resolution_width)
int64_t             m_nPayLoadSize;         // payload size in bytes reported by the camera
int64_t             m_nPixelColorFilter;    ///< Bayer pattern reported by the sensor

cv::Mat camera_img;                         // latest RGB frame; filled in the SDK frame callback
sensor_msgs::Image img;                     // scratch message; only referenced from commented-out code below
image_transport::Publisher pub_image;       // publishes camera frames on "raw_img"
ros::Subscriber sub_cameraParam;            // subscription to "config_camera" (set up in main)
static bool source_from_camera;             // true -> Daheng camera; false -> video_capture file source
static int resolution_width;                // requested frame width, from CameraParam msg
static int resolution_height;               // requested frame height, from CameraParam msg
static int auto_white_balance;              // NOTE(review): never assigned in this file — presumably an AWB toggle; confirm
static int frame_rate;                      // requested acquisition frame rate, from CameraParam msg
static int auto_explosure;                  // NOTE(review): never assigned in this file — presumably auto-exposure toggle; confirm
static int explosure_time;                  // requested exposure time; re-applied to the camera inside init()'s loop
static int init_flag=0;                     // set to 1 once init() has registered the capture callback

// Applies a camera-configuration message from the "config_camera" topic.
// "camera" selects the Daheng device and caches its acquisition parameters;
// any other source string is treated as a file/stream path and (re)opened
// through video_capture, but only when it differs from the previous source.
void paramCallBack(const fyt_msg::CameraParamConstPtr &msg){
		static std::string last_source = "";
		const std::string requested = msg->source;
		if (requested == std::string("camera")) {
			// Live-camera mode: remember the requested acquisition settings.
			source_from_camera = true;
			resolution_height  = msg->resolution_height;
			resolution_width   = msg->resolution_width;
			frame_rate         = msg->frame_rate;
			explosure_time     = msg->explosure_time;
		} else if (requested != last_source) {
			// File/stream mode: reopen only when the source actually changed.
			source_from_camera = false;
			video_capture.open(cv::String(requested));
		}
		last_source = requested;
}

//Image frame callback handler (invoked by the Galaxy SDK for every acquired frame)
// SDK frame callback: copies the raw Bayer frame, converts it to RGB24 and
// publishes it on "raw_img". Runs on the Galaxy SDK's acquisition thread.
static void GX_STDC OnFrameCallbackFun(GX_FRAME_CALLBACK_PARAM* pFrame)
{
	// Ignore incomplete / errored frames.
	if (pFrame->status != 0)
	{
		return;
	}
	// BUG FIX: the memcpy calls below were unbounded. If the frame is larger
	// than the buffer allocated from GX_INT_PAYLOAD_SIZE, or the resolution
	// globals were changed (via config_camera) after camera_img was sized in
	// init(), the old code overflowed heap buffers. Drop such frames instead.
	if (m_pBufferRaw == NULL || m_pBufferRGB == NULL ||
		(int64_t)pFrame->nImgSize > m_nPayLoadSize ||
		camera_img.empty() ||
		camera_img.rows != resolution_height ||
		camera_img.cols != resolution_width)
	{
		return;
	}

	memcpy(m_pBufferRaw, pFrame->pImgBuf, pFrame->nImgSize);

	// Demosaic: raw Bayer -> RGB24, using the sensor's reported filter layout.
	DxRaw8toRGB24(m_pBufferRaw
		, m_pBufferRGB
		, (VxUint32)(resolution_width)
		, (VxUint32)(resolution_height)
		, RAW2RGB_NEIGHBOUR
		, DX_PIXEL_COLOR_FILTER(m_nPixelColorFilter)
		, false);

	memcpy(camera_img.data, m_pBufferRGB, resolution_width*resolution_height * 3);

	pub_image.publish(cv_bridge::CvImage(std_msgs::Header(), "rgb8", camera_img).toImageMsg());
}


bool init(){
			GX_STATUS emStatus = GX_STATUS_SUCCESS;
			GX_OPEN_PARAM openParam;
			uint32_t      nDeviceNum = 0;
			openParam.accessMode = GX_ACCESS_EXCLUSIVE;
			openParam.openMode = GX_OPEN_INDEX;
			openParam.pszContent = "1";
			// 初始化库 
			emStatus = GXInitLib();
			if (emStatus != GX_STATUS_SUCCESS)
			{
				printf("Can't init lib.\n");
				return 0;
			}
			// 枚举设备列表  
			emStatus = GXUpdateDeviceList(&nDeviceNum, 1000);
			if ((emStatus != GX_STATUS_SUCCESS) || (nDeviceNum <= 0))
			{
				printf("Can't find camera.\n");
				return 0;
			}
			//打开设备  
			emStatus = GXOpenDevice(&openParam, &m_hDevice);
			if (emStatus != GX_STATUS_SUCCESS)
			{
				printf("Camera open fail\n");
				return 0;
			}	

			
			// // 获取宽度  
			// emStatus = GXGetInt(m_hDevice, GX_INT_WIDTH, &m_nImageWidth);
			// // 获取高度  
			// emStatus = GXGetInt(m_hDevice, GX_INT_HEIGHT, &m_nImageHeight);

			// 设置宽度  
			emStatus = GXSetInt(m_hDevice, GX_INT_WIDTH, resolution_width);
			// 设置高度  
			emStatus = GXSetInt(m_hDevice, GX_INT_HEIGHT, resolution_height);

			// 获取图像大小  
			emStatus = GXGetInt(m_hDevice, GX_INT_PAYLOAD_SIZE, &m_nPayLoadSize);
			//设置采集模式连续采集  
			emStatus = GXSetEnum(m_hDevice, GX_ENUM_ACQUISITION_MODE, GX_ACQ_MODE_CONTINUOUS);
			emStatus = GXSetInt(m_hDevice, GX_INT_ACQUISITION_SPEED_LEVEL, 1);
			emStatus = GXSetEnum(m_hDevice, GX_ENUM_BALANCE_WHITE_AUTO, GX_BALANCE_WHITE_AUTO_CONTINUOUS);
			bool bColorFliter = false;

			
			// camera_img.create(m_nImageHeight, m_nImageWidth, CV_8UC3);
			camera_img.create(resolution_height, resolution_width, CV_8UC3);


			//设置自动白平衡
			GXSetEnum(m_hDevice, GX_ENUM_AWB_LAMP_HOUSE,1);

			//设置帧率
			GXSetFloat(m_hDevice, GX_FLOAT_ACQUISITION_FRAME_RATE, frame_rate);

			//设置曝光时间
			GXSetFloat(m_hDevice, GX_FLOAT_EXPOSURE_TIME, explosure_time);
			
			//设置彩色图像
			emStatus = GXGetEnum(m_hDevice, GX_ENUM_PIXEL_COLOR_FILTER, &m_nPixelColorFilter);


			// m_pBufferRGB = new unsigned char [(size_t)(m_nImageWidth * m_nImageHeight * 3)];
			m_pBufferRGB = new unsigned char [(size_t)(resolution_width * resolution_height * 3)];

			if (m_pBufferRGB == NULL)
			{
				return false;
			}
			//为存储原始图像数据申请空间  
			m_pBufferRaw = new unsigned char [(size_t)m_nPayLoadSize];
			if (m_pBufferRaw == NULL)
			{
				delete[]m_pBufferRGB;
				m_pBufferRGB = NULL;
				return false;
			}
			//注册图像处理回调函数 
			std::cout<<"ready open daheng"<<std::endl; 
			init_flag=1;
			emStatus = GXRegisterCaptureCallback(m_hDevice, NULL, OnFrameCallbackFun);
			//发送开采命令  
			emStatus = GXSendCommand(m_hDevice, GX_COMMAND_ACQUISITION_START);
			//---------------------  
			//在这个区间图像会通过OnFrameCallbackFun接口返给用户 
			std::cout<<"输入ctrl+c关闭程序"<<std::endl;

			while(ros::ok()){
					ros::NodeHandle h;
					ros::Subscriber sub;
					sub=h.subscribe("config_camera",1,paramCallBack);
					ros::spinOnce();
					GXSetFloat(m_hDevice, GX_FLOAT_EXPOSURE_TIME, explosure_time);
			}

			//---------------------  
			//发送停采命令  
			emStatus = GXSendCommand(m_hDevice, GX_COMMAND_ACQUISITION_STOP);
			//注销采集回调  
			emStatus = GXUnregisterCaptureCallback(m_hDevice);
			if (m_pBufferRGB != NULL)
			{
				delete[]m_pBufferRGB;
				m_pBufferRGB = NULL;
			}
			if (m_pBufferRaw != NULL)
			{
				delete[]m_pBufferRaw;
				m_pBufferRaw = NULL;
			}
			emStatus = GXCloseDevice(m_hDevice);
			emStatus = GXCloseLib();
			return 0;
			}

// Node entry point: subscribes to "config_camera" and, depending on the
// configured source, either runs the Daheng camera driver (init() blocks and
// publishes via pub_image) or reads frames from video_capture and publishes
// them on "raw_img".
int main(int argc,char **argv){
    ros::init(argc,argv,"image_catcher_node");
    ros::NodeHandle n;
    cv::Mat src;
    sub_cameraParam=n.subscribe("config_camera",1,paramCallBack);

    // NOTE(review): pub_img and the image_transport pub_image below both
    // advertise "raw_img" — kept as-is to preserve existing topic behavior.
    ros::Publisher pub_img=n.advertise<sensor_msgs::Image>("raw_img",1);
    ros::Publisher pub_info=n.advertise<std_msgs::String>("send_info",1);

    ros::NodeHandle nh;
    image_transport::ImageTransport it(nh);
    pub_image = it.advertise("raw_img", 1);
    sensor_msgs::ImagePtr img_msg;

    while(ros::ok()){
        ros::spinOnce();
        if(source_from_camera){
            // Retry camera initialization until the callback is registered.
            while(init_flag==0)
            {
                init();
                if(init_flag==0){
                    printf("Please reinsert the device\n");
                    printf("### Error, start again! ###\n\n");
                }
                // BUG FIX: cv::waitKey(2000) was used as a retry delay, but
                // waitKey is a no-op without an open HighGUI window, so the
                // loop spun without pausing. Use a real sleep instead.
                sleep(2);
            }
        }
        else{
            if(!video_capture.read(src)){
                std_msgs::String msg;
                msg.data="video source invalid. please check the filename.";
                pub_info.publish(msg);
                sleep(3);
                continue;
            }
            img_msg=cv_bridge::CvImage(std_msgs::Header(),"bgr8",src).toImageMsg();
            pub_img.publish(img_msg);
            usleep(50000);   // ~20 fps pacing for the file source
        }
    }
}
