#include <unistd.h>
#include <error.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <pthread.h>
#include <linux/videodev2.h>
#include <sys/mman.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <iostream>
#include <iomanip>
#include <string>

#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/Image.h>
#include <camera_info_manager/camera_info_manager.h>
#include <sstream>
#include <std_srvs/Empty.h>

using namespace std;

#define CLEAR(x) memset(&(x), 0, sizeof(x))

#define IMAGEWIDTH 3264
#define IMAGEHEIGHT 2448

// static unsigned short bmp_header_rgb888[]={ //cq
// /* 0000000: */ 0x4d42, 0x3036, 0x002a, 0x0000, 0x0000, 0x0036, 0x0000, 0x0028,
// /* 0000010: */ 0x0000, (unsigned short)WIDTH, 0x0000, (unsigned short)HEIGHT, 0x0000, 0x0001, 0x0018, 0x0000,
// /* 0000020: */ 0x0000, 0x3000, 0x002a, 0x0b13, 0x0000, 0x0b13, 0x0000, 0x0000,
// /* 0000030: */ 0x0000, 0x0000, 0x0000,
// };

class V4L2Capture {
public:
    //---ros
    ros::NodeHandle node_;
    // shared image messages for the two published cameras
    sensor_msgs::Image img_left;
    image_transport::CameraPublisher image_pub_left;
    sensor_msgs::Image img_right;
    image_transport::CameraPublisher image_pub_right;

    //---calibration managers (serve camera_info alongside each image topic)
    boost::shared_ptr<camera_info_manager::CameraInfoManager> cinfo_left;
    boost::shared_ptr<camera_info_manager::CameraInfoManager> cinfo_right;

    // One memory-mapped V4L2 capture buffer (filled by initBuffers()).
    struct cam_buffer
    {
            void* start;            // address returned by mmap()
            unsigned int length;    // size of the mapping in bytes
    };

    string dev_name_, camera_name_left_, camera_name_right_, left_info_url_, right_info_url_;
    char* devName;          // C-string view of dev_name_; valid while dev_name_ is unmodified
    int capW_;	int capH_;  // requested capture size of the combined frame (pixels)
    int fd_cam;             // V4L2 device file descriptor, -1 when closed
    cam_buffer *buffers;    // array of n_buffers mapped buffers, NULL until initBuffers()
    unsigned int n_buffers;
    int frameIndex;         // index of the buffer currently dequeued, -1 if none
    // FIX: was `char save_img_name = "cpptest.bmp";`, which is ill-formed —
    // a single char cannot be initialized from a string literal.
    string save_img_name = "cpptest.bmp";
    bool save_img = true;   // saveImage() keeps writing frames while true
    int num_save = 0;       // number of frames written so far by saveImage()

    // Reads parameters from the private namespace, advertises the left/right
    // camera topics and loads the calibration via camera_info_manager.
    V4L2Capture() : node_("~"){
        // NOTE(review): `it` is a local, but the CameraPublishers it returns
        // carry their own shared implementation state, so they remain valid
        // after `it` is destroyed.
        image_transport::ImageTransport it(node_);
        image_pub_left = it.advertiseCamera("left/image_raw", 1);
        image_pub_right = it.advertiseCamera("right/image_raw", 1);
        node_.param("video_device", dev_name_, std::string("/dev/video0"));
        node_.param("camera_name_left", camera_name_left_, std::string("cam_left"));
        node_.param("camera_name_right", camera_name_right_, std::string("cam_right"));
        node_.param("left_info_url", left_info_url_, std::string(""));
        node_.param("right_info_url", right_info_url_, std::string(""));
        // Default: four 1280x720 sensors tiled side by side in one frame
        // (see publishImage(), which splits the first two tiles out).
        node_.param("image_width", capW_, 1280*4);
        node_.param("image_height", capH_, 720);
        cinfo_left.reset(new camera_info_manager::CameraInfoManager(node_, camera_name_left_, left_info_url_));
        cinfo_right.reset(new camera_info_manager::CameraInfoManager(node_, camera_name_right_, right_info_url_));

        //---defaults until openDevice()/initBuffers() run
        fd_cam = -1;
        buffers = NULL;
        n_buffers = 0;
        frameIndex = -1;

        // dev_name_ is never mutated after this point, so the pointer stays valid.
        devName = const_cast<char*>(dev_name_.c_str());
    }
    virtual ~V4L2Capture();

    int openDevice();
    int closeDevice();
    int initDevice();
    int startCapture();
    int stopCapture();
    int initBuffers();
    int freeBuffers();
    int getFrame(void **,size_t *);
    int backFrame();
    void verifyBuffers(struct v4l2_buffer *buf);
    void saveImage(struct v4l2_buffer *buf);
    void publishImage(struct v4l2_buffer *buf);
    static void test();

};

// V4L2Capture::V4L2Capture(char *devName, int width, int height) {
// 	// Auto-generated constructor stub
// 
// }

V4L2Capture::~V4L2Capture() {
	// Intentionally empty: teardown is performed explicitly by the caller via
	// stopCapture() / freeBuffers() / closeDevice() (see main()).
}

int V4L2Capture::openDevice() {
	// Open the V4L2 device node (devName) read/write.
	// Returns 0 on success, -1 on failure (errno reported via perror).
	printf("video dev : %s\n", devName);
	fd_cam = open(devName, O_RDWR);
	if (fd_cam < 0) {
		perror("Can't open video device");
		return -1;	// FIX: previously reported success even when open() failed
	}
	return 0;
}

int V4L2Capture::closeDevice() {
	// Close the device fd if it is open.
	// Returns 0 on success, -1 if the fd was not open or close() failed.
	if (fd_cam >= 0) {	// FIX: was `> 0`, which would leak a valid fd 0
		if (close(fd_cam) < 0) {
			perror("Can't close video device");
			return -1;	// FIX: close failure used to be reported as success
		}
		fd_cam = -1;	// FIX: mark closed so a second call cannot double-close
		return 0;
	} else {
		return -1;
	}
}

int V4L2Capture::initDevice() {
	int ret;
	struct v4l2_capability cam_cap;		//显示设备信息
	struct v4l2_cropcap cam_cropcap;	//设置摄像头的捕捉能力
	struct v4l2_fmtdesc cam_fmtdesc;	//查询所有支持的格式：VIDIOC_ENUM_FMT
	struct v4l2_crop cam_crop;			//图像的缩放
	struct v4l2_format cam_format;		//设置摄像头的视频制式、帧格式等

	/* 使用IOCTL命令VIDIOC_QUERYCAP，获取摄像头的基本信息*/
	ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);
	if (ret < 0) {
		perror("Can't get device information: VIDIOCGCAP");
	}
	printf(
			"Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n",
			cam_cap.driver, cam_cap.card, cam_cap.bus_info,
			(cam_cap.version >> 16) & 0XFF, (cam_cap.version >> 8) & 0XFF,
			cam_cap.version & 0XFF);

	/* 使用IOCTL命令VIDIOC_ENUM_FMT，获取摄像头所有支持的格式*/
	cam_fmtdesc.index = 0;
	cam_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	printf("Support format:\n");
	while (ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -1) {
		printf("\t%d.%s\n", cam_fmtdesc.index + 1, cam_fmtdesc.description);
		cam_fmtdesc.index++;
	}

	/* 使用IOCTL命令VIDIOC_CROPCAP，获取摄像头的捕捉能力*/
	//cam_cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	//if (0 == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)) {
	//	printf("Default rec:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n",
	//			cam_cropcap.defrect.left, cam_cropcap.defrect.top,
	//			cam_cropcap.defrect.width, cam_cropcap.defrect.height);
		/* 使用IOCTL命令VIDIOC_S_CROP，获取摄像头的窗口取景参数*/
	//	cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	//	cam_crop.c = cam_cropcap.defrect;		//默认取景窗口大小
	//	if (-1 == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)) {
			//printf("Can't set crop para\n");
	//	}
	//} else {
	//	printf("Can't set cropcap para\n");
	//}

	/* 使用IOCTL命令VIDIOC_S_FMT，设置摄像头帧信息*/
	cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	cam_format.fmt.pix_mp.width = capW_;
	cam_format.fmt.pix_mp.height = capH_;
	cam_format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB24;		//要和摄像头支持的类型对应
	cam_format.fmt.pix_mp.field = V4L2_FIELD_ANY;
        cam_format.fmt.pix_mp.num_planes = 1;
        cam_format.fmt.pix_mp.plane_fmt[0].bytesperline = 0;
	ret = ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);
	if (ret < 0) {
		perror("Can't set frame information");
	}
	/* 使用IOCTL命令VIDIOC_G_FMT，获取摄像头帧信息*/
	cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	ret = ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);
	if (ret < 0) {
		perror("Can't get frame information");
	}
	printf("Current data format information:\n\twidth:%d\n\theight:%d\n",
			cam_format.fmt.pix.width, cam_format.fmt.pix.height);
	ret = initBuffers();
	if (ret < 0) {
		perror("Buffers init error");
		//exit(-1);
	}
	return 0;
}

int V4L2Capture::initBuffers() {
	// Request 8 mmap frame buffers from the driver, query each one's plane
	// layout and map it into this process. Fills `buffers` / `n_buffers`.
	// Returns 0 on success, -1 on any failure.
	int ret;
	struct v4l2_requestbuffers req;
	CLEAR(req);
	req.count = 8;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	req.memory = V4L2_MEMORY_MMAP;
	printf("at first req.count is %d \n", req.count);
	ret = ioctl(fd_cam, VIDIOC_REQBUFS, &req);
	printf("after ioctl req.count is %d \n", req.count);	// driver may grant fewer
	if (ret < 0) {
		perror("Request frame buffers failed");
		return -1;	// FIX: the loop below would otherwise run on garbage
	}
	if (req.count < 2) {
		perror("Request frame buffers while insufficient buffer memory");
		return -1;	// FIX: too few buffers is fatal, was silently ignored
	}
	buffers = (struct cam_buffer*) calloc(req.count, sizeof(*buffers));
	if (buffers == NULL) {
		perror("Out of memory");
		return -1;	// FIX: would have dereferenced NULL below
	}
	for (n_buffers = 0; n_buffers < req.count; n_buffers++) {
		// FIX: buf/planes are now cleared on every iteration so stale data
		// from the previous QUERYBUF cannot leak into the next one.
		struct v4l2_plane planes[8];
		CLEAR(planes);
		struct v4l2_buffer buf;
		CLEAR(buf);
		printf("req.count is : %d \n", req.count);
		// Query buffer n_buffers to learn its plane offset and size.
		buf.index = n_buffers;
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.length = 8;		// capacity of the planes[] array
		buf.m.planes = planes;
		ret = ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);
		if (ret < 0) {
			printf("VIDIOC_QUERYBUF %d failed\n", n_buffers);
			return -1;
		} else {
			printf("Querybuf_%d success \n", n_buffers);
		}
		// Map the single data plane into our address space.
		buffers[n_buffers].start = mmap(
				0, // start anywhere
				buf.m.planes[0].length, PROT_READ | PROT_WRITE, MAP_SHARED, fd_cam,
				buf.m.planes[0].m.mem_offset);
		buffers[n_buffers].length = buf.m.planes[0].length;
		if (MAP_FAILED == buffers[n_buffers].start) {
			printf("mmap buffer%d failed\n", n_buffers);
			return -1;
		} else {
			printf("buffer %u at address %p.\n", n_buffers, buffers[n_buffers].start);
		}
	}
	return 0;
}

int V4L2Capture::startCapture() {
	// Queue every mapped buffer on the multiplanar capture queue, then start
	// streaming. Returns 0 on success, -1 on failure.
	struct v4l2_plane planes[VIDEO_MAX_PLANES];
	CLEAR(planes);
	unsigned int i;
	for (i = 0; i < n_buffers; i++) {
		struct v4l2_buffer buf;
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		buf.m.planes = planes;
		buf.length = 1;		// single-plane format (num_planes = 1 in initDevice)
		if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &buf)) {
			printf("VIDIOC_QBUF buffer%d failed\n", i);
			return -1;
		}
	}
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	if (-1 == ioctl(fd_cam, VIDIOC_STREAMON, &type)) {
		printf("VIDIOC_STREAMON error\n");	// FIX: message was missing '\n'
		return -1;
	}
	return 0;
}

int V4L2Capture::stopCapture() {
	// Stop streaming on the multiplanar capture queue.
	// Returns 0 on success, -1 if VIDIOC_STREAMOFF fails.
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	if (ioctl(fd_cam, VIDIOC_STREAMOFF, &type) == -1) {
		printf("VIDIOC_STREAMOFF error\n");
		return -1;
	}
	return 0;
}

int V4L2Capture::freeBuffers() {
	// Unmap every capture buffer and release the bookkeeping array.
	// Returns 0 on success (including when nothing was mapped), -1 on failure.
	if (buffers == NULL) {
		return 0;	// FIX: guard — nothing to free, avoid iterating a NULL array
	}
	unsigned int i;
	for (i = 0; i < n_buffers; ++i) {
		if (-1 == munmap(buffers[i].start, buffers[i].length)) {
			printf("munmap buffer%d failed\n", i);
			return -1;
		}
	}
	free(buffers);
	buffers = NULL;	// FIX: was left dangling — a second call would double-free
	n_buffers = 0;
	return 0;
}

int V4L2Capture::getFrame(void **frame_buf, size_t* len) {
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
	CLEAR(planes);
	struct v4l2_buffer queue_buf;
	CLEAR(queue_buf);
	queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	queue_buf.memory = V4L2_MEMORY_MMAP;
        queue_buf.length = VIDEO_MAX_PLANES;
        queue_buf.m.planes = planes;
	if (-1 == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)) {
		printf("VIDIOC_DQBUF error\n");
		return -1;
	}
	*frame_buf = buffers[queue_buf.index].start;
	*len = buffers[queue_buf.index].length;
	frameIndex = queue_buf.index;
// 	if(save_img)
// 	    saveImage(&queue_buf);
	publishImage(&queue_buf);
	return 0;
}

int V4L2Capture::backFrame() {
	// Requeue the buffer most recently handed out by getFrame().
	// Returns 0 on success, -1 if no frame is held or VIDIOC_QBUF fails.
	if (frameIndex == -1) {
		return -1;
	}
	struct v4l2_plane plane_array[VIDEO_MAX_PLANES];
	struct v4l2_buffer q_buf;
	CLEAR(q_buf);
	CLEAR(plane_array);
	q_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	q_buf.memory = V4L2_MEMORY_MMAP;
	q_buf.index = frameIndex;
	q_buf.m.planes = plane_array;
	q_buf.length = 1;	// single-plane format
	if (ioctl(fd_cam, VIDIOC_QBUF, &q_buf) == -1) {
		printf("VIDIOC_QBUF error\n");
		return -1;
	}
	return 0;
}
void V4L2Capture::verifyBuffers(struct v4l2_buffer *buf){
	// Stub: intended to sanity-check the contents of a dequeued buffer
	// (draft below was never finished). Currently a no-op.
	// cam_buffer *temp_buffer = &buffers[buf->index];
	
	// const uint8_t *data = temp_buffer[0] + temp_buffer[0].length;

	// unsigned int errors = 0;
	// unsigned int dirty = 0;
	// unsigned int length;

	// length = buf->m.planes[0].bytesused;
}

void V4L2Capture::saveImage(struct v4l2_buffer *buf){
    // Write up to kMaxSavedImages frames to disk as imageN.png, then disable
    // saving by clearing save_img. `buf` selects which mapped buffer to dump.
    const int kMaxSavedImages = 60;
    if (num_save < kMaxSavedImages) {
        // Wrap the mmap'ed frame without copying. The combined frame is
        // capW_ x capH_, 3 bytes per pixel (generalized from the previous
        // hard-coded 1280*4 x 720 so the parameters actually take effect).
        cv::Mat image = cv::Mat(cv::Size(capW_, capH_), CV_8UC3, buffers[buf->index].start);
        // Keep the left half of the combined frame, as before
        // (previously cv::Rect(0,0,1280*2,720) with capW_ defaulting to 1280*4).
        cv::Mat image_useful = cv::Mat(image, cv::Rect(0, 0, capW_ / 2, capH_));
        string name_img = "image" + to_string(num_save);
        name_img = name_img.append(".").append("png");
        cv::imwrite(name_img, image_useful);
        ++num_save;
    }
    else{
        save_img = false;
        std::cout<< "save_img down..." << std::endl;
    }
}

void V4L2Capture::publishImage(struct v4l2_buffer *buf){
	unsigned int length;
        sensor_msgs::Image msg_left, msg_right;
	// double pub_time;
	// pub_time = (double)cvGetTickCount();
	length = buf->m.planes[0].bytesused;
	//printf("length is:%d\n",length);
        // cv::Mat image = cv::Mat(HEIGHT, WIDTH, CV_8UC3, buffers[buf->index].start);
        cv::Mat image = cv::Mat(cv::Size(1280 * 4, 720), CV_8UC3, buffers[buf->index].start);
        cv::Mat img_left(image,cv::Rect(0,0,1280,720));
        cv::Mat img_right(image,cv::Rect(1280,0,1280,720));
// 	sensor_msgs::ImagePtr msg_left = cv_bridge::CvImage(std_msgs::Header(), "bgr8", img_left).toImageMsg();
//      sensor_msgs::ImagePtr msg_right = cv_bridge::CvImage(std_msgs::Header(), "bgr8", img_right).toImageMsg();
        cv_bridge::CvImage(std_msgs::Header(), "bgr8", img_left).toImageMsg(msg_left);
        cv_bridge::CvImage(std_msgs::Header(), "bgr8", img_right).toImageMsg(msg_right);
        msg_left.header.stamp = ros::Time::now();
        msg_right.header.stamp = msg_left.header.stamp;
        sensor_msgs::CameraInfoPtr ci_left(new sensor_msgs::CameraInfo(cinfo_left->getCameraInfo()));
        sensor_msgs::CameraInfoPtr ci_right(new sensor_msgs::CameraInfo(cinfo_right->getCameraInfo()));
        ci_left->header.frame_id = msg_left.header.frame_id;
        ci_left->header.stamp = msg_left.header.stamp;
        ci_right->header.frame_id = msg_right.header.frame_id;
        ci_right->header.stamp = msg_right.header.stamp;
        image_pub_left.publish(msg_left, *ci_left);
        image_pub_right.publish(msg_right, *ci_right);
// 	left_pub->publish(msg_left);
//         right_pub->publish(msg_right);
	// pub_time = (double)cvGetTickCount() - pub_time;
	// printf("pub used time is %g ms\n",( pub_time / (cvGetTickFrequency()*1000)));
}
void V4L2Capture::test() {
	// Stub: a disabled smoke test of the capture pipeline
	// (open -> init -> start -> one frame -> requeue -> teardown).
// 	unsigned char *yuv422frame = NULL;
// 	unsigned long yuvframeSize = 0;
// 
// 	string videoDev="/dev/video0";
//         // TODO:TEST
// 	V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()),
// 			1824, 940);
// 	vcap->openDevice();
// 	vcap->initDevice();
// 	vcap->startCapture();
// 	vcap->getFrame((void **) &yuv422frame, (size_t *)&yuvframeSize);
// 
// 	vcap->backFrame();
// 	vcap->freeBuffers();
// 	vcap->closeDevice();
}

int main(int argc, char* argv[]){
	// Capture frames from the V4L2 device and publish them as ROS image
	// topics until shutdown, printing the per-frame processing time.
	ros::init(argc, argv, "lvds_img");
	V4L2Capture vcap;

	unsigned char *frame = NULL;
	// FIX: declare as size_t directly instead of the old
	// `(size_t *)&unsigned_long` cast, which relied on the two types
	// happening to have the same size.
	size_t frameSize = 0;
	double t;

	vcap.openDevice();
	vcap.initDevice();
	vcap.startCapture();

	while (ros::ok()) {
		t = (double)cvGetTickCount();
		if (vcap.getFrame((void **)&frame, &frameSize) == 0) {
			// FIX: only requeue when a buffer was actually dequeued;
			// requeuing a stale index after a DQBUF failure would hand the
			// driver a buffer it already owns.
			vcap.backFrame();
		}
		t = (double)cvGetTickCount() - t;
		printf("Used time is %g ms\n", (t / (cvGetTickFrequency() * 1000)));
		ros::spinOnce();
	}
	vcap.stopCapture();
	vcap.freeBuffers();
	vcap.closeDevice();

	return 0;
}
