/*******************************************************************************
* This class is used to open v4l2 cameras. It uses the v4l2 (Video for Linux 2)
* API to capture video frames from camera devices.
* 
*
* 
*
* Licensed under GPL Version 3 license (/license.txt).
* Author: Eder A. Perez (eder.perez@ice.ufjf.br)
*******************************************************************************/

#include "cvvideo_v4l2.h"
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/mman.h>
#include <linux/videodev2.h>



/*********************************
*                                *
*     METHODS IMPLEMENTATION     *
*                                *
**********************************/

/** Constructor.
* Puts the object into a safe "no input opened" state. In addition to the
* original fields, fd/read_type/rowsize/pixelformat and the two member-function
* pointers are now initialized: the destructor reads read_type and fd, so a
* never-opened object previously invoked undefined behavior on destruction.
*/
CVVideoV4L2::CVVideoV4L2():  width(0), height(0), fps(0.f), bpp(0), errorcode(CV_SUCCESS),
buffer(NULL), buffsize(0), input_opened(false), fd(-1), read_type(0), rowsize(0),
pixelformat(0), get_frame_ptr(NULL), convert_buffer_data(NULL) {

}


/** Destructor.
* Releases the frame buffer (munmap for streaming I/O, free for read() I/O),
* stops streaming if it was active, and closes the device file descriptor.
*/
CVVideoV4L2::~CVVideoV4L2() {

  // Fixed: the original condition was `read_type = V4L2_CAP_STREAMING`
  // (assignment, not comparison), so the streaming branch was always taken
  // and the read()-mode buffer was leaked.
  if(read_type == V4L2_CAP_STREAMING) { // Used streaming I/O
    if(buffer != NULL)
      munmap(buffer, buffsize);
    int t = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ioctl(fd, VIDIOC_STREAMOFF, &t);
  }
  else free(buffer); // Used read() function; free(NULL) is a no-op

  close(fd);

}



/** Open a v4l2 device. It tries to set device priority to highest and also retrieves
* camera's capabilities (stored in capability private attribute).
* @params:
*   device: path of device (e.g. /dev/video0)
*   params: used to set camera parameters
*
* @return: returns true if device was opened, false otherwise.
*       
*/
bool CVVideoV4L2::openV4L2(const char* device, CVv4l2params_t params) {

  // Close the current input, if it exists, and open a new one
  if(input_opened) {
    this->~CVVideoV4L2();
    
    // Initiates attributes
    width = height = 0;
    fps = 0.f;
    bpp = 0;
    errorcode = CV_SUCCESS;
    buffer = NULL;
    buffsize = 0;
    input_opened = false;
    
  }
  
  // Try to open a camera
    // O_NONBLOCK flag doesn't block read() and VIDIOC_DQBUF ioctl
    // O_RDWR flag blocks read() and VIDIOC_DQBUF ioctl until data become available
  fd = open(device, O_RDWR);
  if( fd == -1) {
    errorcode = CVERR_V4L2_CAMERA_NOT_OPENED;
    return false;
  }
  
  
  // Try to set priority to highest (only this fd can change device properties)
  if( ioctl(fd, VIDIOC_S_PRIORITY, V4L2_PRIORITY_RECORD) == -1 )
    errorcode = CVERR_V4L2_WARNING_PRIORITY_NOT_SET;
 
 
  // Get device information
  memset(&capability, 0, sizeof(v4l2_capability));
  if( ioctl(fd, VIDIOC_QUERYCAP, &capability) == -1 ) {
    errorcode = CVERR_V4L2_INCOMPATIBLE_DEVICE;
    close(fd);
    return false;
  }
 
  // If device can't capture video, abort
  if( !(capability.capabilities & V4L2_CAP_VIDEO_CAPTURE) ) {
    errorcode = CVERR_V4L2_INCOMPATIBLE_DEVICE;
    close(fd);
    return false;
  }
  
  // If device is offline, abort
  memset(&input, 0, sizeof(v4l2_input));
  ioctl(fd, VIDIOC_G_INPUT, &(input.index));
  ioctl(fd, VIDIOC_ENUMINPUT, &input);
  if( (input.status & V4L2_IN_ST_NO_POWER) || (input.status & V4L2_IN_ST_NO_SIGNAL) ) {
    errorcode = CVERR_V4L2_NO_SIGNAL;
    close(fd);
    return false;
  }
  
  
  // Set parameters and returns
  input_opened = true;
  return init_v4l2_camera(params);
	
}



/** Set the v4l2 device parameters.
* Negotiates the pixel format, tries to set the requested frame rate, and
* installs the frame-retrieval method (read() or memory-mapped streaming)
* according to the device capabilities.
*
* @params:
*   params: used to set camera parameters
*
* @return: returns true in success, false otherwise.
*
*/
bool CVVideoV4L2::init_v4l2_camera(CVv4l2params_t params) {

  // Data format negotiation: start from the device's current format and
  // overwrite only the fields the user explicitly requested.
  struct v4l2_format format;
  memset(&format, 0, sizeof(v4l2_format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  ioctl(fd, VIDIOC_G_FMT, &format);

  if(params.width  != 0) format.fmt.pix.width  = (unsigned int) params.width;
  if(params.height != 0) format.fmt.pix.height = (unsigned int) params.height;
  if(params.device_fmt != CV_V4L2_DEVFMT_DEFAULT) format.fmt.pix.pixelformat = (CVuint32) params.device_fmt;
  this->retrieve_fmt = params.retrieve_fmt;

  if( ioctl(fd, VIDIOC_S_FMT, &format) == -1 )
    errorcode = CVERR_V4L2_WARNING_SET_DATA_FORMAT_ERROR; // warning: keep going

  // Read back what the driver actually accepted (it may differ from request)
  ioctl(fd, VIDIOC_G_FMT, &format);
  this->width       = format.fmt.pix.width;
  this->height      = format.fmt.pix.height;
  this->bpp         = getBpp(format.fmt.pix.pixelformat);
  this->rowsize     = format.fmt.pix.bytesperline;
  this->buffsize    = format.fmt.pix.sizeimage;
  this->pixelformat = format.fmt.pix.pixelformat;


  // Streaming parameters (try to set fps).
  // Fixed: VIDIOC_G_PARM/VIDIOC_S_PARM take a POINTER to the struct; the
  // original passed `streamparm` by value, which is invalid for ioctl.
  struct v4l2_streamparm streamparm;
  memset(&streamparm, 0, sizeof(v4l2_streamparm));
  streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if(ioctl(fd, VIDIOC_G_PARM, &streamparm) != -1) {
    streamparm.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
    struct v4l2_fract timeperframe;
    timeperframe.numerator = 1; timeperframe.denominator = (unsigned int) params.fps;
    streamparm.parm.capture.timeperframe = timeperframe;
    streamparm.parm.capture.readbuffers = 4; // Use four buffers for read() mode
    ioctl(fd, VIDIOC_S_PARM, &streamparm);
    if(ioctl(fd, VIDIOC_G_PARM, &streamparm) != -1) { // Get the final fps set
      // timeperframe is seconds/frame, so fps = denominator / numerator
      // (the original ignored the numerator, which is usually but not always 1)
      if(streamparm.parm.capture.timeperframe.numerator != 0)
        this->fps = (float) streamparm.parm.capture.timeperframe.denominator /
                    (float) streamparm.parm.capture.timeperframe.numerator;
    }
  }


  // Set retrieving frame function (it depends on the capabilities)
  // Use read() function
  if( capability.capabilities & V4L2_CAP_READWRITE) {
    this->buffer = (CVubyte*) malloc(sizeof(CVubyte)*this->buffsize);
    if( this->buffer == NULL ) { // allocation failure
      errorcode = CVERR_V4L2_INCOMPATIBLE_DEVICE;
      return false;
    }
    this->get_frame_ptr = &CVVideoV4L2::get_frame_v4l2read;
    this->read_type = V4L2_CAP_READWRITE;
  }
  else { // Use memory map (streaming I/O)
    // Request four driver buffers
    struct v4l2_requestbuffers requestbuffers;
    memset(&requestbuffers, 0, sizeof(v4l2_requestbuffers));
    requestbuffers.count  = 4;
    requestbuffers.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    requestbuffers.memory = V4L2_MEMORY_MMAP;
    if( ioctl(fd, VIDIOC_REQBUFS, &requestbuffers) == -1) {
      errorcode = CVERR_V4L2_INCOMPATIBLE_DEVICE;
      return false;
    }

    // Query the status of buffer 0 (only the first buffer is ever used)
    memset(&(this->bf), 0, sizeof(v4l2_buffer));
    this->bf.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    this->bf.index = 0;
    if( ioctl(fd, VIDIOC_QUERYBUF, &(this->bf)) == -1) {
      errorcode = CVERR_V4L2_INCOMPATIBLE_DEVICE;
      return false;
    }

    // Map device memory into the buffer.
    // Fixed: the original never checked for MAP_FAILED, which would make the
    // later memcpy/munmap crash on (void*)-1.
    this->buffsize = this->bf.length;
    this->buffer = (CVubyte*) mmap(NULL, bf.length, PROT_READ|PROT_WRITE, MAP_SHARED, fd, bf.m.offset);
    if( this->buffer == (CVubyte*) MAP_FAILED ) {
      this->buffer = NULL;
      errorcode = CVERR_V4L2_INCOMPATIBLE_DEVICE;
      return false;
    }

    // Start streaming
    if( ioctl(fd, VIDIOC_STREAMON, &(requestbuffers.type)) == -1 ) {
      errorcode = CVERR_V4L2_INCOMPATIBLE_DEVICE;
      return false;
    }

    // Set getFrame() function
    this->get_frame_ptr = &CVVideoV4L2::get_frame_v4l2strm;
    this->read_type = V4L2_CAP_STREAMING;
  }

  // Set conversion pixel function
  set_conv_function();
  return true;

}



/** Return the bits-per-pixel value associated with a v4l2 pixel format.
*  @param:
*    pxlfmt: v4l2 pixel format (fourcc code).
*
*  @return: returns the bpp value, or -1 for unrecognized formats.
*/
int CVVideoV4L2::getBpp(CVuint32 pxlfmt) {
  int bits = -1; // formats not listed below map to -1

  switch(pxlfmt) {
    // 8 bits per pixel
    case V4L2_PIX_FMT_PAL8:
    case V4L2_PIX_FMT_RGB332:
    case V4L2_PIX_FMT_SBGGR8:
    case V4L2_PIX_FMT_GREY:
      bits = 8;
      break;

    // 16 bits per pixel
    case V4L2_PIX_FMT_RGB444:
    case V4L2_PIX_FMT_RGB555:
    case V4L2_PIX_FMT_RGB565:
    case V4L2_PIX_FMT_RGB555X:
    case V4L2_PIX_FMT_RGB565X:
    case V4L2_PIX_FMT_SBGGR16:
    case V4L2_PIX_FMT_YUV444:
    case V4L2_PIX_FMT_YUV555:
    case V4L2_PIX_FMT_YUV565:
    case V4L2_PIX_FMT_Y16:
      bits = 16;
      break;

    // 24 bits per pixel.
    // NOTE(review): YUYV/UYVY are 16bpp on the wire; returning 24 here
    // presumably reflects the unpacked RGB24/GREY24 output of the conversion
    // routines — confirm before changing.
    case V4L2_PIX_FMT_BGR24:
    case V4L2_PIX_FMT_RGB24:
    case V4L2_PIX_FMT_YUYV:
    case V4L2_PIX_FMT_UYVY:
    case V4L2_PIX_FMT_Y41P:
    case V4L2_PIX_FMT_YVU420:
    case V4L2_PIX_FMT_YUV420:
      bits = 24;
      break;

    // 32 bits per pixel
    case V4L2_PIX_FMT_BGR32:
    case V4L2_PIX_FMT_RGB32:
    case V4L2_PIX_FMT_YUV32:
      bits = 32;
      break;

    default:
      break;
  }

  return bits;
}



/** Copies the current frame into the caller-supplied buffer.
*
* @params:
*   data: destination array of bytes.
*
* @return: returns true on success, false otherwise.
*
*/
bool CVVideoV4L2::getFrame(CVubyte* data) {
  // Dispatch through the member-function pointer installed by
  // init_v4l2_camera(): read() mode or memory-mapped streaming mode.
  bool (CVVideoV4L2::*grab)(CVubyte*) = this->get_frame_ptr;
  return (this->*grab)(data);
}



/** Set the function to convert data (used when the retrieving format differs
 * from the device pixel format). Selects convert_buffer_data and adjusts bpp
 * to match the delivered format.
 */
void CVVideoV4L2::set_conv_function() {

  // Default: deliver raw device data, no conversion.
  // Fixed: the original only assigned convert_buffer_data inside the YUYV
  // case, so for any other device pixel format (or an unknown retrieve
  // format) getFrame() branched on an uninitialized member pointer.
  convert_buffer_data = NULL;

  switch(this->pixelformat) {

    /****************************
    *  YUV DEVICE PIXEL FORMAT  *
    ****************************/
    case V4L2_PIX_FMT_YUYV:
      switch((int)this->retrieve_fmt) {
        case CV_RETRFMT_DEFAULT:
          break; // retrieving and device formats are equal; keep NULL

        case CV_RETRFMT_RGB24:
          convert_buffer_data = &CVVideoV4L2::YUYV_to_RGB24;
          this->bpp = 24;
          break;

        case CV_RETRFMT_GREY8:
          convert_buffer_data = &CVVideoV4L2::YUYV_to_GREY8;
          this->bpp = 8;
          break;

        case CV_RETRFMT_GREY24:
          convert_buffer_data = &CVVideoV4L2::YUYV_to_GREY24;
          this->bpp = 24;
          break;

        default:
          break; // unknown retrieve format: deliver raw YUYV data
      }
      break;



    /****************************
    *  JPEG DEVICE PIXEL FORMAT *
    ****************************/
    /*case V4L2_PIX_FMT_PJPG:
      switch((int)this->retrieve_fmt) {
        case CV_RETRFMT_DEFAULT:
          convert_buffer_data = NULL;
          break;
          
        case CV_RETRFMT_RGB24:
        case CV_RETRFMT_GREY8:
        case CV_RETRFMT_GREY24:
          break;
      }
      break;*/

    default:
      break; // no converter implemented: deliver raw device data
  }

}



/** Grabs one frame from the v4l2 device using the read() system call.
* @params:
*   data: destination array of bytes.
*
* @return: returns true in success, false otherwise.
*
*/
bool CVVideoV4L2::get_frame_v4l2read(CVubyte* data) {

  // Pull one frame from the driver into the internal buffer; the device was
  // opened without O_NONBLOCK, so this blocks until a frame is available.
  if( read(fd, this->buffer, this->buffsize) == -1) return false;

  if(convert_buffer_data == NULL) {
    // Retrieving format matches the device format: copy the raw frame out.
    memcpy(data, this->buffer, sizeof(CVubyte)*this->buffsize);
  }
  else {
    // A converter was installed by set_conv_function(): let it fill `data`.
    (this->*convert_buffer_data)(data);
  }

  return true;

}



/** Gets frame from v4l2 device using memory-mapped streaming I/O.
* @params:
*   data: array of bytes.
*
* @return: returns true in success, false otherwise.
*
*/
bool CVVideoV4L2::get_frame_v4l2strm(CVubyte* data) {

  // Enqueue the buffer for the driver to fill, then dequeue it (blocks until
  // a frame is ready, since the device was opened without O_NONBLOCK).
  // Fixed: the original ignored both ioctl return values and always reported
  // success, even when no frame was captured.
  if( ioctl(fd, VIDIOC_QBUF, &bf) == -1 ) return false;
  if( ioctl(fd, VIDIOC_DQBUF, &bf) == -1 ) return false;

  // If necessary, convert the format data
  if(convert_buffer_data) {
    (this->*convert_buffer_data)(data);
    return true;
  }

  memcpy(data, this->buffer, sizeof(CVubyte)*this->buffsize);
  return true;

}



/** The macros below are used for pixel format conversion.
*   ROWSIZE computes the real frame line-width in bytes: `width` pixels at
*   `bpp` bits per pixel, rounded up to the next 32-bit boundary and then
*   converted from bits to bytes (>> 3).
*   BYTERANGE clamps a's value to the 0-255 range (1 byte).
*/
#define ROWSIZE(width, bpp) ( ((((width) * (bpp)) + 31) & ~31) >> 3 )
#define BYTERANGE(a)( ( (a) > 255 )? 255 : ( ( (a) < 0 )? 0 : (a) ) )



/** Convert data in YUYV format to RGB24 format. In YUYV format, each four bytes
* is two pixels. The two pixels share U and V, and each Y goes for one of the pixels.
*
* The coefficients (1.164, 1.596, 0.813, 0.391, 2.018) match the BT.601
* limited-range YCbCr-to-RGB conversion. Destination rows are indexed by `i`
* counting down while the source row `h` counts up, so the output image is
* vertically flipped relative to the capture buffer.
*
* NOTE(review): `for(i = height; --i;)` iterates i from height-1 down to 1 and
* `++h` runs before the first row is read, so destination row 0 and source
* row 0 are never touched — looks like an off-by-one; confirm against callers
* before changing.
*
* @params:
*   data: data to be converted.
*
*/
void CVVideoV4L2::YUYV_to_RGB24(CVubyte* data) {
  register int rsize = ROWSIZE(this->width, 24); // destination row stride in bytes
  register int k, h = 0; // k: byte index within the source row, h: source row
  for(register int i = this->height; --i;) {
    k = 0; ++h;
    for(register int j = 0; j < this->width; j += 2) { // two pixels per iteration
      register int l = 3*j; // byte offset of the first RGB pixel in the dest row
      // One macropixel: Y1 U Y2 V (U and V shared by both pixels)
      CVubyte y1 = buffer[h*rowsize + k++];
      CVubyte u  = buffer[h*rowsize + k++];
      CVubyte y2 = buffer[h*rowsize + k++];
      CVubyte v  = buffer[h*rowsize + k++];
      
      // First pixel: R, G, B
      data[i*rsize + l]   = (CVubyte)BYTERANGE( 1.164f*(float)(y1 - 16.f) + 1.596f*(float)(v - 128.f) );
      data[i*rsize + l+1] = (CVubyte)BYTERANGE( 1.164f*(float)(y1 - 16.f) - 0.813f*(float)(v - 128.f) - 0.391f*(float)(u - 128.f) );
      data[i*rsize + l+2] = (CVubyte)BYTERANGE( 1.164f*(float)(y1 - 16.f) + 2.018f*(float)(u - 128.f) );
      
      // Second pixel: R, G, B (same chroma, second luma)
      data[i*rsize + l+3] = (CVubyte)BYTERANGE( 1.164f*(float)(y2 - 16.f) + 1.596f*(float)(v - 128.f) );
      data[i*rsize + l+4] = (CVubyte)BYTERANGE( 1.164f*(float)(y2 - 16.f) - 0.813f*(float)(v - 128.f) - 0.391f*(float)(u - 128.f) );
      data[i*rsize + l+5] = (CVubyte)BYTERANGE( 1.164f*(float)(y2 - 16.f) + 2.018f*(float)(u - 128.f) );

    }
  }
}



/** Convert data in YUYV format to GREY8 format. In YUYV format, each four bytes
* is two pixels. The two pixels share U and V, and each Y goes for one of the pixels.
* This conversion function uses only the Y (luma) value of each pixel.
*
* The output is vertically flipped: destination row `i` counts down while the
* source row `h` counts up.
*
* NOTE(review): as in YUYV_to_RGB24, the `--i` loop condition and the
* pre-incremented `h` skip destination row 0 and source row 0 — presumably an
* off-by-one; confirm before changing.
*
* @params:
*   data: data to be converted.
*
*/
void CVVideoV4L2::YUYV_to_GREY8(CVubyte* data) {
  register int rsize = ROWSIZE(this->width, 8); // destination row stride in bytes
  register int k, h = 0; // k: pixel counter in the source row, h: source row
  for(register int i = this->height; --i;) {
    k = 0; ++h;
    for(register int j = 0; j < this->width; ++j)
      // Y samples sit at every even byte of the YUYV stream (stride 2)
      data[i*rsize + j] = buffer[h*rowsize + 2*(k++)];
      
  }
}



/** Convert data in YUYV format to GREY24 format. In YUYV format, each four bytes
* is two pixels. The two pixels share U and V, and each Y goes for one of the pixels.
* This conversion function uses only the Y (luma) value of each pixel, replicated
* into all three channels of the 24-bit output pixel.
*
* The output is vertically flipped: destination row `i` counts down while the
* source row `h` counts up.
*
* NOTE(review): as in the other converters, the `--i` loop condition and the
* pre-incremented `h` skip destination row 0 and source row 0 — presumably an
* off-by-one; confirm before changing.
*
* @params:
*   data: data to be converted.
*
*/
void CVVideoV4L2::YUYV_to_GREY24(CVubyte* data) {
  register int rsize = ROWSIZE(this->width, 24); // destination row stride in bytes
  register int k, h = 0; // k: pixel counter in the source row, h: source row
  for(register int i = this->height; --i;) {
    k = 0; ++h;
    for(register int j = 0; j < this->width; ++j) {
      int l = 3*j; // byte offset of the pixel in the destination row
      // Y samples sit at every even byte of the YUYV stream (stride 2)
      CVubyte tmp = buffer[h*rowsize + 2*(k++)];
      data[i*rsize + l]   = tmp; // grey value replicated into R, G and B
      data[i*rsize + l+1] = tmp;
      data[i*rsize + l+2] = tmp;
    }
      
  }
}
