/*
   v4lcameras.C

   Licensed to the Apache Software Foundation (ASF) under one
   or more contributor license agreements.  See the NOTICE file
   distributed with this work for additional information
   regarding copyright ownership.  The ASF licenses this file
   to you under the Apache License, Version 2.0 (the
   "License"); you may not use this file except in compliance
   with the License.  You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing,
   software distributed under the License is distributed on an
   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
   KIND, either express or implied.  See the License for the
   specific language governing permissions and limitations
   under the License.

   Created on: May 3, 2009

 */

#define XI_USE_V4L2

#if defined XI_USE_V4L2
#include <errno.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

#include <cstring>

#include <boost/thread.hpp>

#include <wells/v4lcameras.h>
#include <core/exceptions.h>

#define CLEAR(x) memset (&(x), 0, sizeof (x))

namespace crossEyed {

namespace wells {

namespace TV4L {

using namespace std ;
using namespace crossEyed::core ;

//
// Wrapper around ioctl() on the camera descriptor that transparently
// retries calls interrupted by a signal (EINTR).
//
// Returns the ioctl() result: 0 on success, -1 on (non-EINTR) failure
// with errno set.
//
int TSpecificCamera::xioctl( int request, void * arg)
{
   int status = ioctl( fd, request, arg );

   while( status == -1 && errno == EINTR )
   {
      status = ioctl( fd, request, arg );
   }

   return status;
}

//
// Open and configure the V4L2 capture device named by pDevName.
//
// numCameraFrames          - total number of mmap frame buffers to use
// minNumFramesV4LEnqueued  - number of frames to always keep queued in
//                            the driver (must be < numCameraFrames,
//                            otherwise the size_t subtraction below wraps)
//
// Throws TIOException if the device cannot be stat'ed, opened or
// configured.
//
TSpecificCamera::TSpecificCamera( const char * pDevName,
                                  size_t       numCameraFrames,
                                  unsigned int minNumFramesV4LEnqueued )
   : //
     // Make the frameQueue smaller than the frameVector to guarantee
     // that there are always enough frames in the V4L queue
     //
     TCamera<TFrame>( numCameraFrames - minNumFramesV4LEnqueued ),
     devName( pDevName, strlen( pDevName ) ),
     fd( -1 ),
     numCameraFrames( numCameraFrames ),
     minNumFramesV4LEnqueued( minNumFramesV4LEnqueued ),
     numV4LEnqueued( 0 ),
     frameVector( numCameraFrames )
{
   struct stat st;

   //
   // Make sure the device node exists and is a character device before
   // trying to open it.
   //
   if( stat( devName.data(), &st ) == -1 )
   {
      throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
   }

   XI_ASSERT_FAST( S_ISCHR( st.st_mode ) ) ;

   fd = open( devName.data(), O_RDWR /* required */ | O_NONBLOCK, 0 );
   if( fd == -1 )
   {
      throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
   }

   //
   // If configuration throws after open() succeeded, the destructor will
   // never run (the constructor did not complete), so close the
   // descriptor here to avoid leaking it.
   //
   try
   {
      configure() ;

      setResolution( 640, 480 );
   }
   catch( ... )
   {
      close( fd );
      fd = -1 ;
      throw ;
   }
}

//
// Close the device descriptor, if one was successfully opened.
// Guarding against fd == -1 avoids a pointless close() that would fail
// with EBADF and clobber errno.
//
TSpecificCamera::~TSpecificCamera()
{
   if( fd != -1 )
   {
      close( fd );
   }
}

//
// Verify the open descriptor is a streaming-capable V4L2 capture device
// and reset cropping to the driver's default rectangle.
//
// Throws TError if the device is not a V4L2 device, TIOException on any
// other ioctl failure.
//
void TSpecificCamera::configure()
{
   struct v4l2_capability cap;

   if( xioctl( VIDIOC_QUERYCAP, &cap ) == -1 )
   {
      if( errno != EINVAL )
      {
         throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
      }

      // EINVAL from QUERYCAP means the node is not a V4L2 device at all.
      throw TError( __FUNCTION__, __LINE__, "device is not a V4L device" ) ;
   }

   XI_ASSERT_FAST( cap.capabilities & V4L2_CAP_VIDEO_CAPTURE ) ;
   XI_ASSERT_FAST( cap.capabilities & V4L2_CAP_STREAMING ) ;

   //
   // Select video input, video standard and tune here.
   //

   struct v4l2_cropcap cropcap;

   CLEAR (cropcap);

   cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

   if( xioctl( VIDIOC_CROPCAP, &cropcap ) != 0 )
   {
      // Device exposes no crop capabilities; nothing more to configure.
      return ;
   }

   struct v4l2_crop crop;

   crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
   crop.c    = cropcap.defrect; // reset to default

   if( xioctl( VIDIOC_S_CROP, &crop ) == -1 && errno != EINVAL )
   {
      // EINVAL only means cropping is unsupported; anything else is fatal.
      throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
   }
}

//
// Hand the frame buffer at the given index back to the V4L2 driver
// (VIDIOC_QBUF) unless it is already in the driver's queue.
//
// Throws TIOException if the enqueue ioctl fails.
//
void TSpecificCamera::qFrame( unsigned int index )
{
   // Already queued in the driver -- nothing to do.
   if( frameVector[ index ].v4LEnqueued )
   {
      return ;
   }

   struct v4l2_buffer buf;

   CLEAR (buf);

   buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
   buf.memory = V4L2_MEMORY_MMAP;
   buf.index  = index;

   if( xioctl( VIDIOC_QBUF, &buf ) == -1 )
   {
      throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
   }

   frameVector[ index ].v4LEnqueued = true ;
   numV4LEnqueued ++ ;

   // cout << "Enqueued buffer index: " << index << ", numV4LEnqueued: " << numV4LEnqueued << "\n";
}

//
// Dequeue the next filled frame buffer from the driver (VIDIOC_DQBUF).
//
// Returns a pointer into frameVector for the dequeued frame, or NULL
// when no frame is ready yet (the descriptor is non-blocking and DQBUF
// reported EAGAIN). Throws TIOException on any other error.
//
TFrame * TSpecificCamera::dqFrame( )
{
   struct v4l2_buffer buf;

   CLEAR (buf);

   buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
   buf.memory = V4L2_MEMORY_MMAP;

   if( xioctl( VIDIOC_DQBUF, &buf ) != -1 )
   {
      // Success: mark the frame as ours and update the bookkeeping.
      TFrame * pFrame = &frameVector[ buf.index ] ;

      pFrame->v4LEnqueued = false ;
      numV4LEnqueued -- ;

      // cout << "Dequeued buffer index: " << pFrame->index << ", numV4LEnqueued: " << numV4LEnqueued << "\n";

      return pFrame ;
   }

   if( errno == EAGAIN )
   {
      // No frame available yet on the non-blocking descriptor.
      return NULL ;
   }

   //
   // EIO could be ignored, see spec; for now it is treated as fatal
   // like every other error.
   //
   throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
}

//
// Read-only access to the frame-processing counter maintained by
// eventLoop(), so callers can observe capture progress.
//
const TCountMonitor & TSpecificCamera::getProcessingMonitor()
{
   return processingMonitor ;
}

//
// Request a dimX x dimY interlaced YUYV capture format from the driver.
//
// NOTE(review): VIDIOC_S_FMT may adjust the requested values to the
// nearest supported ones; the negotiated format can be read back with
// getResolution().
//
// Throws TIOException if the format ioctl fails.
//
void TSpecificCamera::setResolution( int dimX, int dimY )
{
   struct v4l2_format fmt ;

   CLEAR (fmt) ;

   fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE ;
   fmt.fmt.pix.width       = dimX ;
   fmt.fmt.pix.height      = dimY ;
   fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV ;

   //fmt.fmt.pix.field       = V4L2_FIELD_NONE ;
   fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED ;

   if( xioctl( VIDIOC_S_FMT, &fmt ) != 0 )
   {
      throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
   }
}

//
// Query the driver for the currently negotiated capture resolution.
//
// dimX, dimY - out-parameters receiving the active width and height.
//
// Throws TIOException if the query ioctl fails.
//
void TSpecificCamera::getResolution( int & dimX, int & dimY )
{
   struct v4l2_format fmt;

   CLEAR (fmt);

   //
   // VIDIOC_G_FMT only consults fmt.type; the driver fills in the pixel
   // format and dimensions itself, so nothing else needs to be pre-set.
   //
   fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

   if( xioctl( VIDIOC_G_FMT, &fmt ) == -1 )
   {
      throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
   }

   dimX = fmt.fmt.pix.width;
   dimY = fmt.fmt.pix.height;
}

//
// Request numCameraFrames mmap-able capture buffers from the driver and
// map each one into this process, recording index/size/data for every
// entry of frameVector.
//
// Throws TIOException if buffer allocation or querying fails; asserts
// if the driver grants a different buffer count or a mapping fails.
//
void TSpecificCamera::prepare()
{
   struct v4l2_requestbuffers req;

   CLEAR( req );

   req.count  = numCameraFrames;
   req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
   req.memory = V4L2_MEMORY_MMAP;

   if ( xioctl( VIDIOC_REQBUFS, &req) == -1)
   {
      throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
   }

   //
   // Make sure we get exactly as many buffers as we wanted to
   //
   XI_ASSERT_FAST( req.count == numCameraFrames ) ;

   for( unsigned int i = 0;
        i < req.count;
        i ++)
   {
      struct v4l2_buffer buf;

      CLEAR (buf);

      buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      buf.memory      = V4L2_MEMORY_MMAP;
      buf.index       = i;

      if( xioctl( VIDIOC_QUERYBUF, &buf) == -1 )
      {
         throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
      }

      frameVector[i].index = i ;
      frameVector[i].size  = buf.length;

      //
      // Explicitly mark the frame as not enqueued so that qFrame() and
      // startStreaming() start from a known state, even if TFrame does
      // not default-initialize the flag.
      //
      frameVector[i].v4LEnqueued = false ;

      frameVector[i].data  = (TYUYV *)mmap( NULL,
                                            buf.length,
                                            PROT_READ | PROT_WRITE,
                                            MAP_SHARED,
                                            fd,
                                            buf.m.offset);

      XI_ASSERT_HARD( frameVector[ i ].data != MAP_FAILED ) ;
   }
}

//
// Hand every frame buffer to the driver, then switch the capture
// stream on (VIDIOC_STREAMON).
//
// Throws TIOException (via qFrame or directly) on ioctl failure.
//
void TSpecificCamera::startStreaming()
{
   for( unsigned int frameIdx = 0; frameIdx < numCameraFrames; frameIdx++ )
   {
      qFrame( frameIdx ) ;
   }

   enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

   if( xioctl( VIDIOC_STREAMON, &type ) != 0 )
   {
      throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
   }
}

void TSpecificCamera::eventLoop()
{
   fd_set         fds;
   struct timeval tv;
   int            rc;
   TFrame *       pOldFrame ;

   processingMonitor.reset() ;

   while( !boost::this_thread::interruption_requested() )
   {
      FD_ZERO (&fds);
      FD_SET (fd, &fds);

      //
      // Timeout.
      //
      tv.tv_sec = 8;
      tv.tv_usec = 0;

      rc = select (fd + 1, &fds, NULL, NULL, &tv);

      if( rc == -1 )
      {
         if( errno == EINTR )
         {
            continue ;
         }
         else
         {
            throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
         }
      }

      if( rc == 0 )
      {
         //
         // Timeout. Slow webcam?
         //
         throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
      }

      //
      // Dequeue the buffer from the output queue of the device
      //
      pOldFrame = frameQueue.swizzleWriterData( dqFrame() );

      if( pOldFrame != NULL && !pOldFrame->v4LEnqueued )
      {
         qFrame( pOldFrame->index ) ;
      }

      try
      {
         frameQueue.writerDataFinishAndAdvance() ;
      }
      catch( boost::thread_interrupted interrupt )
      {
         break ;
      }

      processingMonitor++ ;
   }
}

//
// Switch the capture stream off (VIDIOC_STREAMOFF).
//
// Throws TIOException if the ioctl fails.
//
void TSpecificCamera::stopStreaming()
{
   enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

   if( xioctl( VIDIOC_STREAMOFF, &type ) != 0 )
   {
      throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
   }
}

//
// Unmap every frame buffer previously mapped by prepare() and reset the
// bookkeeping in frameVector.
//
// Frames that were never mapped (data == NULL) are skipped, so calling
// release() twice, or before prepare(), no longer fails with EINVAL
// from munmap(NULL, 0).
//
// Throws TIOException if an unmap of a mapped buffer fails.
//
void TSpecificCamera::release()
{
   for( unsigned int i = 0; i < numCameraFrames; i++ )
   {
      if( frameVector[ i ].data != NULL )
      {
         if( munmap( frameVector[ i ].data, frameVector[ i ].size ) == -1 )
         {
            throw TIOException( devName.data(), errno, __FUNCTION__, __LINE__ ) ;
         }
      }

      frameVector[ i ].data  = NULL;
      frameVector[ i ].size  = 0;
      frameVector[ i ].index = i;
   }
}

}}};
#endif
