// -*- C++ -*-
/*!
 * @file  FaceTrackCV.cpp
 * @brief Sequence InPort component
 *
 */
#include "FaceTrackCV.h"

// Title (and identifier) of the OpenCV display window used by this component.
char windowName[] = "FaceTrackCV";

// Module specification
// <rtc-template block="module_spec">
// RTC module profile: key/value pairs handed to the manager when the
// component factory is registered (see FaceTrackCVInit at the bottom).
static const char* col_spec[] =
  {
    "implementation_id", "FaceTrackCV",
    "type_name",         "FaceTrackCV",
    "description",       "FaceTrackCV component",
    "version",           "1.0",
    "vendor",            "Tomoaki Yoshikai, JSK",
    "category",          "example",
    "activity_type",     "DataFlowComponent",
    "max_instance",      "10",
    "language",          "C++",
    "lang_type",         "compile",
    "conf.default.print_name",         "0",
    ""  // empty string terminates the list
  };
// </rtc-template>

// Constructor: wires the "ImageData" InPort to its backing variable
// m_ImageData. All remaining setup happens in onInitialize().
FaceTrackCV::FaceTrackCV(RTC::Manager* manager)
  : RTC::DataFlowComponentBase(manager),
    // <rtc-template block="initializer">
    m_ImageDataIn( "ImageData",m_ImageData )
    // </rtc-template>
{
}

// Destructor: no explicit cleanup here.
// NOTE(review): the pthread mutex initialized in onInitialize() is never
// destroyed anywhere visible — confirm whether that is intentional.
FaceTrackCV::~FaceTrackCV()
{
}

void FaceTrackCV::face_detect(void){
  int i;
  IplImage *src_gray = 0;
  const char *cascade_name = "haarcascade_frontalface_alt_tree.xml";
  CvHaarClassifierCascade *cascade = 0;
  CvMemStorage *storage = 0;
  CvSeq *faces;

  static CvScalar colors[] = {
    {{0, 0, 255}}, {{0, 128, 255}},
    {{0, 255, 255}}, {{0, 255, 0}},
    {{255, 128, 0}}, {{255, 255, 0}},
    {{255, 0, 0}}, {{255, 0, 255}}
  };

  //グレイ画像を用意
  src_gray = cvCreateImage (cvGetSize (image), IPL_DEPTH_8U, 1);
  
  //ブーストされた分類器のカスケードを読み込む
  cascade = (CvHaarClassifierCascade *) cvLoad (cascade_name, 0, 0, 0);

  //メモリを確保し，元画像のグレースケール化，ヒストグラムの均一化を行う
  storage = cvCreateMemStorage (0);
  cvClearMemStorage (storage);
  cvCvtColor (image, src_gray, CV_BGR2GRAY);
  cvEqualizeHist (src_gray, src_gray);

  // 物体（顔）検出
  faces = cvHaarDetectObjects (src_gray, cascade, storage, 1.2, 1, CV_HAAR_DO_CANNY_PRUNING);

  // 検出された全ての顔位置に，円を描画する
  for (i = 0; i < (faces ? faces->total : 0); i++) {
    CvRect *r = (CvRect *) cvGetSeqElem (faces, i);
    CvPoint center;
    int radius;
    center.x = cvRound (r->x + r->width * 0.5);
    center.y = cvRound (r->y + r->height * 0.5);
    radius = cvRound ((r->width + r->height) * 0.25);
    cvCircle (image, center, radius, colors[i % 8], 3, 8, 0);
  }

  pthread_mutex_lock(&facelock);  
  m_facenum = faces->total;
  if(m_facenum > 0){
    CvRect *r = (CvRect *) cvGetSeqElem (faces, 0);
    m_facepos[0] = cvRound (r->x + r->width * 0.5);
    m_facepos[1] = cvRound (r->y + r->height * 0.5);
    m_facearea = r->width * r->height;
  }
  pthread_mutex_unlock(&facelock);  

  cvReleaseImage (&src_gray);
  cvReleaseMemStorage (&storage);
}


// One-time component setup: register ports, reset the face-tracking state,
// create the mutex protecting it, and bind configuration parameters.
RTC::ReturnCode_t FaceTrackCV::onInitialize()
{
  // Registration: InPort/OutPort/Service
  // <rtc-template block="registration">
  // Set InPort buffers
  addInPort( "ImageData", m_ImageDataIn );
  // Set service provider to Ports

  // Set CORBA Service Ports

  // Default capture geometry (pixels).
  m_Width  = 320;
  m_Height = 240;

  // No face observed yet.
  m_facenum = 0;
  m_facepos[0] = m_facepos[1] = 0.0;
  m_facearea = 0.0;

  //commands

  // Guards m_facenum / m_facepos / m_facearea (written by face_detect).
  pthread_mutex_init(&facelock, 0);
  // </rtc-template>

  // Bind configurable parameters with their defaults.
  if (m_file.empty())
    bindParameter("file", m_file, "FaceTrackCVTest.conf");
  bindParameter("config", m_config, "FaceTrackCV0");
  bindParameter("WaitTime", m_WaitTime, "10");

  return RTC::RTC_OK;
}


/*
RTC::ReturnCode_t FaceTrackCV::onFinalize()
{
  return RTC::RTC_OK;
}
*/

/*
RTC::ReturnCode_t FaceTrackCV::onStartup(RTC::UniqueId ec_id)
{
  return RTC::RTC_OK;
}
*/

/*
RTC::ReturnCode_t FaceTrackCV::onShutdown(RTC::UniqueId ec_id)
{
  return RTC::RTC_OK;
}
*/

// Activation: allocate the RGB display buffer, open the viewer window,
// and present an initial (black) frame.
RTC::ReturnCode_t FaceTrackCV::onActivated(RTC::UniqueId ec_id)
{
  image = cvCreateImage( cvSize( m_Width, m_Height ), IPL_DEPTH_8U, 3 );
  cvZero( image );

  cvNamedWindow( windowName, CV_WINDOW_AUTOSIZE );
  // Show the blank frame once so the window appears right away.
  cvShowImage( windowName, image );
  cvWaitKey( m_WaitTime );

  std::cerr << "Finish initialization" << std::endl;

  return RTC::RTC_OK;
}


// Deactivation: close the viewer window and free the display buffer.
RTC::ReturnCode_t FaceTrackCV::onDeactivated(RTC::UniqueId ec_id)
{
  cvDestroyWindow( windowName );

  // Bug fix: `image` is allocated in onActivated() but was never released,
  // leaking one m_Width x m_Height x 3 buffer per activate/deactivate cycle.
  // cvReleaseImage also NULLs the pointer, so a repeated call is safe.
  if (image) {
    cvReleaseImage(&image);
  }

  return RTC::RTC_OK;
}


// Periodic execution: pull the newest frame from the InPort, run face
// detection on it, and refresh the display window.
RTC::ReturnCode_t FaceTrackCV::onExecute(RTC::UniqueId ec_id)
{
  try
  {
    // Nothing to do until a new frame arrives.
    if ( !m_ImageDataIn.isNew() )
      {
        return RTC::RTC_OK;
      }
    m_ImageDataIn.read();

    // Copy the received pixel bytes into the display buffer.
    // Bug fix: clamp to the actual payload size so a short packet cannot
    // read past the end of m_ImageData.data (the original always copied
    // m_Width*m_Height*3 bytes regardless of what was received).
    const long int expected = (long int) m_Width * m_Height * 3;
    const long int avail = (long int) m_ImageData.data.length();
    const long int n = (avail < expected) ? avail : expected;
    for (long int i = 0; i < n; i++)
      image->imageData[i] = (unsigned char) m_ImageData.data[i];

    // Run detection (draws markers into `image`, updates face members).
    face_detect();

    cvShowImage( windowName, image );
    cvWaitKey( m_WaitTime );
  }
  // Bug fix: widened from std::runtime_error — cv::Exception and most other
  // stdlib errors derive from std::exception, not runtime_error, and would
  // previously have escaped and killed the component.
  catch(const std::exception &e)
  {
      // Keep running even if one frame fails.
      std::cerr << e.what() << std::endl;
  }

  return RTC::RTC_OK;
}

/*
RTC::ReturnCode_t FaceTrackCV::onAborting(RTC::UniqueId ec_id)
{
  return RTC::RTC_OK;
}
*/

/*
RTC::ReturnCode_t FaceTrackCV::onError(RTC::UniqueId ec_id)
{
  return RTC::RTC_OK;
}
*/

/*
RTC::ReturnCode_t FaceTrackCV::onReset(RTC::UniqueId ec_id)
{
  return RTC::RTC_OK;
}
*/

/*
RTC::ReturnCode_t FaceTrackCV::onStateUpdate(RTC::UniqueId ec_id)
{
  return RTC::RTC_OK;
}
*/

/*
RTC::ReturnCode_t FaceTrackCV::onRateChanged(RTC::UniqueId ec_id)
{
  return RTC::RTC_OK;
}
*/

extern "C"
{
 
  // Module entry point: registers the FaceTrackCV factory with the RTC
  // manager using the profile defined in col_spec above.
  void FaceTrackCVInit(RTC::Manager* manager)
  {
    coil::Properties profile(col_spec);
    manager->registerFactory(profile,
                             RTC::Create<FaceTrackCV>,
                             RTC::Delete<FaceTrackCV>);
  }
  
};


