/*
//
//              INTEL CORPORATION PROPRIETARY INFORMATION
//  This software is supplied under the terms of a license  agreement or
//  nondisclosure agreement with Intel Corporation and may not be copied
//  or disclosed except in  accordance  with the terms of that agreement.
//    Copyright (c) 2007-2008 Intel Corporation. All Rights Reserved.
//
//
*/

#include "umc_defs.h"
#if defined(UMC_ENABLE_AVS_VIDEO_ENCODER)

#include "umc_avs_enc.h"
#include "umc_avs_enc_fusion_core.h"
#include "umc_avs_thread.h"

#include "umc_scene_analyzer.h"
#include "umc_video_data_scene_info.h"
#include "umc_me.h"

#include "vm_sys_info.h"
#include "umc_event.h"
#include "umc_video_data.h"

#include "vm_strings.h"

namespace UMC
{
// file-local frame counter; reset to zero in AVSVideoEncoder::Init()
// NOTE(review): it is never incremented anywhere in this file — presumably
// a leftover debug aid; confirm before relying on it
static Ipp32s frameCount = 0;
enum
{
    // hard upper bound on the number of encoding worker threads,
    // applied to the user-requested thread count in Init()
    AVS_ENC_MAX_NUM_THREADS     = 8
};

// Default constructor: establishes the encoder parameter defaults.
// All assignments are independent; they are grouped by purpose.
AVSVideoEncoderParams::AVSVideoEncoderParams(void)
{
    // source video description defaults
    info.color_format = YUV420;
    info.framerate = 25.0;
    info.interlace_type = PROGRESSIVE;

    // GOP structure: unlimited GOP, unlimited B-run, 1 second max delay
    m_maxGOPLength = 0;
    m_maxBLength = (Ipp32u) -1;
    m_maxDelayTime = 1000;

    // rate control, conformance level and threading
    m_bitRateType = AVS_RC_CBR;
    level = AVS_LEVEL_62;
    numThreads = 1;

    // encoding tools: deblocking off, no constant quantizer (-1),
    // single frame, fastest ME preset, progressive, no ME feedback
    m_bDeblocking = false;
    m_iConstQuant = -1;
    m_uiNumFrames = 1;
    m_uiMESearchSpeed = 0;
    m_bInterlace = false;
    m_bUseMeFeedback = false;

} // AVSVideoEncoderParams::AVSVideoEncoderParams(void)

// Destructor: the parameter structure owns no resources, nothing to release.
AVSVideoEncoderParams::~AVSVideoEncoderParams(void)
{

} // AVSVideoEncoderParams::~AVSVideoEncoderParams(void)


// Default constructor: puts the encoder into the "closed" state.
// Nothing is allocated here; real construction happens in Init().
AVSVideoEncoder::AVSVideoEncoder(void)
{
    m_numThreads = 0;
    m_pThreads = (AVSThread *) 0;

    m_pSceneAnalyzer = (SceneAnalyzerBase *) 0;
    m_pCore = (AVSEncFusionCore *) 0;

} // AVSVideoEncoder::AVSVideoEncoder(void)

// Destructor: delegates all teardown (threads, core, scene analyzer)
// to Close(), which is safe to call on an already-closed object.
AVSVideoEncoder::~AVSVideoEncoder(void)
{
    // close the object
    Close();

} // AVSVideoEncoder::~AVSVideoEncoder(void)

// Releases everything Init() allocated and returns the encoder to the
// "closed" state. Safe to call repeatedly and on a never-initialized
// object: deleting a NULL pointer is a no-op, so no guards are needed.
Status AVSVideoEncoder::Close(void)
{
    // terminate and destroy the worker threads
    delete [] m_pThreads;
    m_pThreads = (AVSThread *) 0;
    m_numThreads = 0;

    // destroy the fusion core
    delete m_pCore;
    m_pCore = (AVSEncFusionCore *) 0;

    // destroy the scene analyzer
    delete m_pSceneAnalyzer;
    m_pSceneAnalyzer = (SceneAnalyzerBase *) 0;

    // let the base class perform its own cleanup
    VideoEncoder::Close();

    return UMC_OK;

} // Status AVSVideoEncoder::Close(void)

// Initializes the encoder from AVSVideoEncoderParams: creates and
// initializes the fusion core, the scene analyzer and the worker threads.
// Returns UMC_ERR_NULL_PTR on bad params, UMC_ERR_ALLOC/UMC_ERR_INIT on
// component failures, UMC_OK on success.
Status AVSVideoEncoder::Init(BaseCodecParams *pInitParams)
{
    AVSVideoEncoderParams *pParams = DynamicCast<AVSVideoEncoderParams> (pInitParams);
    SceneAnalyzerParams sceneAnalyzerParams;
    Status umcRes;
    Ipp32u i, iThreads;

    // check error(s) BEFORE any use of pParams.
    // (fix: the previous code copied from pParams before this check,
    // dereferencing a NULL pointer when invalid params were passed in)
    if (NULL == pParams)
        return UMC_ERR_NULL_PTR;

    frameCount = 0;

    // keep a copy of the initialization parameters for GetInfo()
    memcpy(&m_internalParams, pParams, sizeof(m_internalParams));

    // release the object before initialization
    Close();

    // call the parent's method
    umcRes = VideoEncoder::Init(pParams);
    if (UMC_OK != umcRes)
        return umcRes;

    // calculate number of threads: clamp the requested count,
    // or fall back to the number of CPUs when none was requested
    if (pParams->numThreads)
        iThreads = IPP_MIN(pParams->numThreads, AVS_ENC_MAX_NUM_THREADS);
    else
        iThreads = vm_sys_info_get_cpu_num();

    // create a fusion core
    m_pCore = new AVSEncFusionCore();
    if (NULL == m_pCore)
        return UMC_ERR_ALLOC;
    // initialize the fusion core
    umcRes = m_pCore->Init(iThreads, pParams);
    if (UMC_OK != umcRes)
        return UMC_ERR_INIT;

    // create the scene analyzer
    m_pSceneAnalyzer = new SceneAnalyzer();
    if (NULL == m_pSceneAnalyzer)
        return UMC_ERR_ALLOC;
    // initialize the scene analyzer
    sceneAnalyzerParams.m_maxGOPLength = pParams->m_maxGOPLength;
    sceneAnalyzerParams.m_maxBLength = pParams->m_maxBLength;
    sceneAnalyzerParams.m_interlaceType = pParams->info.interlace_type;
    umcRes = m_pSceneAnalyzer->Init(&sceneAnalyzerParams);
    if (UMC_OK != umcRes)
        return umcRes;

    // allocate an array of thread objects
    m_pThreads = new AVSThread[iThreads];
    if (NULL == m_pThreads)
        return UMC_ERR_ALLOC;
    // remember the thread count.
    // (fix: m_numThreads was never set before, so the Reset() loops
    // over m_numThreads never executed)
    m_numThreads = iThreads;
    // initialize the threads
    for (i = 0; i < iThreads; i += 1)
    {
        umcRes = m_pThreads[i].Init(m_pCore, i);
        if (UMC_OK != umcRes)
            return UMC_ERR_INIT;
    }

    return UMC_OK;

} // Status AVSVideoEncoder::Init(BaseCodecParams *pInit)

// Encodes one frame: feeds the source through the scene analyzer into the
// fusion core, then lets the worker thread encode into pDst. A NULL pSrc
// signals end-of-stream and drains the buffered frames. Returns
// UMC_ERR_NOT_ENOUGH_DATA when more source is required.
Status AVSVideoEncoder::GetFrame(MediaData *pSrc, MediaData *pDst)
{
    VideoData *pVideoSrc = DynamicCast<VideoData> (pSrc);
    VideoDataSceneInfo analyzedVideoSrc;
    Status umcRes;

    // check error(s): a non-NULL source that is not a VideoData is invalid
    if ((NULL == pVideoSrc) && (pSrc))
    {
        return UMC_ERR_NULL_PTR;
    }
    if (NULL == pDst)
    {
        return UMC_ERR_NULL_PTR;
    }
    // guard all components, not only the core.
    // (fix: a partially failed Init() - e.g. a scene analyzer or thread
    // allocation error after the core was created - previously led to a
    // NULL dereference below)
    if ((NULL == m_pCore) ||
        (NULL == m_pSceneAnalyzer) ||
        (NULL == m_pThreads))
    {
        return UMC_ERR_NOT_INITIALIZED;
    }

    // analyze the current frame
    umcRes = m_pSceneAnalyzer->GetFrame(pSrc, &analyzedVideoSrc);
    if (UMC_OK == umcRes)
    {
        // try to load source to the core
        umcRes = m_pCore->LoadSource(&analyzedVideoSrc);
        if (UMC_OK != umcRes)
            return umcRes;
    }
    else if (UMC_ERR_NOT_ENOUGH_DATA == umcRes)
    {
        // the analyzer demands more source
        if (pSrc)
        {
            return umcRes;
        }

        // in opposite case we expect, that the stream is over.
        // so we need to pull buffered frames from the encoder.
    }
    else
    {
        return umcRes;
    }

    // set the destination video plane(s)
    umcRes = m_pCore->SetDestination(pDst);
    if (UMC_OK != umcRes)
        return umcRes;

    // take a part in the encoding process on the calling thread
    m_pThreads[0].DoJob();

    return UMC_OK;

} // Status AVSVideoEncoder::GetFrame(MediaData *pSrc, MediaData *pDst)

// Reports the parameters the encoder was initialized with by copying them
// into the caller-provided structure.
// (fix: the previous code assigned the LOCAL pointer pParams to
// &m_internalParams, which returned nothing to the caller; it also
// dereferenced nothing, so a NULL/mistyped argument went undetected)
Status AVSVideoEncoder::GetInfo(BaseCodecParams * pCodecParams)
{
    AVSVideoEncoderParams *pParams = DynamicCast<AVSVideoEncoderParams> (pCodecParams);

    // check error(s)
    if (NULL == pParams)
        return UMC_ERR_NULL_PTR;

    // copy the stored parameters out to the caller
    // (memcpy of the whole structure, consistent with Init())
    memcpy(pParams, &m_internalParams, sizeof(m_internalParams));

    return UMC_OK;

} // Status AVSVideoEncoder::GetInfo(BaseCodecParams *pCodecParams)

// Resets the encoder to its post-Init state: parks the auxiliary worker
// threads (indices 1..m_numThreads-1), resets the fusion core, then wakes
// the workers again.
// (fix: m_pCore was dereferenced without a check, crashing when Reset()
// was called on a closed or never-initialized encoder)
Status AVSVideoEncoder::Reset(void)
{
    Event done;
    Ipp32u i;

    // check error(s)
    if (NULL == m_pCore)
        return UMC_ERR_NOT_INITIALIZED;

    // initialize the event used to rendezvous with the workers
    done.Init(0, 0);

    // make all auxiliary threads sleep (thread 0 is the calling thread)
    for (i = 1; i < m_numThreads; i += 1)
        m_pThreads[i].GoSleep(done);

    // reset the fusion core
    m_pCore->Reset();

    // wake up all auxiliary threads
    for (i = 1; i < m_numThreads; i += 1)
        m_pThreads[i].WakeUp(done);

    return UMC_OK;

} // Status AVSVideoEncoder::Reset(void)

#define PAR_STRLEN 512
// Convenience overload: parse the parameter file without capturing
// the input YUV file name.
Status AVSVideoEncoderParams::ReadParamFile(const vm_char *ParFileName)
{
    return ReadParamFile(ParFileName, (vm_char *) 0);
} //Status AVSVideoEncoderParams::ReadParamFile(const vm_char *ParFileName)

// Parses a text parameter file line by line and fills in the encoder
// parameters. Each line may contain a "Keyword:" followed by a value;
// everything after '#' is treated as a comment. Line 2 (when it matches
// no keyword) is taken as the input YUV file name if InputYUVFileName
// is non-NULL. Returns UMC_ERR_OPEN_FAILED if the file cannot be opened,
// UMC_ERR_INIT on an unsupported profile, UMC_OK otherwise.
// Unrecognized lines are silently ignored.
Status AVSVideoEncoderParams::ReadParamFile(const vm_char *ParFileName, vm_char *InputYUVFileName)
{
  vm_file           *InputFile = 0;
  vm_char           line[PAR_STRLEN];    // raw line as read from the file
  vm_char           temp[PAR_STRLEN];    // line with the '#' comment stripped
  vm_char*          param=0;             // points just past the matched keyword
  vm_char*          end_of_line = 0;     // position of '#', if any
  Ipp32s            i = 0;               // 1-based line counter
 
  size_t            length = 0;          // length of the meaningful part of the line

  const vm_char     comment = '#';

  if (0 == (InputFile = vm_file_open(ParFileName, VM_STRING("rt"))))
  {
    vm_debug_trace1(VM_DEBUG_ERROR, VM_STRING("Can't open parameter file %s\n"), ParFileName);
    return UMC_ERR_OPEN_FAILED;
  }

  while (vm_file_gets(line,PAR_STRLEN,InputFile))
  {
      i++;
      // strip the trailing '#' comment, then clamp to the buffer size
      end_of_line   = vm_string_strchr(line,comment);
      length        = (end_of_line)? end_of_line - line: vm_string_strlen(line);
      length        = (length<PAR_STRLEN)? length:PAR_STRLEN-1;

      // skip lines that start with '#' or are empty
      if (!length)
          continue;
      // copy the meaningful part into temp (zero-filled, so always terminated)
      memset (temp,0,sizeof(vm_char)*PAR_STRLEN);
      vm_string_strncpy(temp, line,length);
      // NOTE: keywords are matched with strstr, so they may appear
      // anywhere in the line, not only at its start
      if (vm_string_strstr(temp,VM_STRING("Profile:")))
      {
          // NOTE(review): param assumes the keyword starts at the beginning
          // of temp; with a leading-whitespace line, strstr matches but this
          // offset points into the wrong place - confirm the expected format
          param = temp + vm_string_strlen(VM_STRING("Profile:"));
          if (vm_string_strstr(param,VM_STRING("JIZHUN")))
          {
            profile = AVS_PROFILE_JIZHUN; // Simple = JIZHUN ;-)
            continue;
          }
          else
          {
              // any profile other than JIZHUN is unsupported
              vm_file_close(InputFile);
              return UMC::UMC_ERR_INIT;
          }
      }//profile
      else if (vm_string_strstr(temp,VM_STRING("GOPLength:")))
      {
          param = temp + vm_string_strlen(VM_STRING("GOPLength:"));
          vm_string_sscanf(param, VM_STRING("%d"),&m_maxGOPLength);
          continue;
      }
      else if (vm_string_strstr(temp,VM_STRING("BFramesLength:")))
      {
          param = temp + vm_string_strlen(VM_STRING("BFramesLength:"));
          // NOTE(review): %d into an Ipp32u target - a signed/unsigned
          // specifier mismatch; works in practice but %u would be correct
          vm_string_sscanf(param, VM_STRING("%d"),&m_maxBLength);
          continue;
      }
      else if (vm_string_strstr(temp,VM_STRING("Bitrate:")))
      {
          param = temp + vm_string_strlen(VM_STRING("Bitrate:"));
          vm_string_sscanf(param, VM_STRING("%d"),&info.bitrate);
          continue;
      }
      else if (vm_string_strstr(temp,VM_STRING("Width:")))
      {
          param = temp + vm_string_strlen(VM_STRING("Width:"));
          vm_string_sscanf(param, VM_STRING("%d"),&info.clip_info.width);
          continue;
      }
      else if (vm_string_strstr(temp,VM_STRING("Height:")))
      {
          param = temp + vm_string_strlen(VM_STRING("Height:"));
          vm_string_sscanf(param, VM_STRING("%d"),&info.clip_info.height);
          continue;
      }
      else if (vm_string_strstr(temp,VM_STRING("Deblocking:")))
      {
          Ipp32s t;
          param = temp + vm_string_strlen(VM_STRING("Deblocking:"));
          vm_string_sscanf(param, VM_STRING("%d"),&t);
          // any non-zero value enables deblocking
          m_bDeblocking = (t!=0);
          continue;
      }
      else if (vm_string_strstr(temp,VM_STRING("ConstQuantization:")))
      {
          Ipp32s t;
          param = temp + vm_string_strlen(VM_STRING("ConstQuantization:"));
          vm_string_sscanf(param, VM_STRING("%d"),&t);
          // narrowed to Ipp8s; the default -1 means "no constant quantizer"
          m_iConstQuant = (Ipp8s)t;
          continue;
      }
      else if (vm_string_strstr(temp,VM_STRING("NumberOfFrames:")))
      {
          Ipp32u t;
          param = temp + vm_string_strlen(VM_STRING("NumberOfFrames:"));
          vm_string_sscanf(param, VM_STRING("%d"),&t);
          m_uiNumFrames = t;
          continue;
      }
      else if (vm_string_strstr(temp,VM_STRING("MESearchSpeed:")))
      {
          Ipp32u t;
          param = temp + vm_string_strlen(VM_STRING("MESearchSpeed:"));
          vm_string_sscanf(param, VM_STRING("%d"),&t);
          m_uiMESearchSpeed = t;
          continue;
      }
      else if (vm_string_strstr(temp,VM_STRING("InterlaceField:")))
      {
          Ipp32u t;
          param = temp + vm_string_strlen(VM_STRING("InterlaceField:"));
          vm_string_sscanf(param, VM_STRING("%d"),&t);
          // any non-zero value selects field (interlaced) coding
          m_bInterlace = (t!=0);
          continue;
      }
      else if  (vm_string_strstr(temp,VM_STRING("UseFeedback:")))
      {
          Ipp32u t = 0;
          param = temp + vm_string_strlen(VM_STRING("UseFeedback:"));
          vm_string_sscanf(param, VM_STRING("%d"),&t);
          // any non-zero value enables the ME feedback loop
          m_bUseMeFeedback = (t!=0);

          continue;
      }
      else if  (( 2 == i) && (NULL != InputYUVFileName))
      {
          // the second line of the file, when it matches no keyword,
          // carries the input YUV file name; %s stops at whitespace
          // copy InputYUVFileName
          //memset(InputYUVFileName, 0, sizeof(InputYUVFileName));
          // last two symbols are tabs, so real length should be decrease on 2
          vm_string_sscanf(line, VM_STRING("%s"), InputYUVFileName);
          continue;
      }
  } // while
  vm_file_close(InputFile);
  return UMC::UMC_OK;
}
#undef PAR_STRLEN

// Factory entry point: hands back a freshly constructed AVS encoder
// as its VideoEncoder base. The caller owns the returned object.
VideoEncoder* CreateAVSEncoder()
{
  return new AVSVideoEncoder();
}


} // namespace UMC

#endif // defined(UMC_ENABLE_AVS_VIDEO_ENCODER)
