/*
//
//              INTEL CORPORATION PROPRIETARY INFORMATION
//  This software is supplied under the terms of a license  agreement or
//  nondisclosure agreement with Intel Corporation and may not be copied
//  or disclosed except in  accordance  with the terms of that agreement.
//    Copyright (c) 2007-2008 Intel Corporation. All Rights Reserved.
//
//
*/

#include "umc_defs.h"
#if defined(UMC_ENABLE_AVS_VIDEO_DECODER)

#include "umc_avs_dec_processing_unit_dec.h"

#include "umc_automatic_mutex.h"
#include "umc_mutex.h"

namespace UMC
{

AVSDecProcessingUnit::AVSDecProcessingUnit(void)
{
    // Nothing to initialize here; all state is set up by the base class
    // and by LoadJob() before the unit starts decoding.
} // AVSDecProcessingUnit::AVSDecProcessingUnit(void)

AVSDecProcessingUnit::~AVSDecProcessingUnit(void)
{
    // Nothing to release; this unit does not own any resources directly.
} // AVSDecProcessingUnit::~AVSDecProcessingUnit(void)

// Scan the frame list for a slice that is ready to be decoded and, if one is
// found, load its decoding context into this unit's decompressor.
//
// pFrameList - head of the list of frames awaiting decoding (may be NULL).
// Returns true when a job was loaded (the chosen slice is marked busy via
// m_bDecVacant = false), false when no decodable slice is currently available.
bool AVSDecProcessingUnit::LoadJob(AVSListElement<AVSFrame> *pFrameList)
{
    // field alternates UPPER/LOWER while walking field-coded frames
    eAVSPicStructure field = AVS_UPPER_FIELD;
    Ipp32s refReadyLevel, refReadyLevelFrame = 0, refReadyLevelField = 0;

    //
    // Decoding of some frames depends on information
    // from previously decoded reference frames. So we need to
    // track level of ready samples in references.
    //
    if (pFrameList)
    {
        // start optimistic: the full picture height (in luma lines) is
        // available until a partially-decoded reference lowers the level
        refReadyLevelFrame = pFrameList->m_seqHeader.vertical_size;
        refReadyLevelField = refReadyLevelFrame / 2;
    }

    // run over frame list and find an unparsed slice
    while (pFrameList)
    {
        AVSListElement<AVSSlice> *pSlice;
        AVSPicture *pPic;

        // get current picture
        if (AVS_FRAME == pFrameList->m_picStructure)
        {
            pPic = pFrameList;
            refReadyLevel = refReadyLevelFrame;
            // frame-coded pictures use the frame decompressor
            m_pDecompressor = &m_dec;
        }
        else
        {
            // field-coded: pick the current field's picture
            pPic = pFrameList->GetPicture(field);
            refReadyLevel = refReadyLevelField;
            m_pDecompressor = &m_decFld;
        }

        // get slices from the frame
        pSlice = pPic->m_Slices.GetHead();

        // run over slice list and find an unparsed slice
        while (pSlice)
        {
            // candidate: not errored, not claimed by another unit, and with
            // macroblocks left to decode
            if ((false == pSlice->m_bError) &&
                (pSlice->m_bDecVacant) &&
                (pSlice->m_decCtx.MbIndex < pSlice->m_decCtx.MbLast))
            {
                Ipp16s *pCoeffsBuffer = pSlice->GetFree();

                // this slice is uncomplete,
                // wrap it around and go work.
                // For B pictures the references must be reconstructed at
                // least down to this slice's macroblock row (MbY * 16 luma
                // lines); I/P slices have no such dependency here.
                if ((pCoeffsBuffer) &&
                    ((AVS_B_PICTURE != pPic->m_picHeader.PictureType) ||
                     (refReadyLevel > pSlice->m_decCtx.MbY * 16)))
                {
                    // hand the slice's context to the decompressor and
                    // claim the slice for this unit
                    pSlice->m_decCtx.m_pCoeffs = pCoeffsBuffer;
                    m_pDecompressor->SetDecodingContext(pSlice->m_decCtx);
                    pSlice->m_bDecVacant = false;

                    return true;
                }
            }

            pSlice = pSlice->GetNext();
        }

        // update reference picture reconstruction level
        // (non-B pictures are the references later B pictures depend on;
        // GetDecReadyLevel presumably reports reconstructed luma lines —
        // TODO confirm against its definition)
        if (AVS_B_PICTURE != pPic->m_picHeader.PictureType)
        {
            if (AVS_FRAME == pFrameList->m_picStructure)
            {
                refReadyLevelFrame = IPP_MIN(refReadyLevelFrame, GetDecReadyLevel(pPic));
                // halve for field units, then align down to a 16-line
                // (macroblock) boundary with the & -16 mask
                refReadyLevelField = (refReadyLevelFrame / 2) & -16;
            }
            else
            {
                refReadyLevelField = IPP_MIN(refReadyLevelField, GetDecReadyLevel(pPic));
                refReadyLevelFrame = refReadyLevelField * 2;
            }
        }

        // get next frame only if both fields of the current were inspected
        if ((AVS_FRAME == pFrameList->m_picStructure) ||
            (AVS_LOWER_FIELD == field))
        {
            pFrameList = pFrameList->GetNext();
            field = AVS_UPPER_FIELD;
        }
        else
        {
            field = AVS_LOWER_FIELD;
        }
    }

    // nothing decodable right now
    return false;

} // bool AVSDecProcessingUnit::LoadJob(AVSListElement<AVSFrame> *pFrameList)

// Decode one row of macroblocks for the job previously loaded by LoadJob(),
// dispatching on the picture type of the current decoding context, then
// release the slice back via UnloadJob().
// Returns UMC_ERR_NOT_ENOUGH_DATA unconditionally (more input is required).
Status AVSDecProcessingUnit::DoJob(void)
{
    // dispatch one macroblock row by picture type
    if (AVS_I_PICTURE == m_pDecompressor->GetDecodingContext().m_pPicHeader->PictureType)
    {
        m_pDecompressor->DecodeIMacroBlocksRow();
    }
    else if (AVS_P_PICTURE == m_pDecompressor->GetDecodingContext().m_pPicHeader->PictureType)
    {
        m_pDecompressor->DecodePMacroBlocksRow();
    }
    else
    {
        // anything else (B pictures) goes through the B-row decoder
        m_pDecompressor->DecodeBMacroBlocksRow();
    }

    // finalize processed task
    UnloadJob();

    return UMC_ERR_NOT_ENOUGH_DATA;

} // Status AVSDecProcessingUnit::DoJob(void)

// Mark the slice currently held by this unit as broken and release it.
// The slice flags are shared with the scheduler (LoadJob), hence the lock.
// Returns UMC_ERR_NOT_ENOUGH_DATA to request more input data.
Status AVSDecProcessingUnit::HandleError(void)
{
    AutomaticMutex guard(m_pGuard->ExtractHandle());

    // flag the slice as erroneous so LoadJob() skips it, and make it
    // vacant again so the bookkeeping stays consistent
    AVSSlice *pBrokenSlice = m_pDecompressor->GetDecodingContext().m_pSlice;
    pBrokenSlice->m_bError = true;
    pBrokenSlice->m_bDecVacant = true;

    return UMC_ERR_NOT_ENOUGH_DATA;

} // Status AVSDecProcessingUnit::HandleError(void)

// Publish the decompressor's advanced decoding state back to the slice and
// release the slice for the next processing unit. Called after a macroblock
// row has been decoded (see DoJob).
void AVSDecProcessingUnit::UnloadJob(void)
{
    AVSSlice *pSlice;

    pSlice = m_pDecompressor->GetDecodingContext().m_pSlice;

    // reflect changes to the slice
    {
        // slice state is shared with the scheduler, guard the update
        AutomaticMutex guard(m_pGuard->ExtractHandle());

        // Order matters: copy the context first so m_pCoeffs holds the
        // advanced coefficient pointer, while GetFree() still returns the
        // position from before this job — their difference is the number
        // of coefficients to consume via AdvanceFree().
        pSlice->m_decCtx = m_pDecompressor->GetDecodingContext();
        pSlice->AdvanceFree(pSlice->m_decCtx.m_pCoeffs - pSlice->GetFree());
        pSlice->m_bDecVacant = true;
    }

} // void AVSDecProcessingUnit::UnloadJob(void)

} // namespace UMC

#endif // defined(UMC_ENABLE_AVS_VIDEO_DECODER)
