// $URL: http://mingw-lib.googlecode.com/svn/trunk/working/avcodec_to_widget_8/t_av_packet.cpp $
// $Rev: 298 $
// $Author: akio.miyoshi $
// $Date:: 2010-07-28 14:12:27 +0000#$

#include "t_av_packet.h"
#include "t_box_player.h"

// Wraps one demuxed AVPacket together with the stream / codec context it
// belongs to, and pre-computes its presentation time in milliseconds.
//
// a_player_ctx : owning player context (must outlive this object).
// a_av_packet  : packet read from the demuxer, or NULL to construct the
//                special "null" end-of-stream marker packet.
T_AV_Packet::T_AV_Packet(T_Box_Player_Context *a_player_ctx, AVPacket *a_av_packet)
    : m_player_ctx(a_player_ctx)
    , m_format_ctx(a_player_ctx->m_format_ctx)
{
    if(a_av_packet==NULL)
    {
        // Null packet: no stream/codec attached; stamp it with the total
        // duration so it sorts after every real packet.
        memset(&m_av_packet, 0, sizeof(m_av_packet));
        m_stream_index = -1;
        m_stream = NULL;
        m_codec_ctx = NULL;
        m_timing = m_player_ctx->duration();
    }
    else
    {
        // Shallow copy: m_av_packet shares the data buffer with the caller's
        // packet.  NOTE(review): there is no av_dup_packet() here — this
        // assumes the caller keeps the buffer alive (or hands ownership
        // over); verify against the demux loop.
        m_av_packet = *a_av_packet;
        m_stream_index = m_av_packet.stream_index;
        m_stream = m_format_ctx->streams[m_stream_index];
        m_codec_ctx = m_stream->codec;
        // Convert pts (stream time_base units) to milliseconds.
        // NOTE(review): pts may be AV_NOPTS_VALUE for some streams, which
        // would produce a garbage timing value — confirm upstream guarantees.
        m_timing =
                1000.0
                * m_av_packet.pts
                * m_stream->time_base.num
                / m_stream->time_base.den
                ;
    }

}

// Decodes every audio frame contained in this packet and returns the raw
// samples as signed 16-bit PCM.  Returns an empty array for the null
// (end-of-stream) packet or when decoding produces no usable data.
QByteArray T_AV_Packet::decodeAudio()
{
    if(this->isNull())
    {
        return QByteArray();
    }
    Q_ASSERT(m_codec_ctx);
    // Local copy: data/size are advanced below as frames are consumed.
    AVPacket v_av_packet = this->m_av_packet;
    QByteArray v_result;
    while(v_av_packet.size > 0)
    {
        int v_dec_len = sizeof(m_player_ctx->m_audio_decode_array);
        int v_enc_len = avcodec_decode_audio3(
                m_codec_ctx,
                (int16_t *)m_player_ctx->m_audio_decode_array,
                &v_dec_len,
                &v_av_packet
                );
        if (v_enc_len < 0)
        {
            /* if error, we skip the frame */
            v_av_packet.size = 0; //force frame finished
        }
        else
        {
            if (m_codec_ctx->sample_fmt == SAMPLE_FMT_S16)
            {
                // Already S16: append the decoded bytes verbatim.
                v_result.append((char *)m_player_ctx->m_audio_decode_array, v_dec_len);
            }
            else
            {
                // Codec emitted another sample format: convert to S16.
                AVAudioConvert *convCtx =
                        av_audio_convert_alloc(
                                SAMPLE_FMT_S16,
                                1,
                                m_codec_ctx->sample_fmt,
                                1,
                                NULL,
                                0);
                if (!convCtx)
                {
                    // Allocation failure: return whatever was decoded so far.
                    qDebug() << "av_audio_convert_alloc() failed";
                    break;
                }
                const void *v_ibuf[6]= {m_player_ctx->m_audio_decode_array};
                void *v_obuf[6]= {m_player_ctx->m_audio_convert_array};
                int v_istride[6]= {av_get_bits_per_sample_format(m_codec_ctx->sample_fmt)/8};
                int v_ostride[6]= {av_get_bits_per_sample_format(SAMPLE_FMT_S16)/8};
                int v_len= v_dec_len/v_istride[0];
                int v_conv_result = av_audio_convert(convCtx, v_obuf, v_ostride, v_ibuf, v_istride, v_len);
                // Free the context before any early exit; it was previously
                // leaked when av_audio_convert() failed.
                av_audio_convert_free(convCtx);
                if (v_conv_result < 0)
                {
                    qDebug() << "av_audio_convert() failed";
                    break;
                }
                qDebug() << "av_audio_convert() successful";
                v_dec_len = v_len * v_ostride[0];
                v_result.append((char *)m_player_ctx->m_audio_convert_array, v_dec_len);
            }
            v_av_packet.data += v_enc_len;
            v_av_packet.size -= v_enc_len;
        }
    }
    return v_result;
}

// Decodes this packet's video frame and, when a_overlay is non-NULL,
// scales/converts it into the SDL YUV overlay (YV12: note the swapped
// U/V plane indices when wiring up the destination picture).
//
// Returns true when a complete picture was decoded, false on decode
// failure, an incomplete frame, or a scaler setup failure.
bool T_AV_Packet::decodeVideo(SDL_Overlay *a_overlay)
{
    if(this->isNull())
    {
        return false;
    }
    AVFrame *v_frame=avcodec_alloc_frame();
    int v_got_pictur;
    if(avcodec_decode_video2(
            m_codec_ctx, //AVCodecContext *avctx,
            v_frame, //AVFrame *picture,
            &v_got_pictur, //int *got_picture_ptr,
            &m_av_packet //AVPacket *avpkt
            )<0)
    {
        av_free(v_frame);
        return false;
    }
    if(!v_got_pictur)
    {
        av_free(v_frame);
        return false;
    }

    // Caller only wanted to advance the decoder, not render.
    if(!a_overlay)
    {
        av_free(v_frame);
        return true;
    }

    SDL_LockYUVOverlay(a_overlay);

    // Destination picture aliases the overlay's planes.  SDL YV12 stores
    // V before U, hence pixels[2]/pixels[1] are swapped relative to
    // FFmpeg's YUV420P plane order.
    AVPicture pict;
    memset(&pict,0,sizeof(AVPicture));
    pict.data[0] = a_overlay->pixels[0];
    pict.data[1] = a_overlay->pixels[2];
    pict.data[2] = a_overlay->pixels[1];
    pict.linesize[0] = a_overlay->pitches[0];
    pict.linesize[1] = a_overlay->pitches[2];
    pict.linesize[2] = a_overlay->pitches[1];

    AVPicture pict_src;
    memset(&pict_src,0,sizeof(AVPicture));
    pict_src.data[0] = v_frame->data[0];
    pict_src.data[1] = v_frame->data[1];
    pict_src.data[2] = v_frame->data[2];
    pict_src.linesize[0] = v_frame->linesize[0];
    pict_src.linesize[1] = v_frame->linesize[1];
    pict_src.linesize[2] = v_frame->linesize[2];

    SwsContext *v_sws_ctx = sws_getContext(
            m_codec_ctx->width, //int srcW,
            m_codec_ctx->height, //int srcH,
            m_codec_ctx->pix_fmt, //enum PixelFormat srcFormat,
            a_overlay->w, //int dstW,
            a_overlay->h, //int dstH,
            PIX_FMT_YUV420P, //enum PixelFormat dstFormat,
            SWS_FAST_BILINEAR, //SWS_BICUBIC, //int flags,
            NULL, //SwsFilter *srcFilter,
            NULL, //SwsFilter *dstFilter,
            NULL  //const double *param
            );
    if(!v_sws_ctx)
    {
        // sws_getContext() can fail (unsupported format/size); previously
        // sws_scale() was called with a NULL context, crashing with the
        // overlay still locked.
        SDL_UnlockYUVOverlay(a_overlay);
        av_free(v_frame);
        return false;
    }
    int v_sws_scale_result = sws_scale(
            v_sws_ctx,
            pict_src.data,
            pict_src.linesize,
            0,
            m_codec_ctx->height,
            pict.data,
            pict.linesize
            );
    Q_UNUSED(v_sws_scale_result);
    sws_freeContext(v_sws_ctx);

    SDL_UnlockYUVOverlay(a_overlay);
    av_free(v_frame);
    return true;
}

// Lazily-built lookup table from FFmpeg PixelFormat values to their
// QVideoFrame counterparts.  Formats with no Qt equivalent map to
// QVideoFrame::Format_Invalid.  Uses a function-local static object
// instead of the previous heap-allocated, never-freed pointer singleton.
static const QMap<int, QVideoFrame::PixelFormat> &s_pixel_format_map()
{
    static QMap<int, QVideoFrame::PixelFormat> s_format_map;
    if(s_format_map.isEmpty())
    {
        s_format_map[PIX_FMT_NONE]            = QVideoFrame::Format_Invalid;  //-1
        s_format_map[PIX_FMT_YUV420P]         = QVideoFrame::Format_YUV420P;  //0
        s_format_map[PIX_FMT_YUYV422]         = QVideoFrame::Format_Invalid;  //1
        s_format_map[PIX_FMT_RGB24]           = QVideoFrame::Format_RGB24;    //2
        s_format_map[PIX_FMT_BGR24]           = QVideoFrame::Format_BGR24;    //3
        s_format_map[PIX_FMT_YUV422P]         = QVideoFrame::Format_Invalid;  //4
        s_format_map[PIX_FMT_YUV444P]         = QVideoFrame::Format_YUV444;   //5
        s_format_map[PIX_FMT_RGB32]           = QVideoFrame::Format_RGB32;    //6
        s_format_map[PIX_FMT_YUV410P]         = QVideoFrame::Format_Invalid;  //7
        s_format_map[PIX_FMT_YUV411P]         = QVideoFrame::Format_Invalid;  //8
        s_format_map[PIX_FMT_RGB565 ]         = QVideoFrame::Format_RGB565;   //9
        s_format_map[PIX_FMT_RGB555]          = QVideoFrame::Format_RGB555;   //10
        s_format_map[PIX_FMT_GRAY8]           = QVideoFrame::Format_Y8;       //11
        s_format_map[PIX_FMT_MONOWHITE]       = QVideoFrame::Format_Invalid;  //12
        s_format_map[PIX_FMT_MONOBLACK]       = QVideoFrame::Format_Invalid;  //13
        s_format_map[PIX_FMT_PAL8]            = QVideoFrame::Format_Invalid;  //14
        s_format_map[PIX_FMT_YUVJ420P]        = QVideoFrame::Format_YUV420P;  //15
        s_format_map[PIX_FMT_YUVJ422P]        = QVideoFrame::Format_Invalid;  //16
        s_format_map[PIX_FMT_YUVJ444P]        = QVideoFrame::Format_YUV444;   //17
        s_format_map[PIX_FMT_XVMC_MPEG2_MC]   = QVideoFrame::Format_Invalid;  //18
        s_format_map[PIX_FMT_XVMC_MPEG2_IDCT] = QVideoFrame::Format_Invalid;  //19
        s_format_map[PIX_FMT_UYVY422]         = QVideoFrame::Format_Invalid;  //20
        s_format_map[PIX_FMT_UYYVYY411]       = QVideoFrame::Format_Invalid;  //21
        s_format_map[PIX_FMT_BGR32]           = QVideoFrame::Format_BGR32;    //22
        s_format_map[PIX_FMT_BGR565]          = QVideoFrame::Format_BGR565;   //23
        s_format_map[PIX_FMT_BGR555]          = QVideoFrame::Format_BGR555;   //24
        s_format_map[PIX_FMT_BGR8]            = QVideoFrame::Format_Invalid;  //25
        s_format_map[PIX_FMT_BGR4]            = QVideoFrame::Format_Invalid;  //26
        s_format_map[PIX_FMT_BGR4_BYTE]       = QVideoFrame::Format_Invalid;  //27
        s_format_map[PIX_FMT_RGB8]            = QVideoFrame::Format_Invalid;  //28
        s_format_map[PIX_FMT_RGB4]            = QVideoFrame::Format_Invalid;  //29
        s_format_map[PIX_FMT_RGB4_BYTE]       = QVideoFrame::Format_Invalid;  //30
        s_format_map[PIX_FMT_NV12]            = QVideoFrame::Format_NV12;     //31
        s_format_map[PIX_FMT_NV21]            = QVideoFrame::Format_NV21;     //32
        s_format_map[PIX_FMT_RGB32_1]         = QVideoFrame::Format_Invalid;  //33
        s_format_map[PIX_FMT_BGR32_1]         = QVideoFrame::Format_BGRA32;   //34
        s_format_map[PIX_FMT_GRAY16BE]        = QVideoFrame::Format_Invalid;  //35
        s_format_map[PIX_FMT_GRAY16LE]        = QVideoFrame::Format_Invalid;  //36
        s_format_map[PIX_FMT_YUV440P]         = QVideoFrame::Format_Invalid;  //37
        s_format_map[PIX_FMT_YUVJ440P]        = QVideoFrame::Format_Invalid;  //38
        s_format_map[PIX_FMT_YUVA420P]        = QVideoFrame::Format_Invalid;  //39
        s_format_map[PIX_FMT_NB]              = QVideoFrame::Format_Invalid;  //40
    }
    return s_format_map;
}

// Decodes this packet's video frame into a newly allocated QVideoFrame
// (caller takes ownership).  Returns NULL on decode failure, incomplete
// frames, or when the codec's pixel format has no QVideoFrame equivalent
// (previously this case hit a Q_ASSERT; see the old FIXME).
QVideoFrame *T_AV_Packet::decodeToQVideoFrame()
{
    if(this->isNull())
    {
        return NULL;
    }
    AVFrame *v_frame=avcodec_alloc_frame();
    int v_got_pictur;
    if(avcodec_decode_video2(
            m_codec_ctx, //AVCodecContext *avctx,
            v_frame, //AVFrame *picture,
            &v_got_pictur, //int *got_picture_ptr,
            &m_av_packet //AVPacket *avpkt
            )<0)
    {
        av_free(v_frame);
        return NULL;
    }
    if(!v_got_pictur)
    {
        av_free(v_frame);
        return NULL;
    }

    const QMap<int, QVideoFrame::PixelFormat> &v_pix_frm_map = s_pixel_format_map();
    QVideoFrame::PixelFormat v_qt_fmt =
            v_pix_frm_map.value(m_codec_ctx->pix_fmt, QVideoFrame::Format_Invalid);
    if(v_qt_fmt == QVideoFrame::Format_Invalid)
    {
        // Unmapped/unsupported pixel format: fail gracefully instead of
        // asserting (resolves the FIXME that was here).
        av_free(v_frame);
        return NULL;
    }
    qDebug() << "[m_formatMap.value(m_codec_ctx->pix_fmt)]" << v_qt_fmt;

    QSize v_frame_size = QSize(m_codec_ctx->width, m_codec_ctx->height);
    QVideoSurfaceFormat v_format = QVideoSurfaceFormat( v_frame_size, v_qt_fmt );
    int v_raw_num_bytes = avpicture_get_size(m_codec_ctx->pix_fmt,
                                             m_codec_ctx->width,
                                             m_codec_ctx->height);
    QVideoFrame *v_video_frame = new QVideoFrame(v_raw_num_bytes,
                                                v_format.frameSize(), // v_frame_size,
                                                v_frame->linesize[0],
                                                v_format.pixelFormat());
    if(!v_video_frame->map(QAbstractVideoBuffer::WriteOnly))
    {
        // Mapping can fail; previously bits() would have been NULL here.
        delete v_video_frame;
        av_free(v_frame);
        return NULL;
    }
    // NOTE(review): copies only from data[0]; for planar formats the other
    // planes are separate buffers — confirm callers only use packed or
    // contiguously-allocated frames.
    memcpy(v_video_frame->bits(), v_frame->data[0], v_video_frame->mappedBytes());
    v_video_frame->unmap();

    av_free(v_frame);
    return v_video_frame;
}

// Decodes this packet's video frame, scales it to a_size, and wraps the
// result in a QImage.
//
// a_size  : target dimensions for the scaled image.
// a_depth : target bit depth; must be 16 (RGB565) or 32 (BGRA -> RGB32).
//
// Returns a null QImage on any failure.  NOTE(review): the returned
// QImage aliases m_player_ctx->m_resize_pic's buffer rather than owning
// a copy — it is only valid until the next resize/decode; confirm callers
// consume it immediately or call QImage::copy().
QImage T_AV_Packet::decodeToQImage(const QSize &a_size, int a_depth)
{
    if(this->isNull())
    {
        return QImage();
    }
    AVFrame *v_frame=avcodec_alloc_frame();
    int v_got_pictur;
    if(avcodec_decode_video2(
            m_codec_ctx, //AVCodecContext *avctx,
            v_frame, //AVFrame *picture,
            &v_got_pictur, //int *got_picture_ptr,
            &m_av_packet //AVPacket *avpkt
            )<0)
    {
        av_free(v_frame);
        return QImage();
    }
    if(!v_got_pictur)
    {
        av_free(v_frame);
        return QImage();
    }

    Q_ASSERT(a_depth==16||a_depth==32);

    PixelFormat v_src_fmt;
    QImage::Format v_dst_fmt;

    switch(a_depth)
    {
    case 16:
        v_src_fmt = PIX_FMT_RGB565;
        v_dst_fmt = QImage::Format_RGB16;
        break;
    case 32:
        v_src_fmt = PIX_FMT_BGRA;
        //v_dst_fmt = QImage::Format_ARGB32;
        v_dst_fmt = QImage::Format_RGB32;
        break;
    default:
        Q_ASSERT(0);
        // In release builds Q_ASSERT is a no-op; previously execution fell
        // through with v_src_fmt/v_dst_fmt uninitialized and leaked v_frame.
        av_free(v_frame);
        return QImage();
    }

    m_player_ctx->m_resize_pic.resize(v_src_fmt, a_size.width(), a_size.height());

    SwsContext *v_sws_ctx = sws_getContext(
            m_codec_ctx->width, //int srcW,
            m_codec_ctx->height, //int srcH,
            m_codec_ctx->pix_fmt, //enum PixelFormat srcFormat,
            m_player_ctx->m_resize_pic.m_width, //int dstW,
            m_player_ctx->m_resize_pic.m_height, //int dstH,
            m_player_ctx->m_resize_pic.m_pix_fmt, //enum PixelFormat dstFormat,
            SWS_FAST_BILINEAR, //SWS_BICUBIC, //int flags,
            NULL, //SwsFilter *srcFilter,
            NULL, //SwsFilter *dstFilter,
            NULL  //const double *param
            );
    if(!v_sws_ctx)
    {
        // sws_getContext() may fail for unsupported format/size combinations.
        av_free(v_frame);
        return QImage();
    }
    int v_scale_result = sws_scale(
            v_sws_ctx,
            v_frame->data,
            v_frame->linesize,
            0,
            m_codec_ctx->height,
            m_player_ctx->m_resize_pic.m_av_picture.data,
            m_player_ctx->m_resize_pic.m_av_picture.linesize
            );
    Q_UNUSED(v_scale_result);
    sws_freeContext(v_sws_ctx);
    // The frame's pixels have been scaled into m_resize_pic; release it now
    // (previously leaked on this success path).
    av_free(v_frame);
    QImage v_img(
            m_player_ctx->m_resize_pic.m_av_picture.data[0],
            m_player_ctx->m_resize_pic.m_width,
            m_player_ctx->m_resize_pic.m_height,
            v_dst_fmt
            );
    return v_img;
}

