// $URL: http://mingw-lib.googlecode.com/svn/trunk/working/avcodec_to_widget_9/t_av_packet.cpp $
// $Rev: 319 $
// $Author: akio.miyoshi $
// $Date:: 2010-07-31 09:47:06 +0000#$

#include "t_av_packet.h"
#include "t_box_player_ctx.h"

// Wrap one demuxed AVPacket together with its stream/codec context and a
// precomputed presentation time in milliseconds.  A NULL a_av_packet builds
// the special <end of queue> marker instead.
T_AV_Packet::T_AV_Packet(T_Box_Player_Context *a_player_ctx, AVPacket *a_av_packet)
    : m_player_ctx(a_player_ctx)
    , m_format_ctx(a_player_ctx->m_format_ctx)
    , m_video_frame(0)
{
    if(a_av_packet != NULL)
    {
        // Shallow-copy the packet and cache the stream/codec it belongs to.
        m_av_packet = *a_av_packet;
        m_stream_index = m_av_packet.stream_index;
        m_stream = m_format_ctx->streams[m_stream_index];
        m_codec_ctx = m_stream->codec;
        // pts ticks -> milliseconds via the stream time base.
        // NOTE(review): assumes pts is valid here — a packet with
        // pts==AV_NOPTS_VALUE would yield a nonsense timing; confirm the
        // demuxer always supplies pts for these streams.
        m_timing = 1000.0 * m_av_packet.pts * m_stream->time_base.num / m_stream->time_base.den;
    }
    else
    {
        // End-of-queue marker: zeroed packet, no stream, timing pinned to
        // the media duration so it sorts after every real packet.
        memset(&m_av_packet, 0, sizeof(m_av_packet));
        m_stream_index = -1;
        m_stream = NULL;
        m_codec_ctx = NULL;
        m_timing = m_player_ctx->duration();
    }
}

// Decode this packet's audio payload into raw signed-16-bit PCM.
// Returns an empty QByteArray for the <end of queue> marker; on a decoder
// error the frame is skipped and whatever was decoded so far is returned.
// Serialized via the packet's own mutex.
QByteArray T_AV_Packet::decodeAudio()
{
    QMutexLocker v_lock(this);
    Q_UNUSED(v_lock);
    if(this->isNull())
    {
        return QByteArray();
    }
    Q_ASSERT(m_codec_ctx);
    // Work on a local copy: the decoder may consume the packet in several
    // chunks, and we advance data/size past each consumed chunk below.
    AVPacket v_av_packet = this->m_av_packet;
    QByteArray v_result;
    while(v_av_packet.size > 0)
    {
        int v_dec_len = sizeof(m_player_ctx->m_audio_decode_array);
        int v_enc_len = avcodec_decode_audio3(
                m_codec_ctx,
                (int16_t *)m_player_ctx->m_audio_decode_array,
                &v_dec_len,
                &v_av_packet
                );
        if (v_enc_len < 0)
        {
            /* if error, we skip the frame */
            v_av_packet.size = 0; //force frame finished
        }
        else
        {
            if (m_codec_ctx->sample_fmt == SAMPLE_FMT_S16)
            {
                // Already S16: append the decoded bytes as-is.
                v_result.append((char *)m_player_ctx->m_audio_decode_array, v_dec_len);
            }
            else
            {
                // Codec produced a different sample format: convert to S16.
                AVAudioConvert *convCtx =
                        av_audio_convert_alloc(
                                SAMPLE_FMT_S16,
                                1,
                                m_codec_ctx->sample_fmt,
                                1,
                                NULL,
                                0);
                // BUGFIX: check the allocation; the original passed a
                // potentially-NULL context straight to av_audio_convert().
                if (convCtx == NULL)
                {
                    qDebug() << "av_audio_convert_alloc() failed";
                    break;
                }
                const void *v_ibuf[6]= {m_player_ctx->m_audio_decode_array};
                void *v_obuf[6]= {m_player_ctx->m_audio_convert_array};
                int v_istride[6]= {av_get_bits_per_sample_format(m_codec_ctx->sample_fmt)/8};
                int v_ostride[6]= {av_get_bits_per_sample_format(SAMPLE_FMT_S16)/8};
                int v_len= v_dec_len/v_istride[0];
                int v_conv_result =
                        av_audio_convert(convCtx, v_obuf, v_ostride, v_ibuf, v_istride, v_len);
                // BUGFIX: free the converter on BOTH paths; the original
                // leaked convCtx when av_audio_convert() failed and broke
                // out of the loop before av_audio_convert_free().
                av_audio_convert_free(convCtx);
                if (v_conv_result < 0)
                {
                    qDebug() << "av_audio_convert() failed";
                    break;
                }
                qDebug() << "av_audio_convert() successful";
                v_dec_len = v_len * v_ostride[0];
                v_result.append((char *)m_player_ctx->m_audio_convert_array, v_dec_len);
            }
            // Advance past the bytes the decoder consumed.
            v_av_packet.data += v_enc_len;
            v_av_packet.size -= v_enc_len;
        }
    }
    return v_result;
}

// Decode this packet's video payload into m_video_frame, which is kept for
// later scaling/conversion.  Returns true when a complete picture is
// available; false for the marker packet, allocation failure, decode error,
// or an incomplete frame.  Idempotent: a second call reuses the frame.
bool T_AV_Packet::decodeVideoToNull()
{
    QMutexLocker v_lock(this);
    Q_UNUSED(v_lock);
    if(this->isNull())
    {
        return false;
    }
    if(m_video_frame)
    {
        // Already decoded on a previous call.
        return true;
    }
    m_video_frame = avcodec_alloc_frame();
    // BUGFIX: the original never checked the allocation and would hand a
    // NULL frame to avcodec_decode_video2() on out-of-memory.
    if(m_video_frame == NULL)
    {
        return false;
    }
    int v_got_pictur = 0; // initialized: decoder may leave it unset on error
    if(avcodec_decode_video2(
            m_codec_ctx, //AVCodecContext *avctx,
            m_video_frame, //AVFrame *picture,
            &v_got_pictur, //int *got_picture_ptr,
            &m_av_packet //AVPacket *avpkt
            )<0)
    {
        av_free(m_video_frame);
        m_video_frame = NULL;
        return false;
    }
    if(!v_got_pictur)
    {
        // Decoder consumed data but produced no complete frame yet.
        av_free(m_video_frame);
        m_video_frame = NULL;
        return false;
    }
    return true;
}

// Decode this packet's video frame and scale/convert it into a QImage of
// a_size at the requested depth (16 = RGB565, 32 = RGB32).  Returns a null
// QImage for the marker packet or on any failure.
// NOTE(review): the returned QImage wraps m_resize_pic's buffer (no deep
// copy), so it is only valid until that buffer is resized/reused — confirm
// callers consume or copy it before the next decode.
QImage T_AV_Packet::decodeVideoToQImage(const QSize &a_size, int a_depth)
{
    QMutexLocker v_lock(this);
    Q_UNUSED(v_lock);
    if(this->isNull())
    {
        return QImage();
    }
    if(!this->decodeVideoToNull())
    {
        return QImage();
    }
    Q_ASSERT(m_video_frame);
    Q_ASSERT(a_depth==16||a_depth==32);
    PixelFormat v_src_fmt;
    QImage::Format v_dst_fmt;
    switch(a_depth)
    {
    case 16:
        v_src_fmt = PIX_FMT_RGB565;
        v_dst_fmt = QImage::Format_RGB16;
        break;
    case 32:
        // BGRA byte order matches QImage::Format_RGB32 on little-endian;
        // the alpha byte is ignored by RGB32.
        v_src_fmt = PIX_FMT_BGRA;
        //v_dst_fmt = QImage::Format_ARGB32;
        v_dst_fmt = QImage::Format_RGB32;
        break;
    default:
        Q_ASSERT(0);
        return QImage(); // keep release builds safe past the assert
    }
    //FIXME: m_resize_pic should be locked
    m_player_ctx->m_resize_pic.resize(v_src_fmt, a_size.width(), a_size.height());
    SwsContext *v_sws_ctx = sws_getContext(
            m_codec_ctx->width, //int srcW,
            m_codec_ctx->height, //int srcH,
            m_codec_ctx->pix_fmt, //enum PixelFormat srcFormat,
            m_player_ctx->m_resize_pic.m_width, //int dstW,
            m_player_ctx->m_resize_pic.m_height, //int dstH,
            m_player_ctx->m_resize_pic.m_pix_fmt, //enum PixelFormat dstFormat,
            SWS_FAST_BILINEAR, //SWS_BICUBIC, //int flags,
            NULL, //SwsFilter *srcFilter,
            NULL, //SwsFilter *dstFilter,
            NULL  //const double *param
            );
    // BUGFIX: the original passed a NULL context to sws_scale() when
    // sws_getContext() failed (unsupported format or zero-sized frame).
    if(v_sws_ctx == NULL)
    {
        return QImage();
    }
    int v_scale_result = sws_scale(
            v_sws_ctx,
            m_video_frame->data,
            m_video_frame->linesize,
            0,
            m_codec_ctx->height,
            m_player_ctx->m_resize_pic.m_av_picture.data,
            m_player_ctx->m_resize_pic.m_av_picture.linesize
            );
    sws_freeContext(v_sws_ctx);
    if(v_scale_result < 0)
    {
        // Scaling failed: do not expose an undefined buffer.
        return QImage();
    }
    // BUGFIX: pass the picture's actual stride.  The 4-argument QImage
    // constructor assumes 32-bit-aligned scanlines, which is wrong whenever
    // linesize[0] differs (e.g. odd widths at 16 bpp, padded linesizes).
    QImage v_img(
            m_player_ctx->m_resize_pic.m_av_picture.data[0],
            m_player_ctx->m_resize_pic.m_width,
            m_player_ctx->m_resize_pic.m_height,
            m_player_ctx->m_resize_pic.m_av_picture.linesize[0],
            v_dst_fmt
            );
    return v_img;
}

// Append a packet to the queue under the queue mutex.  A NULL argument
// appends a freshly-built <end of queue> marker, flags the queue complete,
// and pins m_max_timing to the media duration; a real packet advances
// m_max_timing to its own timing.  Must not be called after completion.
void T_AV_Packet_Queue::enqueue(T_AV_Packet *a_av_packet)
{
    QMutexLocker v_lock(&m_atomic);
    Q_UNUSED(v_lock);
    Q_ASSERT(!m_complete);
    const bool v_is_marker = (a_av_packet == NULL);
    if(v_is_marker)
    {
        // Materialize the end-of-stream marker packet.
        a_av_packet = new T_AV_Packet(m_player_ctx, NULL);
    }
    a_av_packet->m_frame_no = m_enqueue_count++;
    if(v_is_marker)
    {
        m_complete = true;
        m_max_timing = m_player_ctx->duration();
    }
    else
    {
        m_max_timing = a_av_packet->timing();
    }
    packet_queue.enqueue(QSharedPointer<T_AV_Packet>(a_av_packet));
}
