// $URL: http://mingw-lib.googlecode.com/svn/trunk/working/avcodec_to_widget_10/t_av_packet.cpp $
// $Rev: 344 $
// $Author: akio.miyoshi $
// $Date:: 2010-08-02 12:42:15 +0000#$

#include "t_av_packet.h"
#include "t_box_player_ctx.h"

T_AV_Packet::T_AV_Packet(AVFormatContext *a_format_ctx, AVPacket *a_av_packet)
    : m_format_ctx(a_format_ctx)
    , m_video_frame(0)
    , m_video_picture(0)
{
    // Wraps a demuxed packet. Takes a shallow copy; the packet payload is
    // released in the destructor via av_free_packet().
    Q_ASSERT(a_av_packet);
    m_av_packet = *a_av_packet;
    // Resolve the stream / codec context this packet belongs to.
    AVStream *v_stream = m_format_ctx->streams[m_av_packet.stream_index];
    m_stream = v_stream;
    m_codec_ctx = v_stream->codec;
    // Presentation time in milliseconds: pts scaled by the stream time base.
    // (Same left-to-right evaluation order as before: 1000 * pts * num / den.)
    m_timing = 1000.0 * m_av_packet.pts * v_stream->time_base.num / v_stream->time_base.den;
}

T_AV_Packet::T_AV_Packet(qint64 a_timing)
    : m_format_ctx(0)
    , m_video_frame(0)
    , m_video_picture(0)
{
    // Sentinel constructor: builds the <end of queue> marker packet.
    // The embedded AVPacket is zeroed so isNull() reports true and the
    // destructor skips av_free_packet().
    memset(&m_av_packet, 0, sizeof(m_av_packet));
    m_stream = NULL;
    m_codec_ctx = NULL;
    // Typically the stream duration (m_player_ctx->duration()).
    m_timing = a_timing;
}

T_AV_Packet::~T_AV_Packet()
{
    // Trace destruction of decoded key-frame packets only.
    const int v_key_flag = this->m_av_packet.flags & PKT_FLAG_KEY;
    if(m_video_frame && v_key_flag != 0)
    {
        qDebug() << "[T_AV_Packet::~T_AV_Packet()]" << this->frameNo() << this->timing() << (this->m_av_packet.flags & PKT_FLAG_KEY);
    }
    // Real packets own their payload; the <end of queue> marker does not.
    if(!this->isNull())
    {
        av_free_packet(&m_av_packet);
    }
    if(m_video_frame)
    {
        av_free(m_video_frame);
    }
    // delete on NULL is a no-op, so no guard is needed here.
    delete m_video_picture;
}

QByteArray T_AV_Packet::decodeAudio(T_AV_Audio_Decode_Buffer *a_decode_buff)
{
    // Decodes the whole packet into interleaved signed 16-bit PCM bytes.
    // Returns an empty array for the <end of queue> marker packet; on a
    // decode error the remainder of the packet is skipped.
    QMutexLocker v_lock(this);
    Q_UNUSED(v_lock);
    if(this->isNull())
    {
        return QByteArray();
    }
    Q_ASSERT(m_codec_ctx);
    // Work on a shallow copy so data/size can be advanced while the
    // member packet keeps ownership of the buffer.
    AVPacket v_av_packet = this->m_av_packet;
    QByteArray v_result;
    while(v_av_packet.size > 0)
    {
        int v_dec_len = sizeof(a_decode_buff->decode_array);
        int v_enc_len = avcodec_decode_audio3(
                m_codec_ctx,
                (int16_t *)a_decode_buff->decode_array,
                &v_dec_len,
                &v_av_packet
                );
        if (v_enc_len <= 0)
        {
            // Error (<0) or zero bytes consumed (==0 — previously this
            // would loop forever since data/size never advanced): skip
            // the rest of the frame.
            v_av_packet.size = 0; //force frame finished
            continue;
        }
        if (m_codec_ctx->sample_fmt == SAMPLE_FMT_S16)
        {
            // Codec already produced S16: append the decoded bytes directly.
            v_result.append((char *)a_decode_buff->decode_array, v_dec_len);
        }
        else
        {
            // Convert from the codec's native sample format to S16.
            AVAudioConvert *convCtx =
                    av_audio_convert_alloc(
                            SAMPLE_FMT_S16,
                            1,
                            m_codec_ctx->sample_fmt,
                            1,
                            NULL,
                            0);
            const void *v_ibuf[6]= {a_decode_buff->decode_array};
            void *v_obuf[6]= {a_decode_buff->convert_array};
            int v_istride[6]= {av_get_bits_per_sample_format(m_codec_ctx->sample_fmt)/8};
            int v_ostride[6]= {av_get_bits_per_sample_format(SAMPLE_FMT_S16)/8};
            int v_len= v_dec_len/v_istride[0];
            int v_conv_ret = av_audio_convert(convCtx, v_obuf, v_ostride, v_ibuf, v_istride, v_len);
            // BUGFIX: the conversion context used to be leaked on the
            // failure path (break skipped av_audio_convert_free); free it
            // unconditionally before acting on the result.
            av_audio_convert_free(convCtx);
            if (v_conv_ret < 0)
            {
                qDebug() << "av_audio_convert() failed";
                break;
            }
            qDebug() << "av_audio_convert() successful";
            v_dec_len = v_len * v_ostride[0];
            v_result.append((char *)a_decode_buff->convert_array, v_dec_len);
        }
        // Advance past the encoded bytes the decoder consumed.
        v_av_packet.data += v_enc_len;
        v_av_packet.size -= v_enc_len;
    }
    return v_result;
}

bool T_AV_Packet::decodeVideoToNull(bool a_make_copy)
{
    // Decodes this packet's video frame (at most once) without producing
    // an image. Returns false for the marker packet, on decoder error, or
    // when the decoder yields no displayable picture.
    QMutexLocker v_lock(this);
    Q_UNUSED(v_lock);
    if(this->isNull())
    {
        return false;
    }
    if(m_video_frame)
    {
        // Frame was already decoded by an earlier call.
        return true;
    }
    m_video_frame = new T_AVFrame(m_codec_ctx);
    int v_got_picture = 0;
    const int v_decode_ret = avcodec_decode_video2(
            m_codec_ctx,            //AVCodecContext *avctx,
            m_video_frame->frame(), //AVFrame *picture,
            &v_got_picture,         //int *got_picture_ptr,
            &m_av_packet            //AVPacket *avpkt
            );
    if(v_decode_ret < 0 || !v_got_picture)
    {
        // Decode failed or produced no picture: roll back to undecoded state.
        delete m_video_frame;
        m_video_frame = NULL;
        return false;
    }
    if(a_make_copy)
    {
        // Keep an independent copy of the picture data alongside the frame.
        m_video_picture = new T_AVPicture(m_video_frame);
    }
    return true;
}

QImage T_AV_Packet::decodeVideoToQImage(T_AVPicture *a_decode_buff, const QSize &a_size, int a_depth)
{
    // Decodes (if needed) and scales the frame into a_decode_buff, converts
    // it to a QImage of a_size/a_depth, and overlays a frame-number/timing
    // caption. The result is cached in m_video_image for repeat calls.
    QMutexLocker v_lock(this);
    Q_UNUSED(v_lock);
    if(this->isNull() || !this->decodeVideoToNull())
    {
        // Marker packet or decode failure: return (and cache) a null image.
        m_video_image = QImage();
        return m_video_image;
    }
    if(!m_video_image.isNull())
    {
        // Already converted on a previous call.
        return m_video_image;
    }
    Q_ASSERT(m_video_frame);
    Q_ASSERT(a_depth==16||a_depth==32);

    // Prefer the detached picture copy when one was made during decode.
    T_AVImage *v_source = m_video_picture ? (T_AVImage *)m_video_picture : (T_AVImage *)m_video_frame;

    a_decode_buff->resizeForQImage(a_depth, a_size.width(), a_size.height());
    a_decode_buff->scaleFrom(v_source, SWS_FAST_BILINEAR);
    m_video_image = a_decode_buff->toQImage();

    // Overlay "F=<frame> T=<ms> <sec>" in black with a white drop shadow.
    {
        QPainter v_painter(&m_video_image);
        QFont v_font("MS UI Gothic", 16, QFont::Bold);
        v_font.setFixedPitch(true);
        v_painter.setFont(v_font);
        const QString v_caption = QString("F=%1 T=%2 %3")
                                  .arg(this->frameNo())
                                  .arg(this->timing())
                                  .arg(this->timing()/1000.0)
                                  ;
        const int v_shadow = 1;
        const int v_x_off = 2;
        const int v_y_off = 24;
        v_painter.setPen(Qt::white);
        v_painter.drawText(QPoint(v_x_off+v_shadow, v_y_off+v_shadow), v_caption);
        v_painter.setPen(Qt::black);
        v_painter.drawText(QPoint(v_x_off, v_y_off), v_caption);
    }

    Q_ASSERT(!m_video_image.isNull());
    return m_video_image;
}

void T_AV_Packet_Queue::enqueue(T_AV_Packet *a_av_packet)
{
    // Appends a packet to the queue under the queue mutex. Passing NULL
    // enqueues an <end of queue> marker stamped with the stream duration
    // and marks the queue complete; enqueueing after completion asserts.
    QMutexLocker v_lock(&m_atomic);
    Q_UNUSED(v_lock);
    Q_ASSERT(!m_complete);
    const bool v_is_marker = (a_av_packet == NULL);
    if(v_is_marker)
    {
        a_av_packet = new T_AV_Packet(m_player_ctx->duration());
    }
    // Frame numbers follow arrival order, marker included.
    a_av_packet->m_frame_no = m_enqueue_count++;
    m_packet_queue.enqueue(QSharedPointer<T_AV_Packet>(a_av_packet));
    if(v_is_marker)
    {
        m_complete = true;
        m_max_timing = m_player_ctx->duration();
    }
    else
    {
        m_max_timing = a_av_packet->timing();
    }
}
