#include "widget.h"
#include "ui_widget.h"

#include "stable.h"

#include "t_my_av_utils.h"
#include "t_box_player_ctx.h"

#include "t_youtube.h"
#include "input/input.h"

static void my_av_log_callback(void* ptr, int level, const char* fmt, va_list vl);


// Constructs the player window: builds the Designer UI, initializes FFmpeg,
// opens the test input, creates the decoding core and player context,
// sizes the video surface to the stream resolution, and starts the render
// timer. If get_test_input() yields no media, the constructor bails out
// early and the widget stays inert (m_core / m_player_ctx remain NULL).
Widget::Widget(QWidget *parent)
    : QWidget(parent)
    , ui(new Ui::Widget)
    , m_media_seq(0)
    , m_core(0)
    , m_player_ctx(0)
    , m_fps1(1000)        // rate meters; 1000/500 presumably window sizes in ms — confirm ctor semantics
    , m_fps2(1000)
    , m_fps_download(500)
{
    ui->setupUi(this);

    // Video output surface, appended below the Designer-built widgets.
    screenWidget = new T_ScreenWidget(this);
    screenWidget->setObjectName(QString::fromUtf8("screenWidget"));
    ui->verticalLayout->addWidget(screenWidget);

    av_register_all();                       // FFmpeg global codec/format registration
    av_log_set_callback(my_av_log_callback); // route FFmpeg log output through qDebug

    m_media_seq = get_test_input();

    // No input available: leave the widget in a non-playing state.
    if(!m_media_seq) return;

    Q_ASSERT(m_media_seq);

    Q_ASSERT(m_media_seq->isValid());

    m_core = new T_Box_Core(m_media_seq);
    Q_ASSERT(m_core->isValid());
    Q_ASSERT(m_core->globalControl()->isValid());

    // Dump the demuxer state for debugging (FFmpeg's own dump + project dump).
    AVFormatContext	*v_format_ctx = m_core->globalControl()->formatContext();
    Q_ASSERT(v_format_ctx);
    dump_format(v_format_ctx, 0, m_media_seq->displayName().toLocal8Bit().constData(), false);
    test_dump_format_ctx(v_format_ctx);

    m_player_ctx = new T_Box_Player_Context(m_core);

    qDebug() << "[audioCodec->long_name]" << m_player_ctx->audio()->codec()->long_name;
    qDebug() << "[audioCtx->sample_fmt]" << m_player_ctx->audio()->codecContex()->sample_fmt;
    qDebug() << "[audioCtx->sample_rate]" << m_player_ctx->audio()->codecContex()->sample_rate;

    // Seek bar spans the full content duration.
    m_player_ctx->m_seek_bar = new T_VideoProgressBar(m_core->seekManager(), this);
    m_player_ctx->m_seek_bar->setMaximum(m_core->globalControl()->contentDuration());
    ui->verticalLayout->addWidget(m_player_ctx->m_seek_bar);

    // Buffering bar uses the same scale as the seek bar.
    m_player_ctx->m_buffering_bar = new T_BufferProgressBar(this);
    m_player_ctx->m_buffering_bar->setMaximum(m_core->globalControl()->contentDuration());
    ui->verticalLayout->addWidget(m_player_ctx->m_buffering_bar);

    qDebug()<< "[m_video_stream->start_time]"
            << m_player_ctx->video()->stream()->start_time
            ;

#if 0x1
    // Diagnostic dump of stream timing parameters (time bases / frame rates).
    qDebug()<< "[m_video_stream->time_base]"
            << m_player_ctx->video()->time_base().num
            << m_player_ctx->video()->time_base().den
            << m_player_ctx->video()->time_base().toDouble();
            ;
    qDebug()<< "[m_video_stream->avg_frame_rate]"
            << m_player_ctx->video()->avg_frame_rate().num
            << m_player_ctx->video()->avg_frame_rate().den
            << m_player_ctx->video()->avg_frame_rate().toDouble();
            ;
    qDebug()<< "[m_video_stream->r_frame_rate]"
            << m_player_ctx->video()->r_frame_rate().num
            << m_player_ctx->video()->r_frame_rate().den
            << m_player_ctx->video()->r_frame_rate().toDouble();
            ;
    qDebug()<< "[m_audio.stream->time_base]"
            << m_player_ctx->audio()->time_base().num
            << m_player_ctx->audio()->time_base().den
            << m_player_ctx->audio()->time_base().toDouble();
            ;
#endif

    m_drop_count = 0;

    // Window title shows total duration + media display name.
    T_Clock v_duration_clock;
    v_duration_clock.setElapsed(m_core->globalControl()->contentDuration());

    this->setWindowTitle(QString("%1 %2")
                         .arg(v_duration_clock.toString("hh:mm:ss[zzz]"))
                         .arg(m_media_seq->displayName())
                         );

    // Scale the display surface down for large sources:
    // >=1080p -> quarter size, >=720p/1280w -> half size, else native.
    if(m_player_ctx->video()->codecContex()->height >= 1080)
    {
        this->screenWidget->setScreenSize(
                QSize(m_player_ctx->video()->codecContex()->width/4,
                      m_player_ctx->video()->codecContex()->height/4)
                );
    }
    else if(m_player_ctx->video()->codecContex()->width >= 1280
       ||
       m_player_ctx->video()->codecContex()->height >= 720
       )
    {
        this->screenWidget->setScreenSize(
                QSize(m_player_ctx->video()->codecContex()->width/2,
                      m_player_ctx->video()->codecContex()->height/2)
                );
    }
    else
    {
        this->screenWidget->setScreenSize(
                QSize(m_player_ctx->video()->codecContex()->width,
                      m_player_ctx->video()->codecContex()->height)
                );
    }
    this->screenWidget->adjustSize();

    // Kick off decoding threads, then drive rendering from a 5 ms timer.
    m_player_ctx->start();

    connect(&m_video_timer, SIGNAL(timeout()), SLOT(onTimerTimeout()));
    //m_video_timer.start(1);
    m_video_timer.start(5);
    //m_video_timer.start(10);
    //m_video_timer.start(30);
}

// Tears playback down in dependency order: render timer -> player context
// -> core -> media sequence -> UI. The numbered qDebug lines are
// checkpoints for tracing shutdown hangs.
Widget::~Widget()
{
    qDebug() << "[Widget::~Widget()]";
    //if(m_audioOutput) m_audioOutput->stop();
    qDebug() << "[Widget::~Widget(1)]";
    m_video_timer.stop();  // stop render ticks before destroying their targets
    qDebug() << "[Widget::~Widget(4)]";

    // m_player_ctx is NULL when the constructor bailed out early.
    if(m_player_ctx) m_player_ctx->finish();
    delete m_player_ctx;
    qDebug() << "[Widget::~Widget(4.1)]";
    //delete m_core;
    // m_core is nulled *before* deletion — presumably so anything running
    // during the core's destruction observes NULL instead of a dying
    // object; confirm against T_Box_Core teardown.
    T_Box_Core *a_core = m_core;
    m_core = NULL;
    delete a_core;
    qDebug() << "[Widget::~Widget(4.2)]";
    delete m_media_seq;
    qDebug() << "[Widget::~Widget(4.3)]";
    delete ui;
    qDebug() << "[Widget::~Widget(5)]";
}

// Render-timer slot (fires every ~5 ms; see the constructor).
// 1) Soft A/V sync: moves the shared timeline halfway toward the audio
//    clock each tick instead of snapping, smoothing out jumps.
// 2) Suspends/resumes the timeline based on buffering and queue state.
// 3) Pops video packets whose presentation time has arrived, drops frames
//    already superseded by a later due frame, and paints exactly one frame.
void Widget::onTimerTimeout()
{
    //[seek lock]
    T_READ_LOCKER(v_seek_locker, m_core->seekManager());
    //
    //T_AV_Packet_Queue *v_qu = m_player_ctx->video()->packetQueue();
    T_SharedPointerQueue<T_AV_Packet> *v_qu = m_player_ctx->videoPacketQueue();
    //
    // Audio is the master clock; a negative elapsed presumably means
    // "no audio clock yet" — confirm against m_speaker->elapsed().
    qint64 v_audio_elapsed = m_player_ctx->m_speaker->elapsed();
    if(v_audio_elapsed >= 0)
    {
        qint64 v_video_elapsed = m_core->timeline()->elapsed();
        qint64 v_elapsed_diff = v_audio_elapsed - v_video_elapsed;
        m_core->timeline()->setElapsed(v_video_elapsed + (v_elapsed_diff/2));
    }
    show_status();
    // While (re)buffering, pause the clock so playback cannot run ahead.
    if(m_player_ctx->needBuffering())
    {
        suspend_time();
        return;
    }
    if(v_qu->size()==0)
    {
#if 0x0 //FIXME
        // end of queue
        if(v_qu->isComplete())
        {
            resume_time();
        }
        else
#endif
        {
            // Queue ran dry: stall the clock until more packets arrive.
            suspend_time();
        }
        return;
    }
    resume_time();
    // NOTE(review): elapsed() is qint64 elsewhere in this slot but is
    // narrowed to int here — overflows for very long elapsed times; confirm
    // this is intentional.
    int v_elapsed = m_core->timeline()->elapsed();

    QSharedPointer<T_AV_Packet> v_head_packet = v_qu->head();
    // End-of-stream marker: keep the clock running until the full content
    // duration has elapsed, then pin the clock at the end and stop.
    if(v_head_packet->isEnd())
    {
        if(v_elapsed < m_core->globalControl()->contentDuration())
        {
            resume_time();
        }
        else
        {
            m_core->timeline()->setElapsed(m_core->globalControl()->contentDuration());
            suspend_time();
        }
        return;
    }
    else if(v_elapsed < v_head_packet->timing())
    {
        // too early
        resume_time();
        return;
    }
#if 0x1
    // Require at least two queued packets so the drop logic below can peek
    // at the next frame's timing.
    if(v_qu->size()<2)
    {
        //
        resume_time();
        return;
    }
#endif
    resume_time();
    // Drain loop: discard frames that are already older than the next due
    // frame, then render the newest due frame and exit.
    for(;;)
    {
        QSharedPointer<T_AV_Packet> v_head_packet = v_qu->head();
        if(v_elapsed < v_head_packet->timing())
        {
            // too early
            return;
        }
        QSharedPointer<T_AV_Packet> v_curr_packet = v_qu->dequeue();
        {
            if(v_qu->size()==0)
            {
                //there is no next frame
                // should render current frame
                //qDebug() << "(1)       [frameNo]" << v_curr_packet->frameNo() << "[packet.pts]" << v_curr_packet->timing() << v_elapsed;
                m_fps1.add(1);
                m_fps2.add(1);
            }
            else
            {
                QSharedPointer<T_AV_Packet> v_next_packet = v_qu->head();
                if(v_elapsed >= v_next_packet->timing())
                {
                    // Next frame is also due: drop the current one.
                    // should render next frame
                    //qDebug() << "=====>DROP[frameNo]" << v_curr_packet->frameNo() << "[packet.pts]" << v_curr_packet->timing() << v_elapsed;
                    m_fps1.add(1);
                    m_drop_count++;
#if 0x0
                    v_curr_packet->decodeVideoToNull();
#endif
                    v_curr_packet = QSharedPointer<T_AV_Packet>(NULL);
                    continue;
                }
                // should render current frame
                //qDebug() << "(2)       [frameNo]" << v_curr_packet->frameNo() << "[packet.pts]" << v_curr_packet->timing() << v_elapsed;
                m_fps1.add(1);
                m_fps2.add(1);
            }
            {
#if 0x1
                // 16-bpp output selected (cheaper than 32-bpp).
                int v_depth = 16;
#else
                int v_depth = 32;
#endif
                QImage v_image = v_curr_packet->decodeVideoToQImage(
                        &m_player_ctx->m_video_decode_buff,
                        screenWidget->size(),
                        v_depth);
                if(!v_image.isNull())
                {
                    screenWidget->m_image = v_image;
                    screenWidget->update();
                }
                break;
            }
        }
        v_curr_packet = QSharedPointer<T_AV_Packet>(NULL);
        break;
    }
}

// Legacy implementation of Widget::onTimerTimeout(), disabled via "#if 0x0".
// It reads the queue through m_player_ctx->video()->packetQueue() on every
// access instead of caching the pointer, and contains the USE_DECODE_THREAD
// variants. Kept for reference only.
#if 0x0
void Widget::onTimerTimeout()
{
    //[seek lock]
    T_READ_LOCKER(v_seek_locker, m_core->seekManager());
    //
    ////T_READ_LOCKER(v_queue_lock, m_player_ctx->video()->packetQueue());
    //
    qint64 v_audio_elapsed = m_player_ctx->m_speaker->elapsed();
    if(v_audio_elapsed >= 0)
    {
        qint64 v_video_elapsed = m_core->timeline()->elapsed();
        qint64 v_elapsed_diff = v_audio_elapsed - v_video_elapsed;
        m_core->timeline()->setElapsed(v_video_elapsed + (v_elapsed_diff/2));
    }
    show_status();
    if(m_player_ctx->needBuffering())
    {
        suspend_time();
        return;
    }
    if(m_player_ctx->video()->packetQueue()->size()==0)
    {
        // end of queue
        if(m_player_ctx->video()->packetQueue()->isComplete())
        {
            resume_time();
        }
        else
        {
            suspend_time();
        }
        return;
    }
    resume_time();
    int v_elapsed = m_core->timeline()->elapsed();

    QSharedPointer<T_AV_Packet> v_head_packet = m_player_ctx->video()->packetQueue()->head();
    if(v_head_packet->isEnd())
    {
        if(v_elapsed < m_core->globalControl()->contentDuration())
        {
            resume_time();
        }
        else
        {
            m_core->timeline()->setElapsed(m_core->globalControl()->contentDuration());
            suspend_time();
        }
        return;
    }
    else if(v_elapsed < v_head_packet->timing())
    {
        // too early
        resume_time();
        return;
    }
#if 0x1
    if(m_player_ctx->video()->packetQueue()->size()<2)
    {
        //
        resume_time();
        return;
    }
#endif
    resume_time();
    for(;;)
    {
        QSharedPointer<T_AV_Packet> v_head_packet = m_player_ctx->video()->packetQueue()->head();
        if(v_elapsed < v_head_packet->timing())
        {
            // too early
            return;
        }
#if USE_DECODE_THREAD
        if(!v_head_packet->isDecoded())
        {
            return;
        }
#endif
        QSharedPointer<T_AV_Packet> v_curr_packet = m_player_ctx->video()->packetQueue()->dequeue();
        //QSharedPointer<T_AV_Packet> v_curr_packet;
        //m_player_ctx->video()->packetQueue()->dequeue(v_curr_packet);
        {
            if(m_player_ctx->video()->packetQueue()->size()==0)
            {
                //there is no next frame
                // should render current frame
                //qDebug() << "(1)       [frameNo]" << v_curr_packet->frameNo() << "[packet.pts]" << v_curr_packet->timing() << v_elapsed;
                m_fps1.add(1);
                m_fps2.add(1);
            }
            else
            {
                QSharedPointer<T_AV_Packet> v_next_packet = m_player_ctx->video()->packetQueue()->head();
                if(v_elapsed >= v_next_packet->timing())
                {
                    // should render next frame
                    //qDebug() << "=====>DROP[frameNo]" << v_curr_packet->frameNo() << "[packet.pts]" << v_curr_packet->timing() << v_elapsed;
                    m_fps1.add(1);
                    m_drop_count++;
#if !USE_DECODE_THREAD
                    v_curr_packet->decodeVideoToNull();
#endif
                    v_curr_packet = QSharedPointer<T_AV_Packet>(NULL);
                    continue;
                }
                // should render current frame
                //qDebug() << "(2)       [frameNo]" << v_curr_packet->frameNo() << "[packet.pts]" << v_curr_packet->timing() << v_elapsed;
                m_fps1.add(1);
                m_fps2.add(1);
            }
            {
#if 0x1
                int v_depth = 16;
#else
                int v_depth = 32;
#endif
                QImage v_image = v_curr_packet->decodeVideoToQImage(
                        &m_player_ctx->m_video_decode_buff,
                        screenWidget->size(),
                        v_depth);
                if(!v_image.isNull())
                {
                    screenWidget->m_image = v_image;
                    screenWidget->update();
                }
                break;
            }
        }
        v_curr_packet = QSharedPointer<T_AV_Packet>(NULL);
        break;
    }
}
#endif

static void my_av_log_callback(void* ptr, int level, const char* fmt, va_list vl)
{
    static QByteArray s_buffer;
    QString v_print;
    v_print.vsprintf(fmt, vl);
    s_buffer.append(v_print.toLatin1());
    QBuffer v_buffer(&s_buffer);
    v_buffer.open(QIODevice::ReadOnly);
    if(v_buffer.canReadLine())
    {
        QString v_line = v_buffer.readLine();
        v_line.replace(QRegExp("(\\r|\\n)"), "");
        qDebug() << "[av_log]" << v_line;
        s_buffer = v_buffer.readAll();
    }
    v_buffer.close();
}

// Halts the shared playback clock so no further frames become "due".
// The speaker is deliberately left running — stopping it here was tried
// and rejected (m_player_ctx->speaker()->stop() is N.G.).
void Widget::suspend_time()
{
    m_core->timeline()->stop();
}

// Restarts the playback clock and ensures the audio output is running.
// The timeline is started first, then the speaker.
void Widget::resume_time()
{
    m_core->timeline()->start();
    m_player_ctx->speaker()->start();
}

// Refreshes all status UI for the current tick: the two FPS meters, the
// download-rate meter, frame counters, drop count, and the seek/buffering
// progress bars. Early-outs while the video packet queue is empty.
void Widget::show_status()
{
    ////T_READ_LOCKER(v_queue_lock, m_player_ctx->video()->packetQueue());
    static int s_show_count = 0;   // call counter (incremented but not read here)
    static T_AutoRunClock s_sw;    // auto-running clock (not read in this function)
    s_show_count++;
    T_AV_Packet_Queue &v_packet_queue = *m_player_ctx->video()->packetQueue();
    if(v_packet_queue.size()==0) return;
    QSharedPointer<T_AV_Packet> v_curr_packet = v_packet_queue.head();
    QSharedPointer<T_AV_Packet> v_last_packet = v_packet_queue.last();

    // Download rate is derived from the newest queued frame number.
    m_fps_download.set(v_last_packet->frameNo());
    // Line 1: fps meters, surface size, download rate, decoded count.
    QString v_msg = QString("%1:%2[%3 x %4] DL:%5 %6%7(%8)")
            .arg(m_fps1.value(), 6, 'f', 2, ' ')
            .arg(m_fps2.value(), 6, 'f', 2, ' ')
            .arg(screenWidget->width())
            .arg(screenWidget->height())
            .arg(m_fps_download.value(), 6, 'f', 2, ' ')
            .arg(m_player_ctx->video()->packetQueue()->decodedCount(LONG_LONG_MAX))
            //.arg(m_player_ctx->m_video.packet_queue.duration(m_player_ctx->m_stopwatch.elapsed())<T_Box::PLAYER_BUFFER_TIME)
            //.arg(m_player_ctx->seekManager()->endlessDuration(m_player_ctx->m_stopwatch.elapsed())<T_Box::PLAYER_BUFFER_TIME)
            //.arg(m_player_ctx->seekManager()->endlessDuration(m_player_ctx->m_stopwatch.elapsed()))
            //.arg(v_curr_packet->isNull())
            //.arg(v_last_packet->isNull())
            //.arg(v_last_packet->frameNo() - v_curr_packet->frameNo())
            ;
    this->ui->lineEdit->setText(v_msg);

    // Line 2: elapsed time, head/tail frame numbers, dropped-frame count.
    v_msg = QString("E=%1 F=%2/%3 D=%4")
            .arg(m_core->timeline()->toString("hh:mm:ss[zzz]"))
            .arg(v_curr_packet->frameNo())
            .arg(v_last_packet->frameNo())
            .arg(m_drop_count)
            ;
    this->ui->lineEdit_2->setText(v_msg);

    m_player_ctx->m_seek_bar->setElapsed(m_core->timeline()->elapsed());

    m_player_ctx->m_buffering_bar->setValue(m_core->seekManager()->seekableDuration());
    m_player_ctx->m_buffering_bar->buffering(m_player_ctx->needBuffering());
}

// Designer auto-connected slot for pushButton; intentionally empty (stub).
void Widget::on_pushButton_clicked()
{
}

// Designer auto-connected slot for pushButton_2; intentionally empty (stub).
void Widget::on_pushButton_2_clicked()
{
}

// Video display surface owned by the player Widget.
// @param parent  the owning Widget; also retained as m_widget so
//                playerContext() can reach the player context.
T_ScreenWidget::T_ScreenWidget(Widget *parent)
    : QWidget(parent)  // Widget derives from QWidget; the old C-style cast was redundant
    , m_size(QSize(200, 100))
{
    this->m_widget = parent;
    // Allow the surface to stretch with the layout in both directions,
    // preserving the default policy's height-for-width flag (Designer idiom).
    QSizePolicy sizePolicy(QSizePolicy::Minimum, QSizePolicy::Minimum);
    sizePolicy.setHorizontalStretch(1);
    sizePolicy.setVerticalStretch(1);
    sizePolicy.setHeightForWidth(this->sizePolicy().hasHeightForWidth());
    this->setSizePolicy(sizePolicy);
}

// Returns the owning Widget's player context. May be NULL before the
// Widget constructor creates it, or when construction bailed out early.
T_Box_Player_Context *T_ScreenWidget::playerContext()
{
    return m_widget->m_player_ctx;
}

// Paints the most recently decoded frame (m_image) at the widget origin.
// Falls back to plain QWidget painting while no player context exists.
void T_ScreenWidget::paintEvent(QPaintEvent *e)
{
    if(!playerContext())
    {
        QWidget::paintEvent(e);
        return;
    }
    QPainter v_p(this);
    // Font setup is only consumed by the disabled overlay block below.
    int v_font_size = 16;
    QFont v_f("MS UI Gothic", v_font_size, QFont::Bold);
    //QFontInfo fi(v_f);
    //qDebug() << fi.fixedPitch();
    v_f.setFixedPitch(true);
    v_p.setFont(v_f);
    //m_display_mutex.lock();
    v_p.drawImage(QPoint(0, 0), m_image);
    //m_display_mutex.unlock();
#if 0x0
    // Disabled debug overlay: elapsed time + hard-coded title drawn with a
    // 1px white/black drop shadow near the bottom-left corner.
    QString v_msg = QString("E=%1 %2")
                    .arg(playerContext()->core()->timeline()->toString("hh:mm:ss[zzz]"))
                    .arg("오렌지캬라멜 - 마법소녀 [HD]")
                    ;
    //[Time etc]
    int v_diff = 1;
    int v_x_off = 2;
    int v_y_off = this->height() - 4;
    v_p.setPen(Qt::white);
    v_p.drawText(QPoint(v_x_off+v_diff, v_y_off+v_diff), v_msg);
    v_p.setPen(Qt::black);
    v_p.drawText(QPoint(v_x_off, v_y_off), v_msg);
#endif
}

