#include "widget.h"
#include "ui_widget.h"

#include <QtCore>

#include <SDL/SDL.h>

// Constructor: opens the media file, sets up video/audio decoders, adopts the
// Qt child widget as an SDL video surface, pre-demuxes the ENTIRE file into
// two in-memory packet queues, then starts the playback threads.
// NOTE(review): many early "return"s below leave the object half-constructed
// (e.g. v_audio_dev never created) — the destructor must tolerate that.
Widget::Widget(QWidget *parent)
    : QWidget(parent)
    , ui(new Ui::Widget)
    , m_video_thread(this)
    , m_audio_thread(this)
{
    ui->setupUi(this);

    // Child widget whose native window handle SDL will render into.
    screenWidget = new T_ScreenWidget(this);
    screenWidget->setObjectName(QString::fromUtf8("screenWidget"));
    ui->verticalLayout->addWidget(screenWidget);

    // Hard-coded test clip; alternatives kept for quick switching.
    //filename="C:/6tQzHr5mgbc_34.flv";
    //filename="C:/EWeSEyvAJpc_34.flv";
    filename="C:/Soraya_-_Self_Control.mp4";
    qDebug() << filename;

    // Legacy FFmpeg global init (required before any av_* call in this API era).
    av_register_all();

    if(av_open_input_file(&formatCtx, filename.toLocal8Bit().constData(), NULL, 0, NULL)) {
        qDebug() << "Can't open video";
        return;
    }

    // NOTE(review): failure here only logs — execution continues with possibly
    // unpopulated stream info. Probably should "return" as the other checks do.
    if(av_find_stream_info(formatCtx)<0) {
        qDebug() << "Can't find stream info";
    }

    // Dump container/stream info to stderr for debugging.
    dump_format(formatCtx, 0, filename.toLocal8Bit().constData(), false);

    //[Video] — locate the first video stream.
    videoStream=-1;
    for(unsigned int i=0; i<formatCtx->nb_streams; i++)
        if(formatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO) {
            videoStream=i;
            break;
        }
    if(videoStream<0) {
        qDebug() << "No video stream.";
        return;
    }

    qDebug() << "[videoStream]" << videoStream;

    //[Video] — find a decoder for the stream's codec id.
    videoCtx=formatCtx->streams[videoStream]->codec;
    videoCodec=avcodec_find_decoder(videoCtx->codec_id);
    if(!videoCodec) {
        qDebug() << "Unsupported video codec.";
        return;
    }

    //[Video] — open the decoder.
    if(avcodec_open(videoCtx, videoCodec)<0) {
        qDebug() << "avcodec_open (video) failed";
        return;
    }

    m_video_stream = formatCtx->streams[videoStream];

    //[Video/SDL] — let SDL render into the Qt widget's native window instead
    // of creating its own top-level window. Driver overrides kept for testing.
    //SDL_putenv("SDL_VIDEODRIVER=windib");
    //SDL_putenv("SDL_VIDEODRIVER=directx");
    //SDL_putenv(QString("SDL_WINDOWID=%1").arg((quint32)this->screenWidget->winId()).toLatin1().constData());
    SDL_Init(SDL_INIT_VIDEO);
    window_id = SDL_CreateWindowFrom((void *)this->screenWidget->winId());
    Uint32 flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL|SDL_RESIZABLE;
    // bpp==0 means "use the current display depth" in SDL 1.2.
    screenWidget->screen = SDL_SetVideoModeEx(
            screenWidget->width(), //640,
            screenWidget->height(), //360,
            0,
            flags,
            window_id);
    if( ! screenWidget->screen )
    {
        qDebug() << "( ! screenWidget->screen )";
        return;
    }

    char namebuf[256+1];
    qDebug() << "[SDL_VideoDriverName()]" << SDL_VideoDriverName(namebuf, sizeof(namebuf)-1);

    //[Audio] — locate the first audio stream.
    audioStream=-1;
    for(unsigned int i=0; i<formatCtx->nb_streams; i++)
        if(formatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO) {
            audioStream=i;
            break;
        }
    if(audioStream<0) {
        qDebug() << "No audio stream.";
        return;
    }

    qDebug() << "[audioStream]" << audioStream;

    //[Audio]
    audioCtx=formatCtx->streams[audioStream]->codec;

    // Ask the decoder to downmix to at most stereo (legacy request_channels API).
    if (audioCtx->channels > 0) {
        audioCtx->request_channels = FFMIN(2, audioCtx->channels);
    } else {
        audioCtx->request_channels = 2;
    }

    audioCodec=avcodec_find_decoder(audioCtx->codec_id);
    if(!audioCodec) {
        qDebug() << "Unsupported audio codec.";
        return;
    }

    //[Audio] — open the decoder.
    if(avcodec_open(audioCtx, audioCodec)<0) {
        qDebug() << "avcodec_open (audio) failed";
        return;
    }

    m_audio_stream = formatCtx->streams[audioStream];

    qDebug() << "[audioCodec->long_name]" << audioCodec->long_name;
    qDebug() << "[audioCtx->sample_fmt]" << audioCtx->sample_fmt;

    qDebug() << "[audioCtx->sample_rate]" << audioCtx->sample_rate;
    //http://d.hatena.ne.jp/rainbow_beam/20100210/1265809987
    // NOTE(review): "0x1" is just a truthy constant — this #if block is enabled.
#if 0x1
    // Open the PCM output device: 16-bit samples at the stream's native rate,
    // 1024 samples per buffer unit.
    T_PCM_AudioDeviceSpec wanted_spec;
    wanted_spec.nSamplesPerSec = audioCtx->sample_rate;
    wanted_spec.nBitsPerSample = 16;
    wanted_spec.nChannels = audioCtx->channels;
    wanted_spec.nSampleUnit = 1024;

    v_audio_dev = new T_PCM_AudioDevice();
    v_audio_dev->OpenAudio(&wanted_spec);

    if(!v_audio_dev->opened)
    {
        qDebug() << "(!v_audio_dev->opened)";
        return;
    }
#endif

    // Diagnostic dump of all timing-related stream parameters.
    //double v_time_base = 1.0 * v_stream->time_base.num / v_stream->time_base.den;
    qDebug() << "[formatCtx->duration]" << formatCtx->duration << QString("[%1 sec]").arg(formatCtx->duration / AV_TIME_BASE);
    qDebug()<< "[v_stream->start_time]"
            << m_video_stream->start_time
            ;
    qDebug()<< "[codecCtx->time_base]"
            << videoCtx->time_base.num
            << videoCtx->time_base.den
            << 1.0 * videoCtx->time_base.num / videoCtx->time_base.den
            << 1.0 / (1.0 * videoCtx->time_base.num / videoCtx->time_base.den)
            ;
    qDebug()<< "[v_stream->avg_frame_rate]"
            << m_video_stream->avg_frame_rate.num
            << m_video_stream->avg_frame_rate.den
            << 1.0 * m_video_stream->avg_frame_rate.num / m_video_stream->avg_frame_rate.den
            ;
    qDebug()<< "[v_stream->r_frame_rate]"
            << m_video_stream->r_frame_rate.num
            << m_video_stream->r_frame_rate.den
            << 1.0 * m_video_stream->r_frame_rate.num / m_video_stream->r_frame_rate.den
            ;
    qDebug()<< "[v_stream->time_base]"
            << m_video_stream->time_base.num
            << m_video_stream->time_base.den
            << 1.0 * m_video_stream->time_base.num / m_video_stream->time_base.den
            ;
    qDebug()<< "[m_audio_stream->time_base]"
            << m_audio_stream->time_base.num
            << m_audio_stream->time_base.den
            << 1.0 * m_audio_stream->time_base.num / m_audio_stream->time_base.den
            ;

    // Single reusable decode frame; freed (if ever) in the disabled dtor block.
    frame=avcodec_alloc_frame();

    frameNo=0;
    m_drop_count = 0;


    this->setWindowTitle(filename);
    this->screenWidget->setScreenSize(QSize(videoCtx->width, videoCtx->height));
    this->screenWidget->adjustSize();

    m_fps1.wake();
    m_fps2.wake();

    // Pre-demux the WHOLE file into the two packet queues up-front.
    // NOTE(review): this holds every compressed packet in memory at once —
    // fine for short test clips, unbounded for long inputs.
    T_AVPacket v_av_packet;
    while(av_read_frame(formatCtx, &v_av_packet.m_av_packet)>=0)
    {
        if(v_av_packet.m_av_packet.stream_index==videoStream)
        {
            //FIXME
            // Rescale pts in-place from stream time_base units to milliseconds
            // (destroys the original pts; hence the FIXME above).
            v_av_packet.m_av_packet.pts =
                    1000.0
                    * v_av_packet.m_av_packet.pts
                    * m_video_stream->time_base.num
                    / m_video_stream->time_base.den
                    ;
            v_av_packet.m_frame_no = m_video_packet_queue.size();
            m_video_packet_queue.enqueue(v_av_packet);
        }
        else if(v_av_packet.m_av_packet.stream_index==audioStream)
        {
            //FIXME
            // Same in-place pts→milliseconds rescale for the audio stream.
            v_av_packet.m_av_packet.pts =
                    1000.0
                    * v_av_packet.m_av_packet.pts
                    * m_audio_stream->time_base.num
                    / m_audio_stream->time_base.den
                    ;
            v_av_packet.m_frame_no = m_audio_packet_queue.size();
            m_audio_packet_queue.enqueue(v_av_packet);
        }
        else
        {
            // Packet from a stream we don't play (subtitles etc.) — drop it.
            av_free_packet(&v_av_packet.m_av_packet);
            continue;
        }
    }

    qDebug() << "[m_video_packet_queue.size()]" << m_video_packet_queue.size();
    qDebug() << "[m_audio_packet_queue.size()]" << m_audio_packet_queue.size();

    // Video pacing: either a 10 ms GUI timer or a dedicated thread.
    // "0x1" selects the timer branch here.
#if 0x1
    connect(&m_timer, SIGNAL(timeout()), SLOT(onTimerTimeout()));
    m_timer.start(10);
#else
    m_video_thread.start(QThread::HighPriority);
#endif

    // Audio path: Generator pulls decoded PCM, T_AudioThread pushes it to the
    // device. High priority to keep the audio buffer fed.
    m_generator = new Generator(this, 0);
    v_audio_dev->paused = 0;
    //m_audio_thread.start(QThread::HighPriority);
    m_audio_thread.start(QThread::HighestPriority);
    //m_audio_thread.start(QThread::TimeCriticalPriority);
}

// Destructor: signals both worker threads to stop, joins them, then tears
// down the UI. Deep FFmpeg/audio-device cleanup is still disabled (see #if 0x0).
Widget::~Widget()
{
    // NOTE(review): the constructor has many early returns before
    // v_audio_dev is created, so guard the dereference (assumes the member
    // is null-initialized in the header — confirm).
    if(v_audio_dev)
    {
        v_audio_dev->paused = 1;
        v_audio_dev->enabled = 0;  // both thread loops poll this flag
    }
    // Join BOTH threads before freeing anything they touch. Previously the
    // audio thread was never joined, so ~QThread could run on a live thread
    // and the thread could use this widget after "delete ui" (use-after-free).
    m_audio_thread.wait();
    m_video_thread.wait();
    qDebug() << "[Widget::~Widget()]";
    delete ui;

#if 0x0
    // Disabled teardown of decoder/demuxer/audio-device state; currently the
    // process exit reclaims these. Re-enable once ownership is sorted out.
    if(v_audio_dev)
    {
        v_audio_dev->CloseAudio();
        delete v_audio_dev;
    }

    delete m_av_picture;
    av_free(frame);
    avcodec_close(videoCtx);
    av_close_input_file(formatCtx);
#endif
}

void T_VideoThread::run()
{
    T_PCM_AudioDevice *v_audio_dev = m_widget->v_audio_dev;
    while(v_audio_dev->enabled && m_widget->m_video_packet_queue.size()>0)
    {
        if(m_widget->m_time_line.isNull()) m_widget->m_time_line.start();
        int v_elapsed = m_widget->m_time_line.elapsed();
        T_AVPacket &v_head_packet = m_widget->m_video_packet_queue.head();
        if(v_head_packet.m_av_packet.pts > v_elapsed)
        {
            msleep(v_head_packet.m_av_packet.pts - v_elapsed);
        }
        m_widget->onTimerTimeout();
    }
}

void T_AudioThread::run()
{
    T_PCM_AudioDevice *v_audio_dev = m_widget->v_audio_dev;
    while(v_audio_dev->enabled)
    {
        qDebug() << QDateTime::currentDateTime();
        quint8 *stream = v_audio_dev->GetAudioBuf();
        memset(stream, v_audio_dev->m_spec.nSilence, v_audio_dev->m_spec.nSampleSize);
        if ( ! v_audio_dev->paused )
        {
            m_widget->m_generator->read((char *)stream, v_audio_dev->m_spec.nSampleSize);
            //m_widget->sdl_audio_callback(stream, v_audio_dev->m_spec.nSampleSize);
        }
        v_audio_dev->PlayAudio();
        v_audio_dev->WaitAudio();
    }
    v_audio_dev->WaitDone();
}

// Decode-and-present one video frame. Called either from the GUI timer or
// from T_VideoThread. Drops frames whose presentation time has already
// passed, then scales the decoded picture into a freshly created SDL YUV
// overlay and blits it.
// NOTE(review): the overlay and SwsContext are created and destroyed on
// EVERY frame — correct but wasteful; caching them would avoid per-frame
// allocation.
void Widget::onTimerTimeout()
{
    // Serializes frame decode/render between the timer and the video thread.
    T_MutexLocker v_locker(&m_frame_mutex);
    qDebug() << "[Widget::onTimerTimeout()]";
    if(m_video_packet_queue.size()==0)
    {
        // Playback finished — stop the driving timer.
        qDebug() << "Widget::onTimerTimeout(1)";
        m_timer.stop();
        return;
    }
    if(m_time_line.isNull()) m_time_line.start();
    int v_elapsed = m_time_line.elapsed();

    // Too early for the head packet (pts is in milliseconds, see constructor).
    T_AVPacket &v_head_packet = m_video_packet_queue.head();
    if(v_elapsed < v_head_packet.m_av_packet.pts)
    {
        qDebug() << "Widget::onTimerTimeout(2)" << v_elapsed << v_head_packet.m_av_packet.pts;
        return;
    }
    // Decode packets until we find the frame that should be on screen now,
    // dropping frames whose successor is already due.
    for(;;)
    {
        T_AVPacket v_curr_packet = m_video_packet_queue.dequeue();
        frameNo = v_curr_packet.m_frame_no;
        avcodec_decode_video(
                videoCtx,
                frame,
                &frameFinished,
                v_curr_packet.m_av_packet.data,
                v_curr_packet.m_av_packet.size
                );
        qDebug() << "[m_video_packet_queue.size()]" << m_video_packet_queue.size() << frameFinished;
        if(frameFinished)
        {
            if(m_video_packet_queue.size()==0)
            {
                //there is no next frame
                qDebug() << "(1)       [frameNo]" << frameNo << "[packet.pts]" << v_curr_packet.m_av_packet.pts;
                m_fps1.add(1);
                m_fps2.add(1);
            }
            else
            {
                T_AVPacket &v_next_packet = m_video_packet_queue.head();
                if(v_elapsed >= v_next_packet.m_av_packet.pts)
                {
                    // should render next frame
                    // Drop: we are already late for the NEXT frame, so skip
                    // presenting this one (fps1 counts decoded, fps2 rendered).
                    qDebug() << "=====>DROP[frameNo]" << frameNo << "[packet.pts]" << v_curr_packet.m_av_packet.pts;
                    m_fps1.add(1);
                    m_drop_count++;
                    av_free_packet(&v_curr_packet.m_av_packet);
                    continue;
                }
                // should render current frame
                qDebug() << "(2)       [frameNo]" << frameNo << "[packet.pts]" << v_curr_packet.m_av_packet.pts;
                m_fps1.add(1);
                m_fps2.add(1);
            }
            //if(frameNo > 30) goto skip;
            {
                // --- status line updates (fps, elapsed clock, drop count) ---
                QString v_msg = QString("%1 fps : %2 fps [%3 x %4]")
                        .arg(m_fps1.value(), 6, 'f', 2, ' ')
                        .arg(m_fps2.value(), 6, 'f', 2, ' ')
                        .arg(screenWidget->width())
                        .arg(screenWidget->height())
                        ;
                this->ui->lineEdit->setText(v_msg);

                QTime v_elapsed_clock(0, 0, 0, 0);
                v_elapsed_clock = v_elapsed_clock.addMSecs(m_time_line.elapsed());
                v_msg = QString("E=%1 F=%2 D=%3")
                                .arg(v_elapsed_clock.toString("hh:mm:ss[zzz]"))
                                .arg(frameNo)
                                .arg(m_drop_count)
                                ;
                this->ui->lineEdit_2->setText(v_msg);

                qDebug() << "(A)" << videoCtx->width << videoCtx->height << videoCtx->pix_fmt << PIX_FMT_YUV420P;

                // Overlay size follows the widget; width forced even because
                // YV12 chroma planes are half-width.
                int v_ovl_w = screenWidget->width();
                int v_ovl_h = screenWidget->height();
                v_ovl_w = (v_ovl_w / 2) * 2;
                // Recreate the SDL surface if the widget was resized since
                // the surface was last set.
                if(
#if 0x0
                        screenWidget->width() != screenWidget->screen->w
                        ||
                        screenWidget->height() != screenWidget->screen->h
#else
                        v_ovl_w != screenWidget->screen->w
                        ||
                        v_ovl_h != screenWidget->screen->h
#endif
                        /*
                        ||
                        screenWidget->hasBeenMoved()
                        */
                   )
                {
                    qDebug() << screenWidget->width() << screenWidget->width();
                    qDebug() << v_ovl_w << v_ovl_h;
                    Uint32 flags = 0;
                    flags |= SDL_HWSURFACE;
                    flags |= SDL_ASYNCBLIT;
                    flags |= SDL_HWACCEL;
                    flags |= SDL_RESIZABLE;
                    screenWidget->screen = SDL_SetVideoModeEx(
                            v_ovl_w, //this->screenWidget->width(),
                            v_ovl_h, //this->screenWidget->height(),
                            0,
                            flags,
                            window_id);
                }
#if 0x0
                int v_ovl_w = screenWidget->screen->w;
                int v_ovl_h = screenWidget->screen->h;
                v_ovl_w = (v_ovl_w / 2) * 2;
#endif
                // Fresh YUV overlay sized to the widget (not the video).
                SDL_Overlay *ovl = SDL_CreateYUVOverlay(
                        v_ovl_w, //videoCtx->width,
                        v_ovl_h, //videoCtx->height,
                        SDL_YV12_OVERLAY,
                        screenWidget->screen);
                qDebug() << "(B)";
                qDebug() << SDL_LockYUVOverlay(ovl);
                AVPicture pict;
                qDebug() << "(C)";
                memset(&pict,0,sizeof(AVPicture));

                qDebug() << "(D)";
                // YV12 stores V before U, so swap planes 1 and 2 relative to
                // FFmpeg's YUV420P layout.
                pict.data[0] = ovl->pixels[0];
                pict.data[1] = ovl->pixels[2];
                pict.data[2] = ovl->pixels[1];
                qDebug() << "(E)";
                pict.linesize[0] = ovl->pitches[0];
                pict.linesize[1] = ovl->pitches[2];
                pict.linesize[2] = ovl->pitches[1];

                AVPicture pict_src;
                memset(&pict_src,0,sizeof(AVPicture));
                pict_src.data[0] = frame->data[0];
                pict_src.data[1] = frame->data[1];
                pict_src.data[2] = frame->data[2];
                pict_src.linesize[0] = frame->linesize[0];
                pict_src.linesize[1] = frame->linesize[1];
                pict_src.linesize[2] = frame->linesize[2];

                // Scale/convert decoded frame directly into the locked overlay.
                SwsContext *v_sws_ctx_2 = sws_getContext(
                        videoCtx->width, //int srcW,
                        videoCtx->height, //int srcH,
                        videoCtx->pix_fmt, //enum PixelFormat srcFormat,
                        ovl->w, //int dstW,
                        ovl->h, //int dstH,
                        PIX_FMT_YUV420P, //enum PixelFormat dstFormat,
                        SWS_FAST_BILINEAR, //SWS_BICUBIC, //int flags,
                        NULL, //SwsFilter *srcFilter,
                        NULL, //SwsFilter *dstFilter,
                        NULL  //const double *param
                        );
                //qDebug() << v_sws_ctx;
                int v_scale_result_2 = sws_scale(
                        v_sws_ctx_2,
                        pict_src.data,
                        pict_src.linesize,
                        0,
                        videoCtx->height,
                        pict.data,
                        pict.linesize
                        );
                //qDebug() << "[v_scale_result]" << v_scale_result;
                sws_freeContext(v_sws_ctx_2);

                qDebug() << "(F)";
                ////av_picture_copy(&pict, &pict_src, PIX_FMT_YUV420P, videoCtx->width, videoCtx->height);
                qDebug() << "(G)";
                SDL_UnlockYUVOverlay(ovl);
                qDebug() << "(H)";
                SDL_Rect rect;
                rect.x = 0;
                rect.y = 0;
                rect.w = ovl->w; //videoCtx->width;
                rect.h = ovl->h; //videoCtx->height;
                qDebug() << "(I)";
                SDL_DisplayYUVOverlay(ovl, &rect);
                qDebug() << "(J)";
                SDL_FreeYUVOverlay(ovl);
                qDebug() << "(Z)";
                //this->update();
                break;
            }
            // NOTE(review): this label is only reachable via the commented-out
            // "goto skip" above; it currently triggers an unused-label warning.
            skip:
            QWidget::update();
        }
        av_free_packet(&v_curr_packet.m_av_packet);
        break;
    }
}

// Audio pull callback: fills "data" with exactly "len" bytes of S16 PCM.
// Supplies leftover bytes from m_audio_buffer first, then decodes packets
// from the audio queue (converting non-S16 sample formats), applies a gentle
// clock correction toward the audio pts, and buffers any decode overshoot
// for the next call. Always reports "len" consumed; shortfalls are silence.
qint64 Generator::readData(char *data, qint64 len)
{
    qDebug() << "[Generator::readData()]";
    //m_widget->sdl_audio_callback((quint8 *)data, len);
    Q_ASSERT(len%2==0);  // S16 samples — byte count must be even

    // Queue exhausted: output silence and shut the audio path down.
    if(m_widget->m_audio_packet_queue.size()==0)
    {
        memset(data, 0, len);
        m_widget->v_audio_dev->paused = 1;
        m_widget->v_audio_dev->enabled = 0;
        return len;
    }
    // Work on a shallow copy of the head packet so data/size can be advanced
    // while decoding; the queue entry is only popped once fully consumed.
    T_AVPacket &v_head_packet = m_widget->m_audio_packet_queue.head();
    T_AVPacket m_audio_temp = v_head_packet;

    int left_len = len;
    int read_len = 0;

    // Drain previously-buffered PCM before decoding anything new.
    QBuffer v_read_buffer(&m_audio_buffer);
    v_read_buffer.open(QIODevice::ReadOnly);
    QByteArray v_result = v_read_buffer.read(left_len);
    v_read_buffer.close();
    m_audio_buffer = m_audio_buffer.mid(v_result.size());
    left_len -= v_result.size();
    read_len += v_result.size();


    if(m_widget->m_time_line.isNull()) m_widget->m_time_line.start();
    int v_elapsed = m_widget->m_time_line.elapsed();
    int m_audio_elapsed = m_audio_temp.m_av_packet.pts;  // pts already in ms

    qDebug()
            << "(A)[frame]" << m_audio_temp.m_frame_no
            << "[m_audio_elapsed]" << m_audio_elapsed
            << "[v_elapsed]" << v_elapsed
            << "[diff]" << v_elapsed - m_audio_elapsed
            << "[audioCtx->sample_rate]" << m_widget->audioCtx->sample_rate
            ;
    // Nudge the shared timeline halfway toward the audio clock each call —
    // audio is the master clock; video pacing follows m_time_line.
    int v_diff = v_elapsed - m_audio_elapsed;
    m_widget->m_time_line = m_widget->m_time_line.addMSecs(v_diff/2);

    while(left_len > 0)
    {
        int v_dec_len = sizeof(m_widget->m_audio_decode_array);
        memset(m_widget->m_audio_decode_array, 0, sizeof(m_widget->m_audio_decode_array));
        qDebug() << "[m_audio_temp.m_av_packet.size]" << m_audio_temp.m_av_packet.size;
        // Returns bytes of compressed input consumed; v_dec_len becomes bytes
        // of decoded PCM produced.
        int v_enc_len = avcodec_decode_audio3(
                m_widget->audioCtx,
                (int16_t *)m_widget->m_audio_decode_array,
                &v_dec_len,
                &m_audio_temp.m_av_packet
                );
        if (v_enc_len < 0)
        {
            qDebug() << "(B)2" << v_enc_len << m_widget->m_audio_packet_queue.size();
            /* if error, we skip the frame */
            m_audio_temp.m_av_packet.size = 0; //frame is finished
        }
        else
        {
            qDebug() << "(B)3:len1=" << v_enc_len << "[data_size]" << v_dec_len;
            if (m_widget->audioCtx->sample_fmt == SAMPLE_FMT_S16)
            {
                // Native format already matches the device — append directly.
                v_result.append((char *)m_widget->m_audio_decode_array, v_dec_len);
            }
            else
            {
                // Convert decoded samples to interleaved S16.
                AVAudioConvert *convCtx =
                        av_audio_convert_alloc(
                                SAMPLE_FMT_S16,
                                1,
                                m_widget->audioCtx->sample_fmt,
                                1,
                                NULL,
                                0);
                if (!convCtx)
                {
                    // No converter for this format pair — bail out of decoding;
                    // the shortfall is zero-filled below.
                    qDebug() << "av_audio_convert_alloc() failed";
                    break;
                }
                const void *v_ibuf[6]= {m_widget->m_audio_decode_array};
                void *v_obuf[6]= {m_widget->m_audio_convert_array};
                int v_istride[6]= {av_get_bits_per_sample_format(m_widget->audioCtx->sample_fmt)/8};
                int v_ostride[6]= {av_get_bits_per_sample_format(SAMPLE_FMT_S16)/8};
                int v_len= v_dec_len/v_istride[0];
                if (av_audio_convert(convCtx, v_obuf, v_ostride, v_ibuf, v_istride, v_len)<0)
                {
                    qDebug() << "av_audio_convert() failed";
                    // Was leaked here before: free the context on the failure
                    // path too, not just on success.
                    av_audio_convert_free(convCtx);
                    break;
                }
                av_audio_convert_free(convCtx);
                qDebug() << "av_audio_convert() successful";
                v_dec_len = v_len * v_ostride[0];
                v_result.append((char *)m_widget->m_audio_convert_array, v_dec_len);
            }
            // Advance past the compressed bytes the decoder consumed.
            m_audio_temp.m_av_packet.data += v_enc_len;
            m_audio_temp.m_av_packet.size -= v_enc_len;
            left_len -= v_dec_len;
            read_len += v_dec_len;
        }
        if(m_audio_temp.m_av_packet.size<=0)
        {
            // delete old one
            av_free_packet(&m_widget->m_audio_packet_queue.head().m_av_packet);
            m_widget->m_audio_packet_queue.dequeue();
            // pop new one
            if(m_widget->m_audio_packet_queue.size()==0) break;
            m_audio_temp = m_widget->m_audio_packet_queue.head();
        }
    }
    Q_ASSERT(read_len==v_result.size());

    qDebug() << "[len]" << len << "[read_len]" << read_len;
    if(read_len <= len)
    {
        memcpy(data, v_result.constData(), v_result.size());
        // Short read (queue ran dry / converter failed): zero-fill the tail so
        // the device never plays uninitialized memory.
        if(v_result.size() < len)
        {
            memset(data + v_result.size(), 0, len - v_result.size());
        }
    }
    else
    {
        // Decoded more than requested: deliver len bytes now, keep the rest
        // for the next call.
        memcpy(data, v_result.constData(), len);
        m_audio_buffer.append(v_result.mid(len));
    }
    return len;
}

