/*
 * copyright (c) 2009 Alexandr [Sn@ble] Surnin
 *
 * This file is part of FrameSeeker.
 *
 * FrameSeeker is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FrameSeeker is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You can read the full version of GNU LGPL in
 * http://www.gnu.org/copyleft/lesser.html
 */


#include "FSffmpeg.h"

#include <QDebug>
#include <QDir>



// ============================================================
// ============================================================
// Constructor: put every pointer member into a known-empty state so the
// destructor can safely test them even if openFile() was never called.
FSffmpeg::FSffmpeg() {

    fmtCtx = NULL;
    codecCtx = NULL;
    codec = NULL;      // decoder handle, assigned in openFile()
    buffer = NULL;
    frameYUV = NULL;
    frameRGB = NULL;
    frame = NULL;
    toTarget = NULL;   // sws context, created in createSwsContexts()

    frameIndex = 0;

    // Non-zero once av_read_frame() reports end of stream / error
    stop = 0;

    // Pixel format the decoded frames are converted to for display
    target_pix_fmt = PIX_FMT_RGB32;

}



// ============================================================
// ============================================================
// Opens a media file, locates the first video (and audio) stream,
// opens a decoder for the video stream and allocates the frame
// buffers / scaler context needed by readFrame().
// Returns FS_SUCCESS or one of the FS_* error codes; on error the
// object is left partially initialized (cleaned up by the destructor).
int FSffmpeg::openFile(QString _fileName) {


    // Register all video & audio codecs
    av_register_all();

    fileName = _fileName;
    qDebug() << "fileName: "
             << /*QDir::currentPath() + "/" +*/ fileName.toAscii();

    // Opening video file and obtaining format
    if (av_open_input_file(&fmtCtx, fileName.toAscii(), NULL, 0, NULL)!=0)
        return FS_NO_FILE_SPECIFIED;

    // Obtaining information about video format (stream parameters)
    if (av_find_stream_info(fmtCtx)<0)
        return FS_NO_STREAMS;

    // Output file information onto console
    dump_format(fmtCtx, 0, fileName.toAscii(), 0);

    // Scan all streams, remembering the FIRST video and FIRST audio
    // stream index; -1 means "not found".
    videoStreamIndex = -1;
    audioStreamIndex = -1;
    for (unsigned int vs = 0; vs < fmtCtx->nb_streams; vs++) {
        if ((fmtCtx->streams[vs]->codec->codec_type==CODEC_TYPE_VIDEO) &&
            (videoStreamIndex < 0)) {
            // Find FIRST video stream
            videoStreamIndex = vs;
        }
        if ((fmtCtx->streams[vs]->codec->codec_type==CODEC_TYPE_AUDIO) &&
            (audioStreamIndex < 0)) {
            // Find FIRST audio stream
            audioStreamIndex = vs;
        }
    }

    if (videoStreamIndex == -1) {
        qDebug() << "No video stream";
        return FS_NO_VIDEO_STREAM;
    }

    // A missing audio stream is tolerated: only logged, not an error.
    if (audioStreamIndex == -1) {
        qDebug() << "No audio stream";
	// return FS_NO_AUDIO_STREAM;
    }

    // Assign codec context to video stream
    codecCtx = fmtCtx->streams[videoStreamIndex]->codec;

    // Cache frequently used stream properties in members
    imgWidth = codecCtx->width; // Video width (<--->)
    imgHeight = codecCtx->height; // Video height (Up&Down)
    orig_pix_fmt = codecCtx->pix_fmt; // Pixel format (likely PIX_FMT_YUV420P)
    frameRate = (codecCtx->time_base.den / codecCtx->time_base.num); // Frame Rate (for NTSC is 29.97)

    // MUST BE FF_DEBUG_MV: enables motion-vector debug info,
    // which FSFrame consumes for visualization
    codecCtx->debug_mv = FF_DEBUG_MV;

    // Finding decoder
    codec = avcodec_find_decoder(codecCtx->codec_id);
    if (codec == NULL) {
        qDebug() << "Unsupported codec";
        return FS_NO_DECODER;
    }

    // Opening decoder
    if (avcodec_open(codecCtx, codec)<0)
        return FS_CANT_OPEN_DECODER;

    // Create frames (allocate memory)
    if (createFrames() != 0)
        return FS_CANT_CREATE_FRAMES;

    // Create scaling/converting contexts for frames
    if (createSwsContexts() != 0)
        return FS_CANT_CREATE_SWS_CONTEXTS;

    return FS_SUCCESS;
}



// ============================================================
// ============================================================
// Allocates the decode frame (frameYUV), the converted frame
// (frameRGB) and the pixel buffer that backs frameRGB.
// Returns FS_SUCCESS or an FS_CANT_ALLOC_* error code.
int FSffmpeg::createFrames() {

    // Allocation memory for YUV video frame
    frameYUV = avcodec_alloc_frame();
    if (frameYUV == NULL)
        return FS_CANT_ALLOC_YUV_FRAME;

    // Allocation memory for RGB video frame
    frameRGB = avcodec_alloc_frame();
    if (frameRGB == NULL)
        return FS_CANT_ALLOC_RGB_FRAME;

    // Determine required buffer size and allocate memory
    unsigned int numBytes;
    numBytes = avpicture_get_size(target_pix_fmt,
                                  imgWidth,
                                  imgHeight);
    buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
    // av_malloc can fail; previously the NULL was passed straight into
    // avpicture_fill and crashed later on first frame access.
    if (buffer == NULL)
        return FS_CANT_ALLOC_RGB_FRAME;

    // Assign appropriate parts of buffer to image planes in frameRGB
    avpicture_fill((AVPicture*)frameRGB,
                   buffer,
                   target_pix_fmt,
                   imgWidth,
                   imgHeight);

    return FS_SUCCESS;
}



// ============================================================
// ============================================================
// Reads packets until one complete video frame is decoded, converts
// it to target_pix_fmt, wraps it in an FSFrame and emits sendFrame().
// Returns FS_VIDEO_ENDS once the stream is exhausted, FS_SUCCESS
// otherwise. Also logs the achieved decode fps roughly once a second.
int FSffmpeg::readFrame() {

    if (stop) return FS_VIDEO_ENDS;

    // fps bookkeeping: function-local statics survive across calls
    static QTime time;
    static quint64 lastFrameIndex;
    if (frameIndex == 0) {
        time.start();
        lastFrameIndex = 0;
    }

    if (time.elapsed() > 1000) {
        qDebug() << "fps == "
                 << (frameIndex - lastFrameIndex);
        time.restart();
        lastFrameIndex = frameIndex;
    }

    int frameFinished;
    AVPacket packet;

    // FIXME: 3262 in avcodec.h
    // I removed "attribute_deprecated" before "int avcodec..."
    // because no "avcodec_decode_video2" in my libs
    read_frame_label:
    if (av_read_frame(fmtCtx, &packet) >= 0) {
        if (packet.stream_index == videoStreamIndex) {
            // If frame exists, than decode it
            avcodec_decode_video(codecCtx,
                                 frameYUV,
                                 &frameFinished,
                                 packet.data,
                                 packet.size);

            if (frameFinished) {
                // Convert video to target(RGB) format
                if (target_pix_fmt != orig_pix_fmt) {
                    sws_scale(toTarget,
                              frameYUV->data,
                              frameYUV->linesize,
                              0,
                              imgHeight,
                              frameRGB->data,
                              frameRGB->linesize);
                } else {
                    // NOTE(review): this aliases frameRGB to frameYUV and
                    // drops the frame allocated in createFrames() — verify
                    // the destructor does not free the same pointer twice.
                    frameRGB = frameYUV;
                }


                // Drop the previous FSFrame before wrapping the new one
                if (frame) {
                    delete frame;
                }

                // Create new frame structure
                frame = new FSFrame((uchar*)frameRGB->data[0],
                                    imgWidth,
                                    imgHeight,
                                    frameIndex,
                                    frameYUV->pts,
                                    frameYUV->pict_type);

//                qDebug() << "Before send frame";
//                qDebug() << "Frame MVs count: " << frame->getMVs().count();
//                qDebug() << "Frame type: " << frame->type();
                // Send frame
                emit sendFrame(frame);
//                qDebug() << "After send frame";

            }
        } else {
            // If packet.stream_index != videoStreamIndex:
            // release the packet before skipping it, otherwise every
            // audio/subtitle packet leaks its payload.
            av_free_packet(&packet);
            goto read_frame_label;
        }

        // Increment frame number
        frameIndex++;

        // FIXME: There is no reference to av_free_packet in my shared libs...
        av_free_packet(&packet);

    } else {
        // End of stream (or read error): remember it for future calls
        stop = 1;
    }

    return FS_SUCCESS;
}





// ============================================================
// ============================================================
// Seeks to the given frame number: jumps to the nearest preceding
// I-frame, then decodes forward (with hurry_up set, so the decoder
// skips expensive work) until the target timestamp is reached.
void FSffmpeg::seekFrame(int _frameIndex) {

    // Find Presentation Time Stamp (PTS) of the requested frame
    uint64_t TARGET_PTS = (uint64_t) ( _frameIndex * AV_TIME_BASE  / frameRate );

    // Seek to closest I-Frame
    av_seek_frame( fmtCtx, -1, TARGET_PTS, AVSEEK_FLAG_BACKWARD );

    // Decode video to the necessary frame
    codecCtx->hurry_up = 1;
    int gotFrame;
    AVPacket Packet;
    while (true) {
        // Stop on EOF or read error; previously the return value was
        // ignored and a seek past the end spun forever on stale data.
        if (av_read_frame( fmtCtx, &Packet ) < 0)
            break;
        uint64_t MyPts = av_rescale( Packet.pts,
                                     AV_TIME_BASE * (int64_t) codecCtx->time_base.num,
                                     codecCtx->time_base.den );
        if( MyPts >= TARGET_PTS ) {
            // Target reached: free this packet too (it used to leak)
            av_free_packet( &Packet );
            break;
        }
        avcodec_decode_video( codecCtx, frameYUV, &gotFrame, Packet.data,
                              Packet.size );
        //FIXME: There is no reference to av_free_packet in my version of libav*
        av_free_packet( &Packet );
    };
    codecCtx->hurry_up = 0;
}





// ============================================================
// ============================================================
// Destructor: releases every resource acquired in openFile() /
// createFrames() / readFrame(). Each pointer was NULL-initialized in
// the constructor, so unused ones are just reported.
FSffmpeg::~FSffmpeg() {

    if (buffer != NULL) {
        av_free(buffer);
        qDebug() << "Memory for \"buffer\" was unlocked";
    } else {
        qDebug() << "\"buffer\" is not used!";
    }

    if (frameYUV != NULL) {
        av_free(frameYUV);
        qDebug() << "Memory for \"frameYUV\" was unlocked";
    } else {
        qDebug() << "\"frameYUV\" is not used!";
    }

    // readFrame() may alias frameRGB to frameYUV (when no pixel-format
    // conversion is needed); in that case frameYUV was already freed
    // above and freeing frameRGB again would be a double free.
    if (frameRGB != NULL && frameRGB != frameYUV) {
        av_free(frameRGB);
        qDebug() << "Memory for \"frameRGB\" was unlocked";
    } else {
        qDebug() << "\"frameRGB\" is not used!";
    }

    if (codecCtx != NULL) {
        avcodec_close(codecCtx);
        qDebug() << "Codec closed";
    } else {
        qDebug() << "\"codecCtx\" is not used!";
    }

    if (fmtCtx != NULL) {
        av_close_input_file(fmtCtx);
        qDebug() << "File closed";
    } else {
        qDebug() << "\"fmtCtx\" is not used!";
    }

    if (frame != NULL) {
        delete frame;
        qDebug() << "Memory for \"frame\" was unlocked";
    } else {
        qDebug() << "\"frame\" is not used!";
    }
}




// ============================================================
int FSffmpeg::createSwsContexts() {

    // Create software scaling/conversion context for orig->RGB
    toTarget = sws_getContext(imgWidth,
                              imgHeight,
                              orig_pix_fmt,
                              imgWidth, // without
                              imgHeight, // scaling
                              target_pix_fmt, // RGB32
                              SWS_FAST_BILINEAR,
                              NULL,
                              NULL,
                              NULL);

    return FS_SUCCESS;
}




// ============================================================
// ============================================================
// Accessor: frames per second of the opened video, as computed
// from the codec time base in openFile().
float FSffmpeg::getFrameRate() {
    const float fps = frameRate;
    return fps;
}





// ============================================================
// ============================================================
// Accessor: picture type (I/P/B) of the most recently decoded frame.
// NOTE(review): dereferences frameYUV — only valid after openFile()
// has succeeded; verify callers respect that.
int FSffmpeg::getFrameType() {
    const int pictType = frameYUV->pict_type;
    return pictType;
}




// ============================================================
// ============================================================
// Stub: FFmpeg-related settings are not applied yet — the method only
// logs that it was called and ignores the 'settings' object.
// TODO: read decoder/conversion options from 'settings' and apply them.
void FSffmpeg::readSettings(QSettings *settings) {

    qDebug() << "Applying FFMPEG settings...";
}
