#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>

#include "../inc/decoder.hpp"

/*
 * Open the given video file, locate its first video stream and prepare
 * the codec for decoding.  `width`/`height` give the target dimensions
 * used for the RGB24 output frames produced by getNextVideoFrame().
 *
 * Any failure (file not found, no video stream, unknown codec, ...) is
 * fatal: an error is reported on stderr and the process exits.
 *
 * NOTE: this uses the legacy (pre-1.0) FFmpeg API the rest of the file
 * is written against (av_open_input_file, avcodec_open, ...).
 */
Decoder::Decoder(char* input_video_file, int width, int height) {
    this->filename = input_video_file;
    this->width = width;
    this->height = height;

    streamDecoded = false;

    AVCodec *pCodec;

    av_register_all();
    // Open the container and read its header.
    if (av_open_input_file(&pFormatCtx, this->filename, NULL, 0, NULL) != 0) {
        fprintf(stderr, "Error opening file\n");
        exit(1);
    }

    // Probe the streams so codec parameters are filled in.
    if (av_find_stream_info(pFormatCtx) < 0) {
        fprintf(stderr, "Couldn't find stream information\n");
        exit(1);
    }

    // Dump container info to stderr (debug aid).
    dump_format(pFormatCtx, 0, this->filename, false);

    // Find the first video stream.  nb_streams is unsigned, so use an
    // unsigned loop index to avoid a signed/unsigned comparison.
    this->videoStream = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) {
            this->videoStream = (int) i;
            break;
        }
    }
    if (this->videoStream == -1) {
        fprintf(stderr, "No video stream found\n");
        exit(1);
    }

    // Get a pointer to the codec context for the video stream
    // (owned by pFormatCtx; released via av_close_input_file).
    pCodecCtx = pFormatCtx->streams[videoStream]->codec;

    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        fprintf(stderr, "Codec not found\n");
        exit(1);
    }

    // Open codec
    if (avcodec_open(pCodecCtx, pCodec) < 0) {
        fprintf(stderr, "Could not open codec\n");
        exit(1);
    }

    // Hack to correct wrong frame rates that seem to be generated by some codecs
    if (pCodecCtx->time_base.num > 1000 && pCodecCtx->time_base.den == 1)
        pCodecCtx->time_base.den = 1000;

    // Allocate the reusable frame that receives decoded (native-format)
    // pictures; converted RGB frames are allocated per call instead.
    pFrame = avcodec_alloc_frame();

    // The swscale context is created lazily on the first decoded frame.
    img_convert_ctx = NULL;

    // Number of bytes a caller must provide for one RGB24 output frame
    // at the requested target dimensions (see getFrameSize()).
    imageBufferSize = avpicture_get_size(PIX_FMT_RGB24, this->width,
            this->height);
}

int Decoder::getFrameSize() {
    return imageBufferSize;
}

/*
 * True while the stream has not yet been fully decoded, i.e. until
 * getNextVideoFrame() hits end-of-file and sets streamDecoded.
 */
bool Decoder::stillHasFrame() {
    return streamDecoded == false;
}
/*
 *
 * Not to forget to call av_free(return value)
 *     av_free(pFrameRGB);
 */
/*
 * Decode the next video frame and convert it to RGB24 at the target
 * width/height.
 *
 * `buffer` must hold at least getFrameSize() bytes; the returned
 * AVFrame's image planes point into it.  The caller owns the returned
 * frame and must release it with av_free(pFrameRGB).
 *
 * Returns NULL once the end of the stream is reached; in that case
 * `buffer` is freed here (so it must come from malloc) and
 * streamDecoded is set so stillHasFrame() reports false.
 */
AVFrame * Decoder::getNextVideoFrame(uint8_t* buffer) {
    int frameFinished;
    AVPacket packet;

    AVFrame *pFrameRGB = avcodec_alloc_frame();
    if (pFrameRGB == NULL) {
        fprintf(stderr, "Can't allocate frame memory\n");
        exit(1);
    }

    // Assign appropriate parts of buffer to image planes in pFrameRGB
    avpicture_fill((AVPicture *) pFrameRGB, buffer, PIX_FMT_RGB24,
            this->width, this->height);

    while (av_read_frame(this->pFormatCtx, &packet) >= 0) {
        if (packet.stream_index == videoStream) {
            // Decode video frame (frameFinished != 0 once a complete
            // picture is available in pFrame).
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            // Did we get a video frame?
            if (frameFinished) {
                // Lazily create the scaler on the first decoded frame.
                // sws_getContext() returns a fully initialized context,
                // so the former extra sws_init_context() call was wrong
                // (that call belongs to the sws_alloc_context() path);
                // the failure mode to check is a NULL return here.
                if (img_convert_ctx == NULL) {
                    img_convert_ctx = sws_getContext(pCodecCtx->width,
                            pCodecCtx->height,
                            pCodecCtx->pix_fmt,
                            this->width,
                            this->height,
                            PIX_FMT_RGB24,
                            SWS_BICUBIC, NULL, NULL, NULL);
                    if (img_convert_ctx == NULL) {
                        fprintf(stderr, "Cannot initialize the conversion context!\n");
                        exit(1);
                    }
                }

                // Convert the decoded picture into the caller's RGB buffer.
                sws_scale(img_convert_ctx, (const uint8_t * const*) pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
                av_free_packet(&packet);
                return pFrameRGB;
            }
        }
        av_free_packet(&packet);
    }

    // End of file reached: release the frame wrapper allocated above
    // (previously leaked on every end-of-stream call) and the caller's
    // pixel buffer, then mark the stream as fully decoded.
    av_free(pFrameRGB);
    free(buffer);
    streamDecoded = true;
    return NULL;
}

/*
 * Release all decoder resources: the swscale context, the decode
 * frame, the codec and the demuxer.  RGB frames returned by
 * getNextVideoFrame() are owned by the caller and are NOT freed here.
 */
Decoder::~Decoder() {
    // An SwsContext must be released with sws_freeContext(); plain
    // free() (as used previously) leaks its internal buffers.  Safe to
    // call with NULL if no frame was ever converted.
    sws_freeContext(img_convert_ctx);

    // Free the YUV frame
    av_free(pFrame);

    // Close the codec
    avcodec_close(pCodecCtx);

    // Close the video file (also frees the codec contexts it owns)
    av_close_input_file(pFormatCtx);
}