#include <jni.h>
#include <string>
#include "common.h"

#ifdef __cplusplus
extern "C"
{
#endif
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libavutil/pixfmt.h>
#include <libswscale/swscale.h>

#ifdef __cplusplus
};
#endif

// ---- Global decoder state: one decode session at a time; not thread-safe ----
AVPacket avpkt;          // packet handed to the decoder on each decode call
AVCodecContext *avctx;   // H.264 decoder context, created in init()
const AVCodec *codec;    // H.264 decoder looked up in init()
AVFrame *frame;          // decoded (YUV) frame
AVFrame *rgbFrame;       // converted RGB frame; its data planes point into rgbBuffer
uint8_t * rgbBuffer;     // backing pixel buffer for rgbFrame

uint8_t *video_dst_data[4] = {NULL};  // NOTE(review): never written in this file, only freed in release() — looks vestigial
int      video_dst_linesize[4];       // NOTE(review): unused in this file
int video_dst_bufsize;                // byte size of one converted RGB frame

struct SwsContext *sws_ctx;  // YUV420P -> RGB conversion context
char fileDir[1024];          // output directory for dumped .ppm frames
int frameCount;              // frames saved so far (PPM dumping is capped at 5)

int isRenderImage;  // nonzero: RGB565 output copied back to Java; zero: RGB24 dumped as PPM files

/*
 * Bridge FFmpeg's av_log output into the app log (LOGD).
 * The va_list is copied before use so the caller's copy stays valid;
 * print_prefix is the persistent line-prefix state av_log_format_line needs
 * across calls.
 */
void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl)
{
    static int print_prefix = 1;
    char formatted[1024];
    va_list args_copy;

    va_copy(args_copy, vl);
    av_log_format_line(ptr, level, fmt, args_copy, formatted,
                       sizeof(formatted), &print_prefix);
    va_end(args_copy);

    LOGD("%s", formatted);
}


/*
 * Set up the global H.264 decode session: decoder context, source/destination
 * frames, RGB conversion buffer, and the swscale context.
 *
 * outputFilePath: directory where PPM frames are dumped (copied into fileDir).
 * render: true  -> convert to RGB565LE for on-screen rendering,
 *         false -> convert to RGB24 and dump frames as PPM files.
 *
 * On any failure this logs, frees what was already allocated, and returns;
 * it never terminates the process.
 */
extern "C" JNIEXPORT void JNICALL
Java_com_example_king_ffmpegexample_NativeH264ToRgb_init(
        JNIEnv* env,
        jobject /* this */,
        jstring outputFilePath,
        jboolean render) {

    av_register_all();
    av_log_set_callback(ffmpeg_log_callback);

    const char *video_dst_filename = env->GetStringUTFChars(outputFilePath, NULL);
    if (!video_dst_filename) {
        return;
    }
    // FIX: bounded copy — strcpy could overflow fileDir[1024] for a long path.
    snprintf(fileDir, sizeof(fileDir), "%s", video_dst_filename);
    // Release the UTF chars immediately; we own a copy now, which keeps every
    // error path below free of JNI cleanup.
    env->ReleaseStringUTFChars(outputFilePath, video_dst_filename);

    frameCount = 0;

    av_init_packet(&avpkt);

    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!codec) {
        LOGE("Codec not found\n");
        return;
    }
    avctx = avcodec_alloc_context3(codec);
    if (!avctx) {
        LOGE("Could not allocate video codec context\n");
        return;
    }
    if (codec->capabilities & AV_CODEC_CAP_TRUNCATED)
        avctx->flags |= AV_CODEC_FLAG_TRUNCATED; // we do not send complete frames

    // NOTE(review): dimensions are hard-coded; presumably they match the
    // stream this app decodes — confirm against the Java caller.
    avctx->width = 240;
    avctx->height = 320;
    avctx->time_base.num = 1;
    avctx->frame_number = 1;
    avctx->codec_type = AVMEDIA_TYPE_VIDEO;
    avctx->bit_rate = 0;

    /* open it */
    if (avcodec_open2(avctx, codec, NULL) < 0) {
        // FIX: was exit(1) — never exit() from a JNI library, it kills the
        // whole app process. Clean up and return instead.
        LOGE("Could not open codec\n");
        avcodec_free_context(&avctx);
        return;
    }

    frame = av_frame_alloc();
    if (!frame) {
        LOGE("Could not allocate video frame\n");
        avcodec_free_context(&avctx);  // FIX: avctx leaked on this path before
        return;
    }

    isRenderImage = render;

    // RGB565 for direct rendering, RGB24 when dumping PPM files.
    AVPixelFormat pixelFormat = isRenderImage ? AV_PIX_FMT_RGB565LE
                                              : AV_PIX_FMT_RGB24;

    rgbFrame = av_frame_alloc();
    if (!rgbFrame) {
        LOGE("Could not allocate RGB frame\n");
        av_frame_free(&frame);
        avcodec_free_context(&avctx);
        return;
    }

    int dst_bytes_num = avpicture_get_size(pixelFormat,
                                           avctx->width, avctx->height);
    video_dst_bufsize = dst_bytes_num;

    rgbBuffer = static_cast<uint8_t *>(av_malloc(dst_bytes_num * sizeof(uint8_t)));
    if (!rgbBuffer) {
        // FIX: allocation was previously used unchecked.
        LOGE("Could not allocate RGB buffer\n");
        av_frame_free(&rgbFrame);
        av_frame_free(&frame);
        avcodec_free_context(&avctx);
        return;
    }
    // Wire rgbFrame's data/linesize pointers into rgbBuffer.
    avpicture_fill(reinterpret_cast<AVPicture *>(rgbFrame), rgbBuffer, pixelFormat,
                   avctx->width, avctx->height);

    sws_ctx = sws_getContext(avctx->width, avctx->height, AV_PIX_FMT_YUV420P,
                             avctx->width, avctx->height, pixelFormat, SWS_BILINEAR, NULL, NULL, NULL);
    if (!sws_ctx) {
        // FIX: result was previously unchecked; decode would crash in sws_scale.
        LOGE("Could not create swscale context\n");
    }
}

/*
 * Write one RGB24 frame as a binary PPM (P6) file.
 *
 * buf:      packed RGB24 pixel data
 * wrap:     stride (bytes) between the start of consecutive rows
 * xsize:    width in pixels (3 bytes each are written per pixel)
 * ysize:    height in pixels (rows)
 * filename: destination path
 *
 * Failures are silent (void return); the caller only logs the attempt.
 */
static void ppm_save(unsigned char *buf, int wrap, int xsize, int ysize, char *filename) {
    // FIX: "wb" not "w" — PPM pixel data is binary; text mode would corrupt
    // it on platforms that translate line endings.
    FILE *f = fopen(filename, "wb");
    if (!f) {
        // FIX: fopen was previously unchecked; fprintf(NULL, ...) would crash.
        return;
    }
    fprintf(f, "P6\n%d %d\n%d\n", xsize, ysize, 255);
    for (int y = 0; y < ysize; y++) {
        fwrite(buf + (size_t)y * wrap, 1, (size_t)xsize * 3, f);
    }
    fclose(f);
}

/*
 * Decode one H.264 packet (bytes from `in`, length `len`) and convert any
 * resulting frame to RGB.
 *
 * Returns:  1  -> a frame was rendered into `out` (RGB565, width*height*2 bytes)
 *           <0 -> decode error (the avcodec error code), or no frame produced,
 *                 or a frame was dumped to a PPM file (-1 in those cases).
 *
 * NOTE(review): in PPM mode only the first 5 frames are saved (frameCount is
 * incremented here); in render mode frameCount is never incremented, so
 * rendering continues indefinitely — presumably intentional, confirm.
 */
extern "C" JNIEXPORT jint JNICALL
Java_com_example_king_ffmpegexample_NativeH264ToRgb_decodeH264ToRgb(
        JNIEnv* env,
        jobject /* this */,
        jbyteArray in,
        jint len,
        jbyteArray out) {

    if (len <= 0) {
        return -1;
    }
    jbyte *buf = env->GetByteArrayElements(in, NULL);
    if (!buf) {
        return -1;
    }

    avpkt.size = len;
    avpkt.data = (uint8_t *) buf;
    int got_frame = 0;

    int decodedLen = avcodec_decode_video2(avctx, frame, &got_frame, &avpkt);
    // FIX: the pinned array was previously leaked on the error path and on the
    // no-frame / cap-reached path (release happened only inside the got_frame
    // branch). JNI_ABORT because the decoder only reads from buf — no copy-back.
    if (decodedLen < 0) {
        env->ReleaseByteArrayElements(in, buf, JNI_ABORT);
        LOGE("Error while decoding frame");
        return decodedLen;
    }
    if (!got_frame || frameCount >= 5) {
        env->ReleaseByteArrayElements(in, buf, JNI_ABORT);
        return -1;
    }

    LOGD("--------------------------------got_frame");
    sws_scale(sws_ctx, (const uint8_t* const*)frame->data, frame->linesize, 0,
              avctx->height, rgbFrame->data, rgbFrame->linesize);
    env->ReleaseByteArrayElements(in, buf, JNI_ABORT);

    if (isRenderImage) {
        // RGB565: 2 bytes per pixel, copied back into the caller's array.
        env->SetByteArrayRegion(out, 0, avctx->width * avctx->height * 2,
                                reinterpret_cast<const jbyte *>(rgbFrame->data[0]));
        return 1;
    }

    char fileName[1024];
    snprintf(fileName, sizeof(fileName), "%s/test_%d.ppm", fileDir, frameCount);
    LOGD("----------------------------save ppm %s", fileName);
    ppm_save(rgbFrame->data[0], rgbFrame->linesize[0], avctx->width, avctx->height,
             fileName);
    frameCount++;
    return -1;
}


/*
 * Tear down the decode session created by init(), freeing all global state.
 * Safe to call more than once: every pointer is NULLed after freeing, so a
 * second call is a no-op instead of a double free.
 */
extern "C" JNIEXPORT void JNICALL
Java_com_example_king_ffmpegexample_NativeH264ToRgb_release(
        JNIEnv* env,
        jobject /* this */) {
    // avcodec_free_context / av_frame_free / av_freep all NULL their argument.
    avcodec_free_context(&avctx);
    av_frame_free(&frame);
    av_frame_free(&rgbFrame);

    av_freep(&video_dst_data[0]);
    // FIX: av_free left rgbBuffer dangling — a second release() (or a decode
    // after release) would double-free / use freed memory. av_freep NULLs it.
    av_freep(&rgbBuffer);

    // sws_freeContext(NULL) is documented as a no-op, so no guard needed;
    // FIX: NULL the pointer afterward to keep repeated release() safe.
    sws_freeContext(sws_ctx);
    sws_ctx = NULL;
}
