#include <jni.h>
#include <string>

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/imgutils.h"
#include "libswscale/swscale.h"
#include <libavutil/time.h>
}

#include <android/log.h>
#include <android/native_window_jni.h>


#define LOGD(...) __android_log_print(ANDROID_LOG_INFO,"123123",__VA_ARGS__)

extern "C"
JNIEXPORT jstring JNICALL
Java_com_yfjin_ffmpeg52_MainActivityVideo_stringFromJNI(JNIEnv *env, jobject /* this */) {
    // Report the FFmpeg build configuration back to Java as a UTF-8 string.
    // avcodec_configuration() returns a static string owned by libavcodec,
    // so it can be handed to NewStringUTF directly without copying.
    const char *config = avcodec_configuration();
    return env->NewStringUTF(config);
}


// Demuxer context for the currently playing input (set up in play()).
AVFormatContext *avFormatContext;
// Decoder context for the selected video stream.
AVCodecContext *videoCodecContext;

// Destination pixel buffer for the RGBA-converted frame (av_malloc'd in play()).
uint8_t *outbuffer;

// Scaler / pixel-format converter context (decoder pix_fmt -> RGBA).
SwsContext *swsContext;

// Render target obtained from the Java Surface.
// NOTE(review): these globals mean only one play() call can be active at a
// time — concurrent calls would race on them; confirm callers serialize.
ANativeWindow *nativeWindow;
ANativeWindow_Buffer nativeWindowBuffer;

extern "C" JNIEXPORT jint
Java_com_yfjin_ffmpeg52_MainActivityVideo_play(JNIEnv *env, jobject thiz, jstring url_,
                                               jobject surface) {
    // Decodes the video stream at `url_` and renders each frame (converted to
    // RGBA) onto the given Surface. Blocks until the stream ends or an error
    // occurs. Returns 0 on success, -1 on failure.
    const char *url = env->GetStringUTFChars(url_, nullptr);
    int result = -1;

    // Locals that must be released on every exit path.
    AVPacket *videoPacket = nullptr;
    AVFrame *videoFrame = nullptr;
    AVFrame *rgbFrame = nullptr;

    avFormatContext = nullptr;
    videoCodecContext = nullptr;
    swsContext = nullptr;
    outbuffer = nullptr;
    nativeWindow = nullptr;

    // do { ... } while (false): `break` jumps to the single cleanup section
    // below, so no exit path leaks (the original returned early and leaked
    // every context plus the JNI string on each error).
    do {
        // Register all components (no-op on FFmpeg >= 4.0, required on 3.x).
        avcodec_register_all();

        avFormatContext = avformat_alloc_context();
        if (avformat_open_input(&avFormatContext, url, NULL, NULL) != 0) {
            LOGD("Couldn't open input stream.\n");
            break;
        }
        LOGD("打开视频成功.\n");

        if (avformat_find_stream_info(avFormatContext, NULL) < 0) {
            LOGD("avformat_find_stream_info fail.\n");
            break;
        }

        // Locate the first video (and, informationally, audio) stream.
        int videoIndex = -1;
        int audioIndex = -1;
        for (unsigned int i = 0; i < avFormatContext->nb_streams; ++i) {
            int type = avFormatContext->streams[i]->codecpar->codec_type;
            if (type == AVMEDIA_TYPE_VIDEO) {
                videoIndex = (int) i;
            } else if (type == AVMEDIA_TYPE_AUDIO) {
                audioIndex = (int) i;
            }
        }
        if (videoIndex == -1) {
            LOGD("Couldn't find a video stream.\n");
            break;
        }
        LOGD("videoIndex:%d,audioIndex:%d\n", videoIndex, audioIndex);

        // Build the decoder context from the stream's codec parameters instead
        // of the deprecated AVStream::codec field (removed in FFmpeg 5.x).
        AVCodecParameters *codecpar = avFormatContext->streams[videoIndex]->codecpar;
        AVCodec *videoCodec = avcodec_find_decoder(codecpar->codec_id);
        if (videoCodec == NULL) {
            LOGD("Couldn't find decoder.\n");
            break;
        }
        videoCodecContext = avcodec_alloc_context3(videoCodec);
        if (videoCodecContext == NULL
            || avcodec_parameters_to_context(videoCodecContext, codecpar) < 0) {
            LOGD("Couldn't init codec context.\n");
            break;
        }
        if (avcodec_open2(videoCodecContext, videoCodec, NULL) < 0) {
            LOGD("Couldn't open codec.\n");
            break;
        }
        LOGD("打开了解码成功\n");

        //----------------------------------------------------------------
        // videoPacket: compressed data read from the demuxer.
        // videoFrame:  decoded frame in the decoder's native pixel format.
        // rgbFrame:    same frame converted to RGBA for the window.
        videoPacket = av_packet_alloc();
        videoFrame = av_frame_alloc();
        rgbFrame = av_frame_alloc();
        if (videoPacket == NULL || videoFrame == NULL || rgbFrame == NULL) {
            LOGD("Couldn't allocate packet/frames.\n");
            break;
        }
        int width = videoCodecContext->width;
        int height = videoCodecContext->height;

        // align=1: tightly packed rows, so linesize[0] == width * 4 for RGBA.
        int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 1);
        LOGD("计算解码后的rgb %d\n", numBytes);
        outbuffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
        // Point rgbFrame's data/linesize at our buffer.
        av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, outbuffer,
                             AV_PIX_FMT_RGBA, width, height, 1);

        // Converter: decoder pix_fmt -> RGBA, same dimensions.
        swsContext = sws_getContext(width, height, videoCodecContext->pix_fmt,
                                    width, height, AV_PIX_FMT_RGBA,
                                    SWS_BICUBIC, NULL, NULL, NULL);
        if (swsContext == NULL) {
            LOGD("Couldn't get sws context.\n");
            break;
        }

        //----------------------------------------------------------------
        nativeWindow = ANativeWindow_fromSurface(env, surface);
        if (nativeWindow == NULL) {
            LOGD("Couldn't get native window from surface.\n");
            break;
        }
        if (ANativeWindow_setBuffersGeometry(nativeWindow, width, height,
                                             WINDOW_FORMAT_RGBA_8888) < 0) {
            LOGD("Couldn't set buffers geometry.\n");
            break;
        }
        LOGD("ANativeWindow_setBuffersGeometry成功\n");

        //----------------------------------------------------------------
        // FFmpeg 4.x decode loop: avcodec_send_packet() feeds the decoder,
        // avcodec_receive_frame() drains it (a queue, like MediaCodec).
        result = 0;
        while (av_read_frame(avFormatContext, videoPacket) >= 0) {
            if (videoPacket->stream_index == videoIndex) {
                int ret = avcodec_send_packet(videoCodecContext, videoPacket);
                if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
                    LOGD("解码出错");
                    result = -1;
                    av_packet_unref(videoPacket);
                    break;
                }
                // One packet may produce zero or more frames: drain until the
                // decoder reports EAGAIN (needs more input) or an error.
                while ((ret = avcodec_receive_frame(videoCodecContext, videoFrame)) == 0) {
                    sws_scale(swsContext, videoFrame->data, videoFrame->linesize, 0,
                              videoCodecContext->height,
                              rgbFrame->data, rgbFrame->linesize);

                    if (ANativeWindow_lock(nativeWindow, &nativeWindowBuffer, NULL) < 0) {
                        LOGD("不能lock windows");
                    } else {
                        // Copy row by row: the window's stride (in pixels) may
                        // exceed the frame width, so rows can't be memcpy'd in
                        // one block. linesize[0] bytes == one RGBA row.
                        uint8_t *dst = static_cast<uint8_t *>(nativeWindowBuffer.bits);
                        for (int row = 0; row < height; ++row) {
                            memcpy(dst + row * nativeWindowBuffer.stride * 4,
                                   outbuffer + row * rgbFrame->linesize[0],
                                   rgbFrame->linesize[0]);
                        }
                        switch (videoFrame->pict_type) {
                            case AV_PICTURE_TYPE_I:
                                LOGD("I");
                                break;
                            case AV_PICTURE_TYPE_P:
                                LOGD("P");
                                break;
                            case AV_PICTURE_TYPE_B:
                                LOGD("B");
                                break;
                            default:
                                break;
                        }
                        // Only unlock/post after a successful lock (the
                        // original posted even when the lock had failed).
                        ANativeWindow_unlockAndPost(nativeWindow);
                    }
                }
            }
            // Release the packet's payload every iteration — without this the
            // original leaked every packet read from the demuxer.
            av_packet_unref(videoPacket);
        }
    } while (false);

    //----------------------------------------------------------------
    // Single cleanup path for success and all failures. The av_*_free
    // helpers tolerate NULL and NULL out their argument.
    av_frame_free(&videoFrame);
    av_frame_free(&rgbFrame);
    av_packet_free(&videoPacket);
    if (outbuffer != nullptr) {
        av_free(outbuffer);
        outbuffer = nullptr;
    }
    if (swsContext != nullptr) {
        sws_freeContext(swsContext);
        swsContext = nullptr;
    }
    avcodec_free_context(&videoCodecContext);
    // avformat_close_input (not avformat_free_context alone) is the correct
    // teardown for a context opened with avformat_open_input.
    avformat_close_input(&avFormatContext);
    if (nativeWindow != nullptr) {
        ANativeWindow_release(nativeWindow);
        nativeWindow = nullptr;
    }

    env->ReleaseStringUTFChars(url_, url);
    return result;
}