

#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

#include <SDL.h>
#include <SDL_thread.h>

// OpenGL ES 3 / EGL headers for the (currently disabled) GL render path
#include <GLES3/gl3.h>
#include <GLES3/gl3ext.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
//#include "esUtil.h"

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
#include <pthread.h>
#include <unistd.h>

#define  LOG_TAG    "ffmpegandroidplayer"
#define  LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)

// State shared between setpath() (Java side) and the player; guarded by g_mutex.
// NOTE(review): g_mutex is never explicitly initialized — it relies on static
// zero-initialization matching PTHREAD_MUTEX_INITIALIZER. Confirm no other
// init exists, or add an explicit initializer.
int g_modifyPath = false;   // set to true when g_path has been updated
pthread_mutex_t g_mutex;    // protects g_path and g_modifyPath
char g_path[1024];          // latest media path handed in from Java (NUL-terminated)
/*
EGLDisplay  display;
EGLContext context;
EGLConfig config;
EGLint numConfigs;

bool initOpenGlES(){
    if ((display = eglGetDisplay(EGL_DEFAULT_DISPLAY)) == EGL_NO_DISPLAY) {
        LOGD("eglGetDisplay returned err %d", eglGetError());
        return false;
    }
    if (!eglInitialize(display, 0, 0)) {
        LOGD("eglInitialize returned err %d", eglGetError());
        return false;
    }
    const EGLint attribs[] = {EGL_BUFFER_SIZE, 32,
                              EGL_ALPHA_SIZE, 8,
                              EGL_BLUE_SIZE, 8,
                              EGL_GREEN_SIZE, 8,
                              EGL_RED_SIZE, 8,
                              EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT_KHR,
                              EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                              EGL_NONE};
    if (!eglChooseConfig(display, attribs, &config, 1, &numConfigs)) {
        LOGD("eglChooseConfig returned err %d", eglGetError());
        return false;
    }
    EGLint attributes[] = {EGL_CONTEXT_CLIENT_VERSION, 3, EGL_NONE};
    if (!(context = eglCreateContext(display, config, NULL, attributes))) {
        LOGD("eglCreateContext returned err %d", eglGetError());
        return false;
    }
    EGLSurface surface = NULL;
    EGLint format;
    if (!eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format)) {
        LOGD("eglGetConfigAttrib returned err %d", eglGetError());
        return false;
    }
    LOGD("好高兴啊，我成功了😄😄😄");
    return true;
}
*/
/*
 * JNI entry point: stores the media path handed in from Java into the shared
 * g_path buffer (guarded by g_mutex) and raises g_modifyPath so the player
 * can pick up the change.
 *
 * Returns 0 on success, -1 if the jstring could not be pinned. A path that
 * does not fit in g_path is silently ignored (original behavior kept).
 */
extern "C" JNIEXPORT jint JNICALL
Java_com_example_carscreen_Ffmpeg_setpath(JNIEnv *env, jclass clazz, jstring path)
{
    LOGD("setPath start ********************************************** g_path\n");

    const char *file_name = env->GetStringUTFChars(path, 0);
    if (file_name == NULL) {
        // JVM out of memory while pinning the string; nothing to copy.
        return -1;
    }

    pthread_mutex_lock(&g_mutex);

    // Log of the previous value moved under the lock: g_path is shared state
    // and must not be read unguarded (original logged it before locking).
    LOGD("setPath start ********************************************** g_path:%s\n", g_path);

    // Copy only when the new path plus its NUL terminator fits the buffer.
    size_t len = strlen(file_name);
    if (len + 1 < sizeof(g_path)) {
        memcpy(g_path, file_name, len + 1);
        g_modifyPath = true;
    }

    LOGD("setPath g_path:%s\n", g_path);
    pthread_mutex_unlock(&g_mutex);

    // BUG FIX: release the pinned UTF chars — the original leaked them on
    // every call.
    env->ReleaseStringUTFChars(path, file_name);

    return 0;
}
/*
void videoFrameRender(char * yuvDataBuf, int yuv_width, int yuv_height) {
    GLuint textureYID;
    GLuint textureUID;
    GLuint textureVID;

    glGenTextures(1, &textureYID);
    glGenTextures(1, &textureUID);
    glGenTextures(1, &textureVID);

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureYID);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yuv_width, yuv_height,
                 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, yuvDataBuf);
    glGenerateMipmap(GL_TEXTURE_2D);
    glUniform1i(uTextureSamplerYHandle, 0);


    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, textureUID);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yuv_width/2, yuv_height/2,
                 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, yuvDataBuf + yuv_width * yuv_height);
    glGenerateMipmap(GL_TEXTURE_2D);
    glUniform1i(uTextureSamplerUHandle, 1);


    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, textureVID);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yuv_width/2, yuv_height/2,
                 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, yuvDataBuf + yuv_width * yuv_height * 5 / 4);
    glGenerateMipmap(GL_TEXTURE_2D);
    glUniform1i(uTextureSamplerVHandle, 2);


    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glBindTexture(GL_TEXTURE_2D, 0);
    glDeleteTextures(1, &textureYID);
    glDeleteTextures(1, &textureUID);
    glDeleteTextures(1, &textureVID);

}
*/
/*
 * JNI entry point: decodes the video stream of the file at `path_` with
 * FFmpeg, converts each frame to RGBA via swscale, and blits it into the
 * ANativeWindow obtained from `surface`. Blocks until the file ends.
 *
 * Returns 0 when the whole file was played, -1 on any setup error.
 *
 * Fixes vs. the original:
 *  - codec was opened twice (second avcodec_open2 removed);
 *  - every early return leaked the format context, the pinned jstring
 *    chars, and the native-window reference — now released via a single
 *    NULL-safe cleanup path;
 *  - sws context is freed, frames are freed with av_frame_free;
 *  - per-row copy is clamped so padded linesize cannot overrun the
 *    window buffer.
 *
 * The deprecated stream->codec / avcodec_decode_video2 API is kept on
 * purpose to match the FFmpeg version this file already targets.
 */
extern "C" JNIEXPORT jint JNICALL
Java_rocks_georgik_sdlapp_Ffmpeg_play(JNIEnv *env, jclass clazz, jobject surface, jstring path_)
{
    // One-shot hook for the (currently disabled) OpenGL ES render path.
    static int initgl = false;
    if (initgl == false) {
        initgl = true;
        //initOpenGlES();
    }

    // All resources are declared up front so one cleanup label can release
    // whatever was acquired; every release below is NULL-safe.
    int ret = -1;
    AVFormatContext *pFormatCtx = NULL;
    AVCodecContext *pCodecCtx = NULL;
    AVCodec *pCodec = NULL;
    ANativeWindow *nativeWindow = NULL;
    AVFrame *pFrame = NULL;
    AVFrame *pFrameRGBA = NULL;
    uint8_t *buffer = NULL;
    struct SwsContext *sws_ctx = NULL;
    ANativeWindow_Buffer windowBuffer;
    AVPacket packet;
    int videoStream = -1;
    unsigned int i;
    int videoWidth = 0;
    int videoHeight = 0;
    int numBytes = 0;
    int frameFinished = 0;
    int val = 0;

    const char *file_name = env->GetStringUTFChars(path_, 0);
    if (file_name == NULL) {
        return -1; // JVM out of memory while pinning the jstring
    }
    LOGD("play file_name=%s\n", file_name);

    av_register_all();
    avcodec_register_all();

    pFormatCtx = avformat_alloc_context();

    // Open the container. On failure FFmpeg frees pFormatCtx and NULLs it,
    // so the cleanup path stays safe.
    val = avformat_open_input(&pFormatCtx, file_name, NULL, NULL);
    if (val != 0) {
        char errmsg[2000];
        av_strerror(val, errmsg, sizeof(errmsg));
        LOGD("Couldn't open file:%s,val=%d, errmsg=%s\n", file_name, val, errmsg);

        // Diagnostic probe: distinguishes "file unreadable" from
        // "container/demux failure".
        FILE *fp = fopen(file_name, "rb");
        if (fp != NULL) {
            LOGD("open file succeed!!!!\n");
            fclose(fp);
        }
        LOGD("open file failed!!!!!\n");
        goto cleanup;
    }

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGD("Couldn't find stream information.");
        goto cleanup;
    }

    // Pick the first video stream.
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
            && videoStream < 0) {
            videoStream = (int) i;
        }
    }
    if (videoStream == -1) {
        LOGD("Didn't find a video stream.");
        goto cleanup;
    }

    // Context is owned by the stream, so it is only closed, never freed.
    pCodecCtx = pFormatCtx->streams[videoStream]->codec;

    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        LOGD("Codec not found.");
        pCodecCtx = NULL; // not opened — nothing to avcodec_close()
        goto cleanup;
    }

    // BUG FIX: the original opened the codec twice; open it exactly once.
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGD("Could not open codec.");
        pCodecCtx = NULL; // not opened — nothing to avcodec_close()
        goto cleanup;
    }

    nativeWindow = ANativeWindow_fromSurface(env, surface);
    if (nativeWindow == NULL) {
        LOGD("Couldn't get native window from surface.");
        goto cleanup;
    }

    videoWidth = pCodecCtx->width;
    videoHeight = pCodecCtx->height;

    // Size the window buffers to the video; the compositor scales for us.
    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight,
                                     WINDOW_FORMAT_RGBA_8888);

    pFrame = av_frame_alloc();           // decoded (native pix_fmt) frame
    pFrameRGBA = av_frame_alloc();       // RGBA frame handed to the window
    if (pFrameRGBA == NULL || pFrame == NULL) {
        LOGD("Could not allocate video frame.");
        goto cleanup;
    }

    // Backing buffer for the RGBA frame.
    numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width,
                                        pCodecCtx->height, 1);
    if (numBytes < 0) {
        LOGD("Invalid picture dimensions.");
        goto cleanup;
    }
    buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    if (buffer == NULL) {
        LOGD("Could not allocate pixel buffer.");
        goto cleanup;
    }
    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer,
                         AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);

    // Decoded frames are not RGBA; convert before rendering.
    sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                             pCodecCtx->pix_fmt,
                             pCodecCtx->width, pCodecCtx->height,
                             AV_PIX_FMT_RGBA, SWS_BILINEAR,
                             NULL, NULL, NULL);
    if (sws_ctx == NULL) {
        LOGD("Could not initialize the conversion context.");
        goto cleanup;
    }

    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        if (packet.stream_index == videoStream) {
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            // One decode call does not always yield a complete frame; also
            // skip rendering if the window buffer cannot be locked.
            if (frameFinished
                && ANativeWindow_lock(nativeWindow, &windowBuffer, 0) == 0) {
                sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGBA->data, pFrameRGBA->linesize);

                uint8_t *dst = (uint8_t *) windowBuffer.bits;
                int dstStride = windowBuffer.stride * 4; // stride is in pixels
                uint8_t *src = pFrameRGBA->data[0];
                int srcStride = pFrameRGBA->linesize[0];

                // Strides differ, so copy row by row. Clamp the copy length:
                // the original copied srcStride bytes per row, which can
                // overrun the window buffer when linesize padding exceeds
                // the window stride.
                int rowBytes = videoWidth * 4;
                if (rowBytes > dstStride) {
                    rowBytes = dstStride;
                }
                for (int h = 0; h < videoHeight; h++) {
                    memcpy(dst + h * dstStride, src + h * srcStride, rowBytes);
                }

                ANativeWindow_unlockAndPost(nativeWindow);
            }
        }
        av_packet_unref(&packet);
    }
    ret = 0; // played through to EOF

cleanup:
    sws_freeContext(sws_ctx);       // NULL-safe
    av_free(buffer);
    av_frame_free(&pFrameRGBA);     // proper frame destructor (was av_free)
    av_frame_free(&pFrame);
    if (pCodecCtx != NULL) {
        avcodec_close(pCodecCtx);   // owned by the stream; close only
    }
    if (nativeWindow != NULL) {
        // BUG FIX: the reference from ANativeWindow_fromSurface was leaked.
        ANativeWindow_release(nativeWindow);
    }
    avformat_close_input(&pFormatCtx); // NULL-safe
    // BUG FIX: the pinned UTF chars were leaked on every call.
    env->ReleaseStringUTFChars(path_, file_name);
    return ret;
}
}

