#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

#include <functional>
#include <string>

#include "SDL.h"
#include "SDL_image.h"
#include "SDL_mixer.h"
#include "SDL2_gfxPrimitives.h"
#include "SDL_ttf.h"
#include "image_button.h"

#define  LOG_TAG    "carscreen"
// Build the Android log tag as "<function>::<line>:\t".
// NOTE: returns a pointer to a static buffer, so the result is only valid
// until the next call and this function is NOT thread-safe.
const char* logtag(const char *file, const char *fun, int line)
{
    static char log[1024] = {"carscreen"};
    (void)file; // file name is intentionally not part of the tag
    // snprintf instead of sprintf: never overflows the buffer even for
    // pathologically long function names.
    snprintf(log, sizeof(log), "%s::%d:\t", fun, line);
    return log;
}

#define  LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG, logtag(__FILE__, __FUNCTION__, __LINE__), __VA_ARGS__)
//#define LOGD(str, ...)  do{__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s%s::%s::%d:\t" str "\n", __FILE__, __FUNCTION__, __LINE__, ##__VA_ARGS__);}while(0)

/* Smiley-face icon in XPM format: 32x23 pixels, 3 colours, 1 character per
 * pixel.  The first row is the XPM header "<w> <h> <ncolors> <cpp>", the
 * next three rows map characters to colours (space = white, '.' = black,
 * '+' = yellow), and the remaining 23 rows are the pixel data.  Consumed
 * by IMG_ReadXPMFromArray() in SDL_main111(). */
static char * icon_xpm[] = {
        "32 23 3 1",
        "     c #FFFFFF",
        ".    c #000000",
        "+    c #FFFF00",
        "                                ",
        "            ........            ",
        "          ..++++++++..          ",
        "         .++++++++++++.         ",
        "        .++++++++++++++.        ",
        "       .++++++++++++++++.       ",
        "      .++++++++++++++++++.      ",
        "      .+++....++++....+++.      ",
        "     .++++.. .++++.. .++++.     ",
        "     .++++....++++....++++.     ",
        "     .++++++++++++++++++++.     ",
        "     .++++++++++++++++++++.     ",
        "     .+++++++++..+++++++++.     ",
        "     .+++++++++..+++++++++.     ",
        "     .++++++++++++++++++++.     ",
        "      .++++++++++++++++++.      ",
        "      .++...++++++++...++.      ",
        "       .++............++.       ",
        "        .++..........++.        ",
        "         .+++......+++.         ",
        "          ..++++++++..          ",
        "            ........            ",
        "                                "};

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
#include <pthread.h>
#include <unistd.h>

//#define  LOG_TAG    "ffmpegandroidplayer"
//#define  LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)

extern ANativeWindow* Android_JNI_GetNativeWindow(void);
}



// Decode a hard-coded MP4 with FFmpeg and blit every video frame, converted
// to RGBA, directly into the given ANativeWindow buffer.
// Returns 0 on success, -1 on any setup or allocation failure.
int play(ANativeWindow *nativeWindow)
{
    const char *file_name = "/storage/emulated/0/mediapicker/videos/ff725481-c26e-40d0-886f-e0d093b2a8ec.mp4";
    LOGD("play file_name=%s\n", file_name);

    av_register_all();
    avcodec_register_all();

    AVFormatContext *pFormatCtx = avformat_alloc_context();

    // Open video file
    int val = avformat_open_input(&pFormatCtx, file_name, NULL, NULL);
    if (val != 0) {
        char errmsg[2000];
        av_strerror(val, errmsg, sizeof(errmsg));
        LOGD("Couldn't open file:%s,val=%d, errmsg=%s\n", file_name, val, errmsg);

        // Probe with plain fopen() to tell a missing/unreadable file apart
        // from a demuxer failure.
        FILE *fp = fopen(file_name, "rb");
        if (fp != NULL) {
            LOGD("open file succeed!!!!\n");
            fclose(fp);
        }

        LOGD("open file failed!!!!!\n");
        return -1; // Couldn't open file
    }

    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGD("Couldn't find stream information.");
        avformat_close_input(&pFormatCtx); // FIX: was leaked on this path
        return -1;
    }

    // Find the first video stream (unsigned index: nb_streams is unsigned)
    int videoStream = -1;
    for (unsigned i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = (int) i;
            break;
        }
    }
    if (videoStream == -1) {
        LOGD("Didn't find a video stream.");
        avformat_close_input(&pFormatCtx);
        return -1; // Didn't find a video stream
    }

    // Get a pointer to the codec context for the video stream
    AVCodecContext *pCodecCtx = pFormatCtx->streams[videoStream]->codec;

    // Find the decoder for the video stream
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        LOGD("Codec not found.");
        avformat_close_input(&pFormatCtx);
        return -1; // Codec not found
    }

    // FIX: open the codec exactly once.  The original code called
    // avcodec_open2() a second time after ANativeWindow_setBuffersGeometry,
    // which fails on an already-open context.
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGD("Could not open codec.");
        avformat_close_input(&pFormatCtx);
        return -1; // Could not open codec
    }

    // Video dimensions drive both the window buffer and the copy loop below.
    int videoWidth = pCodecCtx->width;
    int videoHeight = pCodecCtx->height;

    // Size the native window buffer to the video; the compositor scales it.
    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight,
                                     WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer windowBuffer;

    // Allocate the decode frame and the RGBA frame used for rendering.
    AVFrame *pFrame = av_frame_alloc();
    AVFrame *pFrameRGBA = av_frame_alloc();
    if (pFrameRGBA == NULL || pFrame == NULL) {
        LOGD("Could not allocate video frame.");
        av_frame_free(&pFrame);
        av_frame_free(&pFrameRGBA);
        avcodec_close(pCodecCtx);
        avformat_close_input(&pFormatCtx);
        return -1;
    }

    // Determine required buffer size and allocate the RGBA pixel buffer
    // that pFrameRGBA's data pointers will reference.
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height,
                                            1);
    uint8_t *buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    if (buffer == NULL) { // FIX: allocation was unchecked
        LOGD("Could not allocate frame buffer.");
        av_frame_free(&pFrame);
        av_frame_free(&pFrameRGBA);
        avcodec_close(pCodecCtx);
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
                         pCodecCtx->width, pCodecCtx->height, 1);

    // Decoded frames are not RGBA; set up a conversion context.
    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
                                                pCodecCtx->height,
                                                pCodecCtx->pix_fmt,
                                                pCodecCtx->width,
                                                pCodecCtx->height,
                                                AV_PIX_FMT_RGBA,
                                                SWS_BILINEAR,
                                                NULL,
                                                NULL,
                                                NULL);
    if (sws_ctx == NULL) { // FIX: was unchecked
        LOGD("Could not initialize sws context.");
        av_free(buffer);
        av_frame_free(&pFrame);
        av_frame_free(&pFrameRGBA);
        avcodec_close(pCodecCtx);
        avformat_close_input(&pFormatCtx);
        return -1;
    }

    int frameFinished;
    AVPacket packet;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Is this a packet from the video stream?
        if (packet.stream_index == videoStream) {

            // Decode video frame (one packet does not always yield a frame)
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            if (frameFinished) {

                // FIX: check the lock result before writing into the buffer
                if (ANativeWindow_lock(nativeWindow, &windowBuffer, 0) == 0) {

                    // Convert the decoded frame to RGBA
                    sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,
                              pFrame->linesize, 0, pCodecCtx->height,
                              pFrameRGBA->data, pFrameRGBA->linesize);

                    uint8_t *dst = (uint8_t *) windowBuffer.bits;
                    int dstStride = windowBuffer.stride * 4; // RGBA = 4 bytes/px
                    uint8_t *src = (pFrameRGBA->data[0]);
                    int srcStride = pFrameRGBA->linesize[0];

                    // The window stride and the frame stride usually differ,
                    // so copy row by row.  FIX: bound each row by the smaller
                    // stride so we never write past the window buffer.
                    int rowBytes = srcStride < dstStride ? srcStride : dstStride;
                    for (int h = 0; h < videoHeight; h++) {
                        memcpy(dst + h * dstStride, src + h * srcStride, rowBytes);
                    }

                    ANativeWindow_unlockAndPost(nativeWindow);
                }
            }

        }
        av_packet_unref(&packet);
    }

    // FIX: release the scaler context (was leaked)
    sws_freeContext(sws_ctx);

    av_free(buffer);
    av_frame_free(&pFrameRGBA);

    // Free the decode frame
    av_frame_free(&pFrame);

    // Close the codec
    avcodec_close(pCodecCtx);

    // Close the video file
    avformat_close_input(&pFormatCtx);
    return 0;
}

int SDL_main111(int argc, char* argv[]) {
    LOGD("setPath start ********************************************** g_path\n");

    SDL_Window *window;                    // Declare a pointer
    SDL_Surface *surface;
    SDL_Texture *texture;
    SDL_Init(SDL_INIT_VIDEO);              // Initialize SDL2

    // Create an application window with the following settings:
    window = SDL_CreateWindow(
        "An SDL2 window",                  // window title
        SDL_WINDOWPOS_UNDEFINED,           // initial x position
        SDL_WINDOWPOS_UNDEFINED,           // initial y position
        640,                               // width, in pixels
        480,                               // height, in pixels
        SDL_WINDOW_OPENGL                  // flags - see below
    );

    // Check that the window was successfully created
    if (window == NULL) {
        // In the case that the window could not be made...
        printf("Could not create window: %s\n", SDL_GetError());
        return 1;
    }

    if (Mix_OpenAudio(22050, MIX_DEFAULT_FORMAT, 2, 4096) == -1 ) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Couldn't open mixer: %s", SDL_GetError());
        return 2;
    }

    Mix_Chunk *sample = Mix_LoadWAV("cuckoo.wav");
    if (sample == NULL) {
        fprintf(stderr, "Unable to load wave file\n");
        return 3;
    }


    // Setup renderer
    SDL_Renderer* renderer = NULL;
    renderer =  SDL_CreateRenderer( window, -1, SDL_RENDERER_ACCELERATED);

    surface = IMG_ReadXPMFromArray(icon_xpm);
    texture = SDL_CreateTextureFromSurface(renderer, surface);
    if (!texture) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Couldn't load texture: %s", SDL_GetError());
        return 4;
    }

    // Set render color to red ( background will be rendered in this color )
    SDL_SetRenderDrawColor( renderer, 220, 220, 220, 255 );

    // Clear winow
    SDL_RenderClear( renderer );

    // Display image
    SDL_Rect dstrect;


    // Load smiley.png and display it.
    // Location of image files for Android is: app/src/main/assets
    SDL_Surface *loadedSurface = IMG_Load("smiley.png");
    if (!loadedSurface) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Couldn't load PNG image: %s", SDL_GetError());
        return 5;
    }

    SDL_Surface *backgroundSurface = IMG_Load("brno-snow.jpg");
    if (!backgroundSurface) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Couldn't load JPG image: %s", SDL_GetError());
        return 6;
    }

    // Initialize TTF
    if (TTF_Init() == -1) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "TTF_Init: %s\n", TTF_GetError());
        return 7;
    }

    // Load font
    TTF_Font *font = TTF_OpenFont("blazed.ttf", 32);
    if (!font) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Unable to load font: %s\n", TTF_GetError());
        return 8;
    }

    dstrect.x = 0;
    dstrect.y = 0;
    dstrect.w = backgroundSurface->w;
    dstrect.h = backgroundSurface->h;
    SDL_Texture *backgroundTexture = SDL_CreateTextureFromSurface(renderer, backgroundSurface);
    SDL_RenderCopy(renderer, backgroundTexture, NULL, &dstrect);

    dstrect.x = 190;
    dstrect.y = 130;
    dstrect.w = loadedSurface->w;
    dstrect.h = loadedSurface->h;
    SDL_Texture *smileyTexture = SDL_CreateTextureFromSurface(renderer, loadedSurface);
    SDL_RenderCopy(renderer, smileyTexture, NULL, &dstrect);

    dstrect.x = 20;
    dstrect.y = 120;
    dstrect.w = 128;
    dstrect.h = 128;
    SDL_RenderCopy(renderer, texture, NULL, &dstrect);

    dstrect.x = 0;
    dstrect.y = 0;
    dstrect.w = 450;
    dstrect.h = 100;
    SDL_Color textColor = { 255, 240, 0, 255 };
    SDL_Surface* solid = TTF_RenderText_Solid(font, "SDL2 Android Example", textColor);

    SDL_Texture* solidTexture = SDL_CreateTextureFromSurface(renderer, solid);
    SDL_RenderCopy(renderer, solidTexture, NULL, &dstrect);
    SDL_FreeSurface(solid);

    // SDL2_gfx example
    thickLineColor(renderer, 0, 300, 300, 300, 20, 0xFF00FFFF) ;

    // Render to the screen
    SDL_RenderPresent(renderer);

    ANativeWindow* nativeWindow = Android_JNI_GetNativeWindow();
    //play(nativeWindow);

    // Event loop
    bool quit = false;
    SDL_Event event;
    while(!quit && SDL_WaitEvent(&event)) {
        LOGD("setPath start **********************************************event.type=%d\n", event.type);
        switch (event.type) {
            case SDL_QUIT:
                quit = true;
                break;
            case SDL_KEYDOWN:
                if ((event.key.keysym.sym == SDLK_AC_BACK) || (event.key.keysym.sym == SDLK_ESCAPE)) {
                    quit = true;
                }
                break;

            case SDL_FINGERDOWN: {
                Mix_PlayChannel(-1, sample, 0);
                dstrect.x = event.tfinger.x * 1080;
                dstrect.y = event.tfinger.y * 1920;
                dstrect.w = 128;
                dstrect.h = 128;
                LOGD("event.tfinger.x=%f, event.tfinger.y=%f，loadedSurface->w=%d,loadedSurface->h=%d\n", event.tfinger.x, event.tfinger.y, loadedSurface->w, loadedSurface->h);
                SDL_RenderCopy(renderer, texture, NULL, &dstrect);
                SDL_RenderPresent(renderer);
                break;
            }
            default:
                break;
        }
    }

    Mix_CloseAudio();

    // Close and destroy the window
    SDL_DestroyWindow(window);

    // Clean up
    SDL_Quit();

    return 0;
}

void marquee_rect(int i, int start, int w, int len, SDL_Rect *window, SDL_Rect *mar_rect)
{
    //mar_rect[0] = {start, 0, len, w};
    //return;

    switch(i){
        case 0:
            mar_rect[0] = {start, 0, len, w};
            break;
        case 1:
            mar_rect[0] = {window->w - w, start, w, len};
            break;
        case 2:
            mar_rect[0] = {window->w - (start + len), window->h - w, len, w};
            break;
        case 3:
            mar_rect[0] = {0, window->h - (start + len), w, len};
            break;
        default:
            break;
    }
}

// Compute the rectangle(s) of a "marquee" band travelling clockwise around
// the perimeter of `window`, as a function of time `t` (ms).  One full lap
// takes 5000 ms.  Writes 1 or 2 rects into mar_rect (2 when the band
// straddles a corner) and returns how many were written.
int marquee_time(SDL_Rect window, SDL_Rect *mar_rect, Uint32 t)
{
    Uint32 one = 5000;                       // duration of one full lap, ms
    const int len = (window.w + window.h)/4; // band length = 1/8 of perimeter

    // Time elapsed inside the current lap, mapped onto the perimeter
    // (2*(w+h) units long) to get the band's leading offset.
    Uint32 surplus = t - ((t/one) * one);
    int start = (int)(((window.w + window.h) * 2 * surplus) / one);
    int end = start + len;
    int w = 50;                              // band thickness, px
    //LOGD("marquee_time surplus=%d,start=%d\n", surplus, start);

    // Walk the four edges in order (0=top, 1=right, 2=bottom, 3=left; see
    // marquee_rect).  In each branch `start` is rebased to the current
    // edge; if the band overruns the edge it is split into a tail on this
    // edge and a head on the next one.
    if(start < window.w){
        if(end > window.w){
            int len_one = window.w - start;
            marquee_rect(0, start, w, len_one, &window, mar_rect);
            marquee_rect(1, 0, w, len - len_one, &window, mar_rect + 1);
            return 2;
        }else{
            marquee_rect(0, start, w, len, &window, mar_rect);
            return 1;
        }
    }else if(start < window.w + window.h){
        start = start - window.w;
        if(start + len > window.h){
            int len_one = window.h - start;
            marquee_rect(1, start, w, len_one, &window, mar_rect);
            marquee_rect(2, 0, w, len - len_one, &window, mar_rect + 1);
            return 2;
        }else{
            marquee_rect(1, start, w, len, &window, mar_rect);
            return 1;
        }
    }else if(start < window.w + window.h + window.w){
        start = start - (window.w + window.h);
        if(start + len > window.w){
            int len_one = window.w - start;
            marquee_rect(2, start, w, len_one, &window, mar_rect);
            marquee_rect(3, 0, w, len - len_one, &window, mar_rect + 1);
            return 2;
        }else{
            marquee_rect(2, start, w, len, &window, mar_rect);
            return 1;
        }
    }else{
        start = start - (window.w + window.h + window.w);
        if(start + len > window.h){
            int len_one = window.h - start;
            marquee_rect(3, start, w, len_one, &window, mar_rect);
            marquee_rect(0, 0, w, len - len_one, &window, mar_rect + 1);
            return 2;
        }else{
            marquee_rect(3, start, w, len, &window, mar_rect);
            return 1;
        }
    }

    return 0;
}

// Returns non-zero when (x, y) lies inside `rect`.  The left/top edges are
// inclusive, the right/bottom edges exclusive.
int point_in_rect(int x, int y, const  SDL_Rect *rect)
{
    if (x < rect->x || x >= rect->x + rect->w)
        return 0;
    if (y < rect->y || y >= rect->y + rect->h)
        return 0;
    return 1;
}

// Scratch buffer holding "<cache dir>/" followed by the most recent file
// name passed to asserts_path().  The directory prefix is written by
// SDL_main() at startup.
char g_cache_path[1024] = {""};
// Length of the "<cache dir>/" prefix inside g_cache_path.
unsigned g_cache_path_len = 0;
// Currently selected media file (overwritten at startup from the cache dir).
char g_file[1024] = {"/storage/emulated/0/DCIM/jiluo.gif"};
// NOTE(review): presumably flags that g_file was replaced; no writer is
// visible in this chunk — confirm against the rest of the file.
int g_file_change = 0;

// Append `file` to the cached directory prefix and return the combined
// path.  NOTE: returns a pointer to the shared global buffer, so the result
// is invalidated by the next call; not thread-safe.
const char* asserts_path(const char *file)
{
    // FIX: the original unbounded memcpy could overflow g_cache_path for
    // long names; snprintf bounds the write and always NUL-terminates.
    snprintf(g_cache_path + g_cache_path_len,
             sizeof(g_cache_path) - g_cache_path_len,
             "%s", file);
    return g_cache_path;
}

int play_video_windows(const char *file_name, SDL_Renderer *renderer, std::function<int(SDL_Renderer *renderer, SDL_Texture *texture)> callback)
{
    AVFormatContext *pFormatCtx = NULL;
    int videoStream;
    unsigned i;
    AVCodecContext *pCodecCtxOrig = NULL;
    AVCodecContext *pCodecCtx = NULL;
    AVCodec *pCodec = NULL;
    AVFrame *pFrame = NULL;
    AVPacket packet;
    int frameFinished;
    struct SwsContext *sws_ctx = NULL;
    SDL_Event event;
    SDL_Texture *texture;
    Uint8 *yPlane, *uPlane, *vPlane;
    size_t yPlaneSz, uvPlaneSz;
    int uvPitch;

    if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {
        LOGD("avformat_open_input filaed file_name=%s\n", file_name);
        return -1; // Couldn't open file
    }

    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGD("avformat_find_stream_info filaed file_name=%s\n", file_name);
        return -1; // Couldn't find stream information
    }

    // Dump information about file onto standard error
    av_dump_format(pFormatCtx, 0, file_name, 0);

    // Find the first video stream
    videoStream = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++)
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    if (videoStream == -1) {
        LOGD("videoStream == -1 filaed file_name=%s\n", file_name);
        return -1; // Didn't find a video stream
    }

    // Get a pointer to the codec context for the video stream
    pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec;
    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id);
    if (pCodec == NULL) {
        LOGD("Unsupported codec!\n");
        return -1; // Codec not found
    }

    // Copy context
    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) {
        LOGD("Couldn't copy codec context");
        return -1; // Error copying codec context
    }

    // Open codec
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGD("Couldn't avcodec_open2");
        return -1; // Could not open codec
    }

    // Allocate video frame
    pFrame = av_frame_alloc();

    SDL_RendererInfo rendererInfo;
    SDL_GetRendererInfo(renderer, &rendererInfo);

    SDL_Rect displayRect;
    SDL_GetDisplayBounds(0, &displayRect);

    LOGD("Renderinfo++++++++++++++++++++++++++:%s--%s,displayRect.x=%d,displayRect.y=%d,displayRect.w=%d,displayRect.h=%d,\n",
         SDL_GetCurrentVideoDriver(), rendererInfo.name, displayRect.x, displayRect.y,
         displayRect.w, displayRect.h);

    // Allocate a place to put our YUV image on that screen
    texture = SDL_CreateTexture(
            renderer,
            SDL_PIXELFORMAT_YV12,
            SDL_TEXTUREACCESS_STREAMING,
            pCodecCtx->width,
            pCodecCtx->height
    );
    if (!texture) {
        LOGD("SDL: could not create texture - exiting\n");
        SDL_Delay(20000);
        exit(1);
    }

    // initialize SWS context for software scaling
    sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                             pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
                             AV_PIX_FMT_YUV420P,
                             SWS_BILINEAR,
                             NULL,
                             NULL,
                             NULL);

    // set up YV12 pixel array (12 bits per pixel)
    yPlaneSz = pCodecCtx->width * pCodecCtx->height;
    uvPlaneSz = pCodecCtx->width * pCodecCtx->height / 4;
    yPlane = (Uint8 *) malloc(yPlaneSz);
    uPlane = (Uint8 *) malloc(uvPlaneSz);
    vPlane = (Uint8 *) malloc(uvPlaneSz);
    if (!yPlane || !uPlane || !vPlane) {
        LOGD("Could not allocate pixel buffers - exiting\n");
        SDL_Delay(20000);
        exit(1);
    }

    uvPitch = pCodecCtx->width / 2;

    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Is this a packet from the video stream?
        if (packet.stream_index == videoStream) {
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            // Did we get a video frame?
            if (frameFinished) {
                AVPicture pict;
                pict.data[0] = yPlane;
                pict.data[1] = uPlane;
                pict.data[2] = vPlane;
                pict.linesize[0] = pCodecCtx->width;
                pict.linesize[1] = uvPitch;
                pict.linesize[2] = uvPitch;

                // Convert the image into YUV format that SDL uses
                sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height, pict.data,
                          pict.linesize);

                SDL_UpdateYUVTexture(
                        texture,
                        NULL,
                        yPlane,
                        pCodecCtx->width,
                        uPlane,
                        uvPitch,
                        vPlane,
                        uvPitch
                );

                SDL_RenderClear(renderer);
                SDL_RenderCopy(renderer, texture, NULL, NULL);

                if(callback(renderer, texture))
                    break;
            }
        }

    }

    // Free the YUV frame
    av_frame_free(&pFrame);
    free(yPlane);
    free(uPlane);
    free(vPlane);

    // Close the codec
    avcodec_close(pCodecCtx);
    avcodec_close(pCodecCtxOrig);

    // Close the video file
    avformat_close_input(&pFormatCtx);

    SDL_DestroyTexture(texture);

    return 0;
}

int play_video(const char *file_name, std::function<int(SDL_Renderer *renderer, SDL_Texture *texture, SDL_Window *screen)> callback)
{
    AVFormatContext *pFormatCtx = NULL;
    int videoStream;
    unsigned i;
    AVCodecContext *pCodecCtxOrig = NULL;
    AVCodecContext *pCodecCtx = NULL;
    AVCodec *pCodec = NULL;
    AVFrame *pFrame = NULL;
    AVPacket packet;
    int frameFinished;
    struct SwsContext *sws_ctx = NULL;
    SDL_Event event;
    SDL_Window *screen;
    SDL_Renderer *renderer;
    SDL_Texture *texture;
    Uint8 *yPlane, *uPlane, *vPlane;
    size_t yPlaneSz, uvPlaneSz;
    int uvPitch;

    if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {
        LOGD("avformat_open_input filaed file_name=%s\n", file_name);
        return -1; // Couldn't open file
    }

    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGD("avformat_find_stream_info filaed file_name=%s\n", file_name);
        return -1; // Couldn't find stream information
    }

    // Dump information about file onto standard error
    av_dump_format(pFormatCtx, 0, file_name, 0);

    // Find the first video stream
    videoStream = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++)
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    if (videoStream == -1) {
        LOGD("videoStream == -1 filaed file_name=%s\n", file_name);
        return -1; // Didn't find a video stream
    }

    // Get a pointer to the codec context for the video stream
    pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec;
    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id);
    if (pCodec == NULL) {
        LOGD("Unsupported codec!\n");
        return -1; // Codec not found
    }

    // Copy context
    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) {
        LOGD("Couldn't copy codec context");
        return -1; // Error copying codec context
    }

    // Open codec
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGD("Couldn't avcodec_open2");
        return -1; // Could not open codec
    }


    // Allocate video frame
    pFrame = av_frame_alloc();

    LOGD("SDL_CreateWindow pCodecCtx->width=%d,pCodecCtx->height=%d", pCodecCtx->width, pCodecCtx->height);
    SDL_Rect windowRect;
    SDL_GetDisplayBounds(0, &windowRect);
    // Make a screen to put our video
    screen = SDL_CreateWindow(
            "FFmpeg Tutorial",
            SDL_WINDOWPOS_UNDEFINED,
            SDL_WINDOWPOS_UNDEFINED,
            windowRect.w,
            windowRect.h,
            0
    );

    if (!screen) {
        LOGD("SDL: could not create window - exiting\n");
        SDL_Delay(20000);
        exit(1);
    }

    renderer = SDL_CreateRenderer(screen, -1, 0);
    if (!renderer) {
        LOGD("SDL: could not create renderer - exiting\n");
        SDL_Delay(20000);
        exit(1);
    }

    SDL_RendererInfo rendererInfo;
    SDL_GetRendererInfo(renderer, &rendererInfo);

    SDL_Rect displayRect;
    SDL_GetDisplayBounds(0, &displayRect);

    LOGD("Renderinfo++++++++++++++++++++++++++:%s--%s,displayRect.x=%d,displayRect.y=%d,displayRect.w=%d,displayRect.h=%d,\n",
         SDL_GetCurrentVideoDriver(), rendererInfo.name, displayRect.x, displayRect.y,
         displayRect.w, displayRect.h);

    // Allocate a place to put our YUV image on that screen
    texture = SDL_CreateTexture(
            renderer,
            SDL_PIXELFORMAT_YV12,
            SDL_TEXTUREACCESS_STREAMING,
            pCodecCtx->width,
            pCodecCtx->height
    );
    if (!texture) {
        LOGD("SDL: could not create texture - exiting\n");
        SDL_Delay(20000);
        exit(1);
    }

    // initialize SWS context for software scaling
    sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                             pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
                             AV_PIX_FMT_YUV420P,
                             SWS_BILINEAR,
                             NULL,
                             NULL,
                             NULL);

    // set up YV12 pixel array (12 bits per pixel)
    yPlaneSz = pCodecCtx->width * pCodecCtx->height;
    uvPlaneSz = pCodecCtx->width * pCodecCtx->height / 4;
    yPlane = (Uint8 *) malloc(yPlaneSz);
    uPlane = (Uint8 *) malloc(uvPlaneSz);
    vPlane = (Uint8 *) malloc(uvPlaneSz);
    if (!yPlane || !uPlane || !vPlane) {
        LOGD("Could not allocate pixel buffers - exiting\n");
        SDL_Delay(20000);
        exit(1);
    }

    uvPitch = pCodecCtx->width / 2;

    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Is this a packet from the video stream?
        if (packet.stream_index == videoStream) {
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            // Did we get a video frame?
            if (frameFinished) {
                AVPicture pict;
                pict.data[0] = yPlane;
                pict.data[1] = uPlane;
                pict.data[2] = vPlane;
                pict.linesize[0] = pCodecCtx->width;
                pict.linesize[1] = uvPitch;
                pict.linesize[2] = uvPitch;

                // Convert the image into YUV format that SDL uses
                sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height, pict.data,
                          pict.linesize);

                SDL_UpdateYUVTexture(
                        texture,
                        NULL,
                        yPlane,
                        pCodecCtx->width,
                        uPlane,
                        uvPitch,
                        vPlane,
                        uvPitch
                );

                SDL_RenderClear(renderer);
                SDL_RenderCopy(renderer, texture, NULL, NULL);

                if(callback(renderer, texture, screen))
                    break;
            }
        }

    }

    // Free the YUV frame
    av_frame_free(&pFrame);
    free(yPlane);
    free(uPlane);
    free(vPlane);

    // Close the codec
    avcodec_close(pCodecCtx);
    avcodec_close(pCodecCtxOrig);

    // Close the video file
    avformat_close_input(&pFormatCtx);

    SDL_DestroyTexture(texture);
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(screen);

    return 0;
}

int SDL_main(int argc, char *argv[]) {
    AVFormatContext *pFormatCtx = NULL;
    int videoStream;
    unsigned i;
    AVCodecContext *pCodecCtxOrig = NULL;
    AVCodecContext *pCodecCtx = NULL;
    AVCodec *pCodec = NULL;
    AVFrame *pFrame = NULL;
    AVPacket packet;
    int frameFinished;
    struct SwsContext *sws_ctx = NULL;
    SDL_Event event;
    SDL_Window *screen;
    SDL_Renderer *renderer;
    SDL_Texture *texture;
    Uint8 *yPlane, *uPlane, *vPlane;
    size_t yPlaneSz, uvPlaneSz;
    int uvPitch;
    //const char *file_name = "/storage/emulated/0/DCIM/A61.mp4";
    const char *file_name = g_file;

    SDL_Rect pinp_dstrect_video = {0, 0, 384, 216};
    int draw_pinp = 0;
    class image_button image_button("image/pinp.bmp", 350, 150, 180, 180);
    class image_button sdl_button[] = {
            {"image/跑马灯.bmp", 50, 150, 180, 180},
            {"image/画中画.bmp", 250, 150, 180, 180},
            {"image/报警.bmp", 450, 150, 180, 180, "cache/报警.gif"},
            {"image/紧急.bmp", 650, 150, 180, 180, "cache/紧急.gif"},
            {"image/公益01.bmp", 850, 150, 180, 180, "cache/公益01.gif"},
            {"image/公益02.bmp", 50, 350, 180, 180, "cache/公益02.gif"},
            {"image/雾天.bmp", 250, 350, 180, 180, "cache/雾天.gif"},
            {"image/雨天.bmp", 450, 350, 180, 180, "cache/雨天.gif"},
            {"image/左转.bmp", 650, 350, 180, 180, "cache/左转.gif"},
            {"image/右转.bmp", 850, 350, 180, 180, "cache/右转.gif"}
    };

    /*
    if (argc < 2) {
        fprintf(stderr, "Usage: test <file>\n");
        exit(1);
    }
     */
    strcpy(g_cache_path, argv[1]);
    strcat(g_cache_path, "/");
    g_cache_path_len = strlen(g_cache_path);
    LOGD("it is a test g_cache_path_len=%d,g_cache_path=%s\n", g_cache_path_len, g_cache_path);
    strcpy(g_file, asserts_path("cache/jiluo.gif"));
    LOGD("it is a test g_cache_path_len=%d,g_cache_path=%s\n", g_cache_path_len, g_cache_path);
    // Register all formats and codecs
    av_register_all();

    //SDL_SetHint(SDL_HINT_RENDER_DRIVER, "opengles2");

    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        LOGD("Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    Uint32 begin = SDL_GetTicks();
    SDL_Rect maqquee_rect[4];
    int drws_marquee = 0;

    SDL_Rect displayRect;
    SDL_GetDisplayBounds(0, &displayRect);
    LOGD("displayRect.w=%d,displayRect.h=%d\n", displayRect.w, displayRect.h);

    while(1){

        play_video(file_name, [&](SDL_Renderer *renderer, SDL_Texture *texture, SDL_Window *screen) -> int {

            for(unsigned i = 0; i < sizeof(sdl_button)/sizeof(sdl_button[0]); i++){
                sdl_button[i].draw(renderer);
            }

            if (sdl_button[0].clicked()) {
                SDL_SetRenderDrawColor(renderer, 255, 0, 0, 100);
                SDL_GetDisplayBounds(0, &displayRect);
                SDL_RenderFillRects(renderer, maqquee_rect,
                                    marquee_time(displayRect, maqquee_rect,
                                                 SDL_GetTicks() - begin));
            }

            if (sdl_button[1].clicked()) {
                SDL_RenderCopy(renderer, texture, NULL, &pinp_dstrect_video);
            }

            for(unsigned i = 2; i < sizeof(sdl_button)/sizeof(sdl_button[0]); i++){
                if (sdl_button[i].reset()) {
                    const char* path = sdl_button[i].cache_path();
                    LOGD("sdl_button - path=%s\n", path);
                    play_video_windows(asserts_path(path), renderer, [&](SDL_Renderer *renderer, SDL_Texture *texture) -> int {
                        SDL_RenderPresent(renderer);
                        SDL_Delay(20);
                        return 0;
                    });
                }
            }

            /*
            if (sdl_button[2].reset()) {
                play_video_windows(asserts_path("cache/报警.gif"), renderer, [&](SDL_Renderer *renderer, SDL_Texture *texture) -> int {
                    SDL_RenderPresent(renderer);
                    SDL_Delay(20);
                    return 0;
                });
            }
             */

            SDL_RenderPresent(renderer);
            SDL_Delay(20);

            SDL_PollEvent(&event);
            switch (event.type) {
                case SDL_QUIT:
                    LOGD("Could not allocate pixel buffers - SDL_QUIT\n");
                    SDL_Delay(20000);
                    SDL_DestroyTexture(texture);
                    SDL_DestroyRenderer(renderer);
                    SDL_DestroyWindow(screen);
                    SDL_Quit();
                    return 1;
                case SDL_FINGERDOWN: {
                    /*
                    dstrect.x = event.tfinger.x * displayRect.w;
                    dstrect.y = event.tfinger.y * displayRect.h;
                    dstrect.w = 128;
                    dstrect.h = 128;
                     */
                    int x = event.tfinger.x * displayRect.w;
                    int y = event.tfinger.y * displayRect.h;
                    pinp_dstrect_video.x = x;
                    pinp_dstrect_video.y = y;
                    LOGD("event.tfinger.x=%f, event.tfinger.y=%f\n", event.tfinger.x,
                         event.tfinger.y);
                    LOGD("x=%d,y=%d\n", x, y);
                    LOGD("displayRect.w=%d,displayRect.h=%d\n", displayRect.w, displayRect.h);
                    for(unsigned i = 0; i < sizeof(sdl_button)/sizeof(sdl_button[0]); i++){
                        sdl_button[i].click(x, y);
                    }
                    if (sdl_button[0].clicked()) {
                        begin = SDL_GetTicks();
                    }
                    break;
                }
                default:
                    break;
            }

            return 0;
        });
    }

    while(1) {
        // Open video file
        if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {
            LOGD("avformat_open_input filaed file_name=%s\n", file_name);
            return -1; // Couldn't open file
        }


        // Retrieve stream information
        if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
            LOGD("avformat_find_stream_info filaed file_name=%s\n", file_name);
            return -1; // Couldn't find stream information
        }


        // Dump information about file onto standard error
        av_dump_format(pFormatCtx, 0, file_name, 0);

        // Find the first video stream
        videoStream = -1;
        for (i = 0; i < pFormatCtx->nb_streams; i++)
            if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
                videoStream = i;
                break;
            }
        if (videoStream == -1) {
            LOGD("videoStream == -1 filaed file_name=%s\n", file_name);
            return -1; // Didn't find a video stream
        }


        // Get a pointer to the codec context for the video stream
        pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec;
        // Find the decoder for the video stream
        pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id);
        if (pCodec == NULL) {
            LOGD("Unsupported codec!\n");
            return -1; // Codec not found
        }

        // Copy context
        pCodecCtx = avcodec_alloc_context3(pCodec);
        if (avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) {
            LOGD("Couldn't copy codec context");
            return -1; // Error copying codec context
        }

        // Open codec
        if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
            LOGD("Couldn't avcodec_open2");
            return -1; // Could not open codec
        }


        // Allocate video frame
        pFrame = av_frame_alloc();

        // Make a screen to put our video
        screen = SDL_CreateWindow(
                "FFmpeg Tutorial",
                SDL_WINDOWPOS_UNDEFINED,
                SDL_WINDOWPOS_UNDEFINED,
                pCodecCtx->width,
                pCodecCtx->height,
                0
        );

        if (!screen) {
            LOGD("SDL: could not create window - exiting\n");
            SDL_Delay(20000);
            exit(1);
        }

        renderer = SDL_CreateRenderer(screen, -1, 0);
        if (!renderer) {
            LOGD("SDL: could not create renderer - exiting\n");
            SDL_Delay(20000);
            exit(1);
        }

        SDL_RendererInfo rendererInfo;
        SDL_GetRendererInfo(renderer, &rendererInfo);

        SDL_Rect displayRect;
        SDL_GetDisplayBounds(0, &displayRect);

        LOGD("Renderinfo++++++++++++++++++++++++++:%s--%s,displayRect.x=%d,displayRect.y=%d,displayRect.w=%d,displayRect.h=%d,\n",
             SDL_GetCurrentVideoDriver(), rendererInfo.name, displayRect.x, displayRect.y,
             displayRect.w, displayRect.h);

        // Allocate a place to put our YUV image on that screen
        texture = SDL_CreateTexture(
                renderer,
                SDL_PIXELFORMAT_YV12,
                SDL_TEXTUREACCESS_STREAMING,
                pCodecCtx->width,
                pCodecCtx->height
        );
        if (!texture) {
            LOGD("SDL: could not create texture - exiting\n");
            SDL_Delay(20000);
            exit(1);
        }

        // initialize SWS context for software scaling
        sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                                 pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
                                 AV_PIX_FMT_YUV420P,
                                 SWS_BILINEAR,
                                 NULL,
                                 NULL,
                                 NULL);

        // set up YV12 pixel array (12 bits per pixel)
        yPlaneSz = pCodecCtx->width * pCodecCtx->height;
        uvPlaneSz = pCodecCtx->width * pCodecCtx->height / 4;
        yPlane = (Uint8 *) malloc(yPlaneSz);
        uPlane = (Uint8 *) malloc(uvPlaneSz);
        vPlane = (Uint8 *) malloc(uvPlaneSz);
        if (!yPlane || !uPlane || !vPlane) {
            LOGD("Could not allocate pixel buffers - exiting\n");
            SDL_Delay(20000);
            exit(1);
        }

        uvPitch = pCodecCtx->width / 2;

        Uint32 begin = SDL_GetTicks();
        SDL_Rect maqquee_rect[4];
        int drws_marquee = 0;

        while (av_read_frame(pFormatCtx, &packet) >= 0) {
            // Is this a packet from the video stream?
            if (packet.stream_index == videoStream) {
                // Decode video frame
                avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

                // Did we get a video frame?
                if (frameFinished) {
                    AVPicture pict;
                    pict.data[0] = yPlane;
                    pict.data[1] = uPlane;
                    pict.data[2] = vPlane;
                    pict.linesize[0] = pCodecCtx->width;
                    pict.linesize[1] = uvPitch;
                    pict.linesize[2] = uvPitch;

                    // Convert the image into YUV format that SDL uses
                    sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,
                              pFrame->linesize, 0, pCodecCtx->height, pict.data,
                              pict.linesize);

                    SDL_UpdateYUVTexture(
                            texture,
                            NULL,
                            yPlane,
                            pCodecCtx->width,
                            uPlane,
                            uvPitch,
                            vPlane,
                            uvPitch
                    );

                    SDL_RenderClear(renderer);
                    SDL_RenderCopy(renderer, texture, NULL, NULL);

                    //SDL_RenderCopy(renderer, image_texture, NULL, &dstrect);
                    //SDL_RenderCopy(renderer, pinp_image_texture, NULL, &pinp_dstrect);
                    //image_button.draw(renderer);
                    for(unsigned i = 0; i < sizeof(sdl_button)/sizeof(sdl_button[0]); i++){
                        sdl_button[i].draw(renderer);
                    }

                    if (sdl_button[0].clicked()) {
                        SDL_SetRenderDrawColor(renderer, 255, 0, 0, 100);
                        SDL_RenderFillRects(renderer, maqquee_rect,
                                            marquee_time(displayRect, maqquee_rect,
                                                         begin - SDL_GetTicks()));
                    }
                    if (sdl_button[1].clicked()) {
                        SDL_RenderCopy(renderer, texture, NULL, &pinp_dstrect_video);
                    }

                    SDL_RenderPresent(renderer);
                    SDL_Delay(20);
                }
            }

            // Free the packet that was allocated by av_read_frame
            if(g_file_change){
                g_file_change = 0;
                break;
            }
            av_free_packet(&packet);
            SDL_PollEvent(&event);
            switch (event.type) {
                case SDL_QUIT:
                    LOGD("Could not allocate pixel buffers - SDL_QUIT\n");
                    SDL_Delay(20000);
                    SDL_DestroyTexture(texture);
                    SDL_DestroyRenderer(renderer);
                    SDL_DestroyWindow(screen);
                    SDL_Quit();
                    exit(0);
                    break;
                case SDL_FINGERDOWN: {
                    /*
                    dstrect.x = event.tfinger.x * displayRect.w;
                    dstrect.y = event.tfinger.y * displayRect.h;
                    dstrect.w = 128;
                    dstrect.h = 128;
                     */
                    int x = event.tfinger.x * displayRect.w;
                    int y = event.tfinger.y * displayRect.h;
                    pinp_dstrect_video.x = x;
                    pinp_dstrect_video.y = y;
                    LOGD("event.tfinger.x=%f, event.tfinger.y=%f\n", event.tfinger.x,
                         event.tfinger.y);
                    LOGD("x=%d,y=%d\n", x, y);
                    LOGD("displayRect.w=%d,displayRect.h=%d\n", displayRect.w, displayRect.h);
                    for(unsigned i = 0; i < sizeof(sdl_button)/sizeof(sdl_button[0]); i++){
                        sdl_button[i].click(x, y);
                    }
                    break;
                }
                default:
                    break;
            }

        }

        // Free the YUV frame
        av_frame_free(&pFrame);
        free(yPlane);
        free(uPlane);
        free(vPlane);

        // Close the codec
        avcodec_close(pCodecCtx);
        avcodec_close(pCodecCtxOrig);

        // Close the video file
        avformat_close_input(&pFormatCtx);

        SDL_DestroyTexture(texture);
        SDL_DestroyRenderer(renderer);
        SDL_DestroyWindow(screen);
    }

    SDL_Quit();

    return 0;
}
