#include "includes.h"
#include "log.h"

#define MEDIA_DEMO_VER "v0.0.1"

/***********************************************************************
 * Note: media-jni.cpp hosts every Java->C entry point and every
 * C->Java callback used by the media demo.
 * ********************************************************************/

//JNI naming rule: Java_<package with underscores>_<class>_<method>
//e.g. Java_com_lstudio_mediademo_MainActivity_stringFromJNI

extern "C" JNIEXPORT jstring JNICALL
Java_com_lstudio_mediademo_MainActivity_stringFromJNI(JNIEnv *env, jobject /* this */)
{
    // Returns the FFmpeg build-configuration string to Java, which proves the
    // native FFmpeg libraries are linked and callable from the app.
    // (Removed an unused "Hello from C++" local left over from the template.)
    avcodec_register_all(); // deprecated no-op in FFmpeg >= 4.0; kept for older builds
    std::string version = avcodec_configuration();
    return env->NewStringUTF(version.c_str());
}

extern "C" JNIEXPORT jstring JNICALL
Java_com_lstudio_mediademo_MainActivity_getMediaJNIVersion(JNIEnv *env, jobject thiz)
{
    // Returns the native media library version string.
    // Fix: previously returned a placeholder " Hello from C++" greeting instead
    // of the MEDIA_DEMO_VER constant this method's name promises.
    std::string version = MEDIA_DEMO_VER;
    return env->NewStringUTF(version.c_str());
}

// Arguments handed from the JNI thread to a playback worker thread.
// Lifetime contract: the struct lives on the creator's stack; the worker
// copies filepath_/win_ into locals and then posts `sem`, after which the
// creator may let the struct go out of scope (see playVideo / the workers).
struct PlayArgs
{
    std::string filepath_;   // path of the media file to play
    ANativeWindow *win_;     // render target obtained from the Java Surface
    sem_t sem;               // handshake: worker posts once it has copied the fields
};

#define PLAER_TAG "player"
static void *play_thread_proc(void *arg);
static void *play_thread_proc_gl(void *arg);

extern "C" JNIEXPORT jint JNICALL
Java_com_lstudio_mediademo_MainActivity_playVideo(JNIEnv *env, jobject thiz, jstring filepath, jobject s)
{
    // Starts a detached background thread that decodes `filepath` and renders
    // it onto the given Surface. Blocks only until the worker has copied its
    // arguments. Returns 0 on success, -1 on failure.
    const char *mp4file = env->GetStringUTFChars(filepath, 0);
    if (mp4file == NULL)
        return -1; // OOM while converting the Java string

    struct PlayArgs args;
    args.filepath_ = mp4file; // copied into std::string; the UTF chars can be released now
    env->ReleaseStringUTFChars(filepath, mp4file);

    args.win_ = ANativeWindow_fromSurface(env, s);
    if (args.win_ == NULL)
        return -1; // invalid or already-released Surface

    sem_init(&args.sem, 0, 0);

    pthread_t thread_id;
    int rc = pthread_create(&thread_id, NULL, play_thread_proc, (void *)&args);
    //int rc = pthread_create(&thread_id, NULL, play_thread_proc_gl, (void *)&args);
    if (rc != 0)
    {
        // Fix: the create result was previously ignored; a failed create would
        // have deadlocked on sem_wait below.
        ANativeWindow_release(args.win_);
        sem_destroy(&args.sem);
        return -1;
    }
    // Fix: the thread is never joined; detach it so its resources are
    // reclaimed when it exits.
    pthread_detach(thread_id);

    // Wait for the worker to copy `args` before this stack frame unwinds.
    sem_wait(&args.sem);
    sem_destroy(&args.sem);

    LOG_DEB(PLAER_TAG, "后台线程开始播放 %s", args.filepath_.c_str());

    return 0;
}

static void *play_thread_proc(void *arg)
{
    //struct PlayArgs playArgs = *(struct PlayArgs *)arg;
    //sem_post(&playArgs.sem);

    struct PlayArgs *playArgs = (struct PlayArgs *)arg;
    std::string filepath = playArgs->filepath_;
    ANativeWindow *pwin = playArgs->win_;
    sem_post(&playArgs->sem);

    AVFormatContext *pfmt_ctx;
    pfmt_ctx = avformat_alloc_context();

    int ret = avformat_open_input(&pfmt_ctx, filepath.c_str(), NULL, NULL);
    if (ret != 0)
    {
        //open failed
        return NULL;
    }

    avformat_find_stream_info(pfmt_ctx, NULL);

    int video_idx = av_find_best_stream(pfmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);

    AVCodecParameters *c_par;
    AVCodecContext *cc_ctx;
    const AVCodec *codec;

    c_par = pfmt_ctx->streams[video_idx]->codecpar;
    codec = avcodec_find_decoder(c_par->codec_id);

    //用参数c_par实例化编解码器上下文，，并打开编解码器
    cc_ctx = avcodec_alloc_context3(codec);

    // 关联解码器上下文
    ret = avcodec_parameters_to_context(cc_ctx, c_par);

    if (ret < 0)
    {
        //LOGE("解码器上下文关联失败:%s", av_err2str(re));
        //return;
    }

    //打开解码器
    ret = avcodec_open2(cc_ctx, codec, nullptr);

    if (ret != 0)
    {
        // LOGE("打开解码器失败:%s", av_err2str(re));
        // return;
    }

    //数据包
    AVPacket *pkt;
    //数据帧
    AVFrame *frame;

    //初始化
    pkt = av_packet_alloc();
    frame = av_frame_alloc();


        // 获取视频的宽高,也可以通过解码器获取
    AVStream *as = pfmt_ctx->streams[video_idx];
    int width = as->codecpar->width;
    int height = as->codecpar->height;

    LOG_INFO(PLAER_TAG,"width:%d", width);
    LOG_INFO(PLAER_TAG,"height:%d", height);

    //初始化像素格式转换的上下文
    SwsContext *vctx = NULL;
    int outWidth = 1920;
    int outHeight = 1080;
    char *rgb = new char[outWidth * outHeight * 4];
    char *pcm = new char[48000 * 4 * 2];

    //设置AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420 看来还需要做其他事？
    //显示窗口初始化
    ANativeWindow *nwin = pwin; //ANativeWindow_fromSurface(env, s);
    ANativeWindow_setBuffersGeometry(nwin, width, height, AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
    //ANativeWindow_setBuffersGeometry(nwin, outWidth, outHeight, AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
    ANativeWindow_Buffer wbuf;

    int ret2 = -1;

PLAY_AGAIN:
    while ((ret2 = av_read_frame(pfmt_ctx, pkt)) >= 0)
    { //持续读帧
        // 只解码视频流
        if (pkt->stream_index == video_idx)
        {

            //发送数据包到解码器
            avcodec_send_packet(cc_ctx, pkt);

            //清理
            av_packet_unref(pkt);

            //这里为什么要使用一个for循环呢？
            // 因为avcodec_send_packet和avcodec_receive_frame并不是一对一的关系的
            //一个avcodec_send_packet可能会出发多个avcodec_receive_frame
            for (;;)
            {
                // 接受解码的数据
                ret = avcodec_receive_frame(cc_ctx, frame);
                if (ret != 0)
                {
                    break;
                }
                else
                {

                    // 将YUV数据转换成RGB数据显示

                    vctx = sws_getCachedContext(vctx,
                                                frame->width,
                                                frame->height,
                                                (AVPixelFormat)frame->format,
                                                outWidth,
                                                outHeight,
                                                AV_PIX_FMT_YUV420P,//AV_PIX_FMT_YUV420P12LE,//AV_PIX_FMT_YUV420P10LE,
                                                SWS_FAST_BILINEAR,
                                                0, 0, 0);
                    if (!vctx)
                    {
                        LOG_ERR(PLAER_TAG, "sws_getCachedContext failed!");
                    }
                    else
                    {
                        uint8_t *data[AV_NUM_DATA_POINTERS] = {0};
                        data[0] = (uint8_t *)rgb;
                        int lines[AV_NUM_DATA_POINTERS] = {0};
                        lines[0] = outWidth * 4;
                        #if 0
                        int h = sws_scale(vctx,
                                          (const uint8_t **)frame->data,
                                          frame->linesize, 0,
                                          frame->height,
                                          data, lines);
                        LOG_INFO(PLAER_TAG, "sws_scale = %d, frame->format=%d", h, frame->format);
                        if (h > 0)
                        #endif
                        {
                            // 绘制
                            ANativeWindow_lock(nwin, &wbuf, 0);
                            uint8_t *dst = (uint8_t *)wbuf.bits;
                            //memcpy(dst, rgb, outWidth * outHeight * 4);
                            memcpy(dst, frame->data, width * height);
                            
                            ANativeWindow_unlockAndPost(nwin);
                        }
                    }
                }
            }
        }
    }

    LOG_INFO(PLAER_TAG, "#####");

    if (ret2 == AVERROR_EOF)
    {
        LOG_INFO(PLAER_TAG, "#####");
        //avformat_seek_file(pfmt_ctx, video_idx, 0, 0, 0, );
        av_seek_frame(pfmt_ctx, video_idx, 0, AVSEEK_FLAG_FRAME);
        LOG_WAR(PLAER_TAG, "重新播放");
        goto PLAY_AGAIN;
    }

    //关闭环境
    avcodec_free_context(&cc_ctx);
    // 释放资源
    av_frame_free(&frame);
    av_packet_free(&pkt);

    avformat_close_input(&pfmt_ctx);

    avformat_free_context(pfmt_ctx);

    LOG_INFO(PLAER_TAG, "播放完毕");

    return NULL;
}

#include <EGL/egl.h>
#include <GLES2/gl2.h>

//Macro for embedding GLSL shader source as a C string.
// The # operator stringizes the argument so the source needs no manual quoting — same trick as ijkplayer

#define GET_STR(x) #x
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "FFMPEG", __VA_ARGS__)

// Vertex shader (GLSL). The // comments inside GET_STR are removed by the
// preprocessor (phase 3) before stringization, so they never reach the GL
// shader compiler.
static const char *vertexShader = GET_STR(

    attribute vec4 aPosition; // vertex position, supplied by the application

    attribute vec2 aTexCoord; // texture coordinate of this vertex

    varying vec2 vTexCoord; // texture coordinate handed to the fragment shader
    void main() {
        // Flip the texture coordinate vertically: convert from a top-left
        // origin to the bottom-left origin GL expects, e.g. (0,0) -> (0,1).
        vTexCoord = vec2(aTexCoord.x, 1.0 - aTexCoord.y);
        gl_Position = aPosition;
    });

//片元着色器,软解码和部分x86硬解码解码得出来的格式是YUV420p

// Fragment shader: software decoding (and some x86 hardware decoders) produce
// YUV420P, which this shader converts to RGB on the GPU.
static const char *fragYUV420P = GET_STR(

    precision mediump float; // default float precision

    varying vec2 vTexCoord; // coordinate from the vertex shader (linked by name)

    uniform sampler2D yTexture; // Y plane (single-channel luminance)

    uniform sampler2D uTexture; // U plane

    uniform sampler2D vTexture; // V plane
    void main() {
        vec3 yuv;
        vec3 rgb;
        yuv.r = texture2D(yTexture, vTexCoord).r; // Y component
        // U/V center on 127, so subtract 0.5 (the GLES sampler maps the
        // 0..255 bytes to 0.0..1.0 floats).
        yuv.g = texture2D(uTexture, vTexCoord).r - 0.5; // U component
        yuv.b = texture2D(vTexture, vTexCoord).r - 0.5; // V component
        // YUV -> RGB: done here with a single matrix multiply (the
        // alternative is computing each channel with its own formula).
        rgb = mat3(1.0, 1.0, 1.0,
                   0.0, -0.39465, 2.03211,
                   1.13983, -0.58060, 0.0) *
              yuv;
        // Final pixel color.
        gl_FragColor = vec4(rgb, 1.0);
    });

/**
 * Compile a single GLSL shader.
 * @param code null-terminated GLSL source
 * @param type GL_VERTEX_SHADER or GL_FRAGMENT_SHADER
 * @return the shader handle, or 0 on failure
 */
GLint InitShader(const char *code, GLint type)
{
    // Create the shader object.
    GLint sh = glCreateShader(type);
    if (sh == 0)
    {
        LOGE("glCreateShader %d failed!", type);
        return 0;
    }
    // Attach the source: one string, length 0 => null-terminated.
    glShaderSource(sh,
                   1,     // number of strings
                   &code, // shader source
                   0);    // lengths (0 = null-terminated)
    glCompileShader(sh);

    // Check the compile result.
    GLint status;
    glGetShaderiv(sh, GL_COMPILE_STATUS, &status);
    if (status == 0)
    {
        // Fix: fetch the compiler log so shader errors are diagnosable.
        char info[1024] = {0};
        glGetShaderInfoLog(sh, sizeof(info) - 1, NULL, info);
        LOGE("glCompileShader failed: %s", info);
        // Fix: don't leak the shader object on compile failure.
        glDeleteShader(sh);
        return 0;
    }
    LOGE("glCompileShader success!"); // LOGE is the only log macro defined here
    return sh;
}

/**
 * 将数据转换成double类型的一个方法
 * @param r
 * @return
 */
/**
 * Convert an AVRational to a double.
 * @param r rational value (num/den)
 * @return num/den as a double, or 0 when either part is zero
 */
static double r2d(AVRational r)
{
    if (r.num == 0 || r.den == 0)
        return 0;
    return static_cast<double>(r.num) / static_cast<double>(r.den);
}

// 1 while the GL playback thread runs, 0 otherwise (set/cleared by the thread,
// read by getPlayerStatus).
static int play_status = 0;
// Control bits polled by the playback loop; bit 0 = stop request (stopPlayVideo).
static int play_op = 0;

// 1: decode with the FFmpeg software decoder; 0: decode with Android MediaCodec.
#define USE_FFMPEG_DECODER 0

static void *play_thread_proc_gl(void *arg)
{
    // GL playback worker: demuxes the file, decodes H.264 either with the
    // FFmpeg software decoder (USE_FFMPEG_DECODER=1) or with Android
    // MediaCodec (=0), then renders the YUV planes through OpenGL ES 2
    // textures and the YUV->RGB fragment shader above.
    // Loops on EOF; stopped by raising bit 0 of play_op.

    play_status = 1; // advertise "running" to getPlayerStatus()
    play_op = 0;     // clear any previous stop request

    struct PlayArgs *playArgs = (struct PlayArgs *)arg;
    std::string filepath = playArgs->filepath_;
    ANativeWindow *pwin = playArgs->win_;
    sem_post(&playArgs->sem); // creator's stack PlayArgs may go out of scope now

    AVFormatContext *pfmt_ctx;
    pfmt_ctx = avformat_alloc_context();

    int ret = avformat_open_input(&pfmt_ctx, filepath.c_str(), NULL, NULL);
    if (ret != 0)
    {
        // open failed (avformat_open_input frees the context on failure).
        // NOTE(review): play_status stays 1 on this early return — confirm intended.
        return NULL;
    }

    avformat_find_stream_info(pfmt_ctx, NULL);

    int video_idx = av_find_best_stream(pfmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    // NOTE(review): video_idx can be negative (no video stream) and is used
    // unchecked below.

    AVCodecParameters *c_par;
    AVCodecContext *cc_ctx;
    const AVCodec *codec;

    c_par = pfmt_ctx->streams[video_idx]->codecpar;
    codec = avcodec_find_decoder(c_par->codec_id);

    // Instantiate the codec context from the stream parameters, then open it.
    cc_ctx = avcodec_alloc_context3(codec);

    // Bind the stream parameters to the decoder context.
    ret = avcodec_parameters_to_context(cc_ctx, c_par);

    if (ret < 0)
    {
        //LOGE("failed to bind decoder context:%s", av_err2str(re));
        //return;
    }

    // Open the decoder.
    ret = avcodec_open2(cc_ctx, codec, nullptr);

    if (ret != 0)
    {
        // LOGE("failed to open decoder:%s", av_err2str(re));
        // return;
    }

    // Video dimensions (also obtainable from the decoder context).
    AVStream *as = pfmt_ctx->streams[video_idx];
    int width = as->codecpar->width;
    int height = as->codecpar->height;

    LOGE("width:%d", width);
    LOGE("height:%d", height);

    // Demuxed packet.
    AVPacket *pkt;
    // Decoded frame.
    AVFrame *frame;

    // Allocate both.
    pkt = av_packet_alloc();
    frame = av_frame_alloc();

    // Pixel-format conversion context (not used by the current code path).
    SwsContext *vctx = NULL;
    int outWidth = 1920;
    int outHeight = 1080;
    // NOTE(review): rgb and pcm are allocated but never used nor freed (leak).
    char *rgb = new char[outWidth * outHeight * 4];
    char *pcm = new char[48000 * 4 * 2];

    // Display window setup.
    ANativeWindow *nwin = pwin; //ANativeWindow_fromSurface(env, s);
    ANativeWindow_setBuffersGeometry(nwin, outWidth, outHeight, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer wbuf;

    //OPENGL
    ///EGL
    //1 Create and initialize the EGL display.
    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (display == EGL_NO_DISPLAY)
    {
        LOGE("eglGetDisplay failed!");
        return NULL;
    }
    if (EGL_TRUE != eglInitialize(display, 0, 0))
    {
        LOGE("eglInitialize failed!");
        return NULL;
    }
    //2 surface
    //2-1 Window surface configuration.
    EGLConfig config;
    EGLint configNum;
    EGLint configSpec[] = {
        EGL_RED_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_BLUE_SIZE, 8,
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT, EGL_NONE};
    if (EGL_TRUE != eglChooseConfig(display, configSpec, &config, 1, &configNum))
    {
        LOGE("eglChooseConfig failed!");
        return NULL;
    }
    // Create the window surface.
    EGLSurface winsurface = eglCreateWindowSurface(display, config, nwin, 0);
    if (winsurface == EGL_NO_SURFACE)
    {
        LOGE("eglCreateWindowSurface failed!");
        return NULL;
    }

    //3 Create the GL context bound to this display.
    const EGLint ctxAttr[] = {
        EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
    EGLContext context = eglCreateContext(display, config, EGL_NO_CONTEXT, ctxAttr);
    if (context == EGL_NO_CONTEXT)
    {
        LOGE("eglCreateContext failed!");
        return NULL;
    }
    if (EGL_TRUE != eglMakeCurrent(display, winsurface, winsurface, context))
    {
        LOGE("eglMakeCurrent failed!");
        return NULL;
    }

    LOGE("EGL Init Success!");

    // Compile the vertex shader.
    GLint vsh = InitShader(vertexShader, GL_VERTEX_SHADER);
    // Compile the YUV420P fragment shader.
    GLint fsh = InitShader(fragYUV420P, GL_FRAGMENT_SHADER);

    // Create the rendering program.
    GLint program = glCreateProgram();
    if (program == 0)
    {
        LOGE("glCreateProgram failed!");
        return NULL;
    }
    // Attach both shaders to the program.
    glAttachShader(program, vsh);
    glAttachShader(program, fsh);

    // Link the program.
    glLinkProgram(program);
    GLint status = 0;
    glGetProgramiv(program, GL_LINK_STATUS, &status);
    if (status != GL_TRUE)
    {
        LOGE("glLinkProgram failed!");
        return NULL;
    }
    glUseProgram(program);
    LOGE("glLinkProgram success!");

    // Full-screen quad: four vertices consumed as GL_TRIANGLE_STRIP below
    // (two triangles forming a rectangle).
    static float vers[] = {
        1.0f,
        -1.0f,
        0.0f,
        -1.0f,
        -1.0f,
        0.0f,
        1.0f,
        1.0f,
        0.0f,
        -1.0f,
        1.0f,
        0.0f,
    };
    GLuint apos = (GLuint)glGetAttribLocation(program, "aPosition");
    glEnableVertexAttribArray(apos);
    // Upload vertex positions (stride 12 bytes = 3 floats).
    glVertexAttribPointer(apos, 3, GL_FLOAT, GL_FALSE, 12, vers);

    // Texture coordinates for the quad.
    static float txts[] = {
        1.0f, 0.0f, // bottom-right
        0.0f, 0.0f,
        1.0f, 1.0f,
        0.0, 1.0};
    GLuint atex = (GLuint)glGetAttribLocation(program, "aTexCoord");
    glEnableVertexAttribArray(atex);
    glVertexAttribPointer(atex, 2, GL_FLOAT, GL_FALSE, 8, txts);

    // Bind the sampler uniforms to their texture units.
    glUniform1i(glGetUniformLocation(program, "yTexture"), 0); // texture unit 0
    glUniform1i(glGetUniformLocation(program, "uTexture"), 1); // texture unit 1
    glUniform1i(glGetUniformLocation(program, "vTexture"), 2); // texture unit 2

    // Create the three OpenGL textures (Y, U, V planes).
    GLuint texts[3] = {0};
    glGenTextures(3, texts);

    // Y-plane texture: full resolution, single-channel luminance.
    glBindTexture(GL_TEXTURE_2D, texts[0]);
    // Minification / magnification filters.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    // Allocate texture storage (no initial data).
    glTexImage2D(GL_TEXTURE_2D,
                 0,                // mip level (0 = base)
                 GL_LUMINANCE,     // GPU internal format: single-channel gray
                 width, height,    // full video size (stretched to the quad)
                 0,                // border (must be 0)
                 GL_LUMINANCE,     // pixel format — must match internal format
                 GL_UNSIGNED_BYTE, // component type
                 NULL              // no initial data
    );

    // U-plane texture: quarter resolution (4:2:0 chroma subsampling).
    glBindTexture(GL_TEXTURE_2D, texts[1]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D,
                 0,                     // mip level
                 GL_LUMINANCE,          // single-channel gray
                 width / 2, height / 2, // half width, half height
                 0,                     // border
                 GL_LUMINANCE,          // pixel format — must match internal format
                 GL_UNSIGNED_BYTE,      // component type
                 NULL                   // no initial data
    );

    // V-plane texture: quarter resolution, same as U.
    glBindTexture(GL_TEXTURE_2D, texts[2]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D,
                 0,                     // mip level
                 GL_LUMINANCE,          // single-channel gray
                 width / 2, height / 2, // half width, half height
                 0,                     // border
                 GL_LUMINANCE,          // pixel format — must match internal format
                 GL_UNSIGNED_BYTE,      // component type
                 NULL                   // no initial data
    );

    // CPU-side plane buffers uploaded to the textures each frame.
    // NOTE(review): never freed; in the FFmpeg path below the pointers are
    // overwritten with frame->data[] so these allocations leak.
    unsigned char *buf[3] = {0};
    buf[0] = new unsigned char[width * height];
    buf[1] = new unsigned char[width * height / 4];
    buf[2] = new unsigned char[width * height / 4];
    ////

    //memset(buf[1], 0x7f, width * height / 4);
    //memset(buf[2], 0x7f, width * height / 4);

    int ret2 = -1;
    int frame_cout = 0;
    int duration = 40000; // target frame interval in us (assumes 25 fps)
    uint64_t cur_ts, pre_ts;
    uint64_t start_ts = get_current_timestamp();

    // MediaCodec H.264 decoder setup.
    const char mime[32] = "video/avc";
    AMediaCodec *p_mediacodec = AMediaCodec_createDecoderByType(mime);
    AMediaFormat *p_codec_fmt = AMediaFormat_new();
    AMediaFormat_setString(p_codec_fmt, AMEDIAFORMAT_KEY_MIME, mime);
    AMediaFormat_setInt32(p_codec_fmt, AMEDIAFORMAT_KEY_WIDTH, width);
    AMediaFormat_setInt32(p_codec_fmt, AMEDIAFORMAT_KEY_HEIGHT, height);
    //AMediaFormat_setInt32(p_codec_fmt, AMEDIAFORMAT_KEY_COLOR_FORMAT, 21);//does a decoder need this?
    //AMEDIAFORMAT_KEY_COLOR_FORMAT
    AMediaCodec_configure(p_mediacodec, p_codec_fmt, NULL, NULL, 0);
    AMediaCodec_start(p_mediacodec);

    //AMediaFormat_delete(p_codec_fmt);//deferred to the end

    uint8_t *pdata = NULL;
    int data_size = 0;
    // Convert AVCC (length-prefixed) H.264 to Annex-B start codes for MediaCodec.
    // NOTE(review): av_bitstream_filter_* is a deprecated FFmpeg API; the
    // replacement is av_bsf_* (AVBSFContext).
    AVBitStreamFilterContext *p_bs_filter = NULL;
    p_bs_filter = av_bitstream_filter_init("h264_mp4toannexb");

PLAY_AGAIN:
    while ((ret2 = av_read_frame(pfmt_ctx, pkt)) >= 0)
    { // keep reading packets

        // Only decode the video stream.
        if (pkt->stream_index == video_idx)
        {

            // Frame pacing: sleep the remainder of the frame interval.
            cur_ts = get_current_timestamp();

            if (frame_cout > 0)
            {
                // NOTE(review): duration - (cur_ts - pre_ts) is evaluated in
                // unsigned 64-bit arithmetic, so it wraps (never negative)
                // when a frame took longer than `duration`; the huge value is
                // then truncated to int — confirm this behaves as intended.
                int sleep_us = duration - (cur_ts - pre_ts) > 0 ? duration - (cur_ts - pre_ts) : 0;
                if (sleep_us > 0)
                    usleep(sleep_us);
            }

            pre_ts = cur_ts;
            frame_cout++;

            ///////////////
#if USE_FFMPEG_DECODER
            // Feed the packet to the FFmpeg decoder.
            avcodec_send_packet(cc_ctx, pkt);
#else
            // h264_mp4toannexb: rewrites pkt->data into a malloc'd Annex-B
            // buffer returned via pdata/data_size.
            av_bitstream_filter_filter(p_bs_filter, cc_ctx, NULL, &pdata, &data_size, pkt->data, pkt->size, pkt->flags & AV_PKT_FLAG_KEY);

            // NOTE(review): AMediaCodec_dequeueInputBuffer returns ssize_t;
            // storing it in size_t makes the `< 0` check below always false,
            // so a failed dequeue (-1) is passed straight to getInputBuffer.
            // Also: 4,000,000 us is 4 s, not the 40 ms the original comment claimed.
            size_t buf_idx = AMediaCodec_dequeueInputBuffer(p_mediacodec, 4000000); //timeout in us
            if (buf_idx < 0)
                LOG_WAR("MediaCodec", "buf_idx error.\n");
            size_t out_size = 0;
            uint8_t *p_buf_in = AMediaCodec_getInputBuffer(p_mediacodec, buf_idx, &out_size);
            if (data_size < out_size)
            {
                //LOG_WAR("MediaCodec", "AMediaCodec_getInputBufferout_size =%d.\n", out_size);
                memcpy(p_buf_in, pdata, data_size);
            }

            AMediaCodec_queueInputBuffer(p_mediacodec, buf_idx, 0, data_size, cur_ts, 0);

#endif

            // Must free the filter output here, otherwise memory keeps growing.
            if (pdata != NULL)
            {
                free(pdata);
                pdata = NULL;
            }

            // Release the packet's buffers.
            av_packet_unref(pkt);

            // Drain loop: avcodec_send_packet and avcodec_receive_frame are
            // not 1:1 — one packet may produce several frames.
#if USE_FFMPEG_DECODER
            for (;;)
#endif
            {
                // Fetch one decoded frame.
#if USE_FFMPEG_DECODER
                ret = avcodec_receive_frame(cc_ctx, frame);
#else

              //  LOG_WAR("MediaCodec", "AMediaCodec_getOutputBuffer.\n");
                size_t out_frame_size = 0;
                AMediaCodecBufferInfo buf_info = {0};
                ssize_t out_idx = -1;//AMediaCodec_dequeueOutputBuffer(p_mediacodec, &buf_info, 100000000);
                WAIT:
                // NOTE(review): busy-waits via goto until a buffer with
                // size > 0 arrives. Negative status codes (try-again, format
                // change, buffers changed) also loop here — at EOF this spins
                // forever. In testing the timeout appeared ineffective (even
                // 10 s timeouts still returned without a buffer).
                out_idx = AMediaCodec_dequeueOutputBuffer(p_mediacodec, &buf_info, 100000000);
                if (buf_info.size <= 0)
                {
                    LOG_WAR("MediaCodec", "AMediaCodec_getOutputBuffer buf_info.size =%d.\n", buf_info.size);
                    LOG_WAR("MediaCodec", "AMediaCodec_getOutputBuffer out_idx =%d.\n", out_idx);
                    goto WAIT;
                }
                //LOG_WAR("MediaCodec", "AMediaCodec_getOutputBuffer buf_info.size =%d.\n", buf_info.size);
               // LOG_WAR("MediaCodec", "AMediaCodec_getOutputBuffer out_idx =%d.\n", out_idx);
                uint8_t *p_buf_out = AMediaCodec_getOutputBuffer(p_mediacodec, out_idx, &out_frame_size);
               // LOG_WAR("MediaCodec", "AMediaCodec_getOutputBuffer out_frame_size =%d p_buf_out=0x%x.\n", out_frame_size, p_buf_out);
#endif
#if USE_FFMPEG_DECODER
                if (ret != 0)
                {
                    break;
                }
                else
#endif
                {
#if 1 //no display
                    // Decoded YUV planes.
#if USE_FFMPEG_DECODER
                    // Y data
                    buf[0] = frame->data[0];
                    buf[1] = frame->data[1];
                    buf[2] = frame->data[2];

                    //memcpy(buf[0],frame->data[0],width*height);
                    // U data
                    //memcpy(buf[1],frame->data[1],width*height/4);

                    // V data
                    //memcpy(buf[2],frame->data[2],width*height/4);
#else

                    //Y UV
                    // MediaCodec output is NV12 (Y plane followed by
                    // interleaved UV); de-interleave into the separate U and V
                    // buffers the YUV420P shader expects.
                    //buf[0] = p_buf_out;
                    //buf[1] = p_buf_out + width * height;
                    //buf[2] = p_buf_out;// + width * height + width * height / 4;
                   // LOG_WAR("MediaCodec", "###############width=%d height=%d##############.\n", width , height);
                   // LOG_WAR("MediaCodec", "###############buf[0]=0x%x buf[1]=0x%x buf[2]=0x%x##############.\n", buf[0],buf[1] , buf[2]);
                    memcpy(buf[0],p_buf_out,width*height);
                    //memcpy(buf[1],p_buf_out,width*height/4);
                    //memcpy(buf[2],p_buf_out,width*height/4);
#if 1
                    int k,k2;
                    for (k = 0; k < height/2; k++)
                    {
                        for (k2 = 0; k2 < width/2; k2++)
                        {
                          //  *(buf[1] + k*width/2 + k2) = *((p_buf_out+width * height) + 2*(k*width/2 + k2)+1);
                          //  *(buf[2] + k*width/2 + k2) = *((p_buf_out+width * height) + 2*(k*width/2 + k2)+0);
                            *(buf[1] + k*width/2 + k2) = *((p_buf_out+width * height) + 2*(k*width/2 + k2)+0);
                            *(buf[2] + k*width/2 + k2) = *((p_buf_out+width * height) + 2*(k*width/2 + k2)+1);
                            //*(buf[1]+k*width/2 + k2) = *((p_buf_out+width * height)+k*width/2 + k2+0);
                            //*(buf[2]+k*width/2 + k2) = *((p_buf_out+width * height)+k*width/2 + k2+1);
                        }
                    }
#endif

                   // LOG_WAR("MediaCodec", "#############################.\n");

#endif

#if 1
                    // Upload the Y plane to texture unit 0.
                    glActiveTexture(GL_TEXTURE0);
                    glBindTexture(GL_TEXTURE_2D, texts[0]);
                    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf[0]);

                    // Upload the U plane to texture unit 1.
                    glActiveTexture(GL_TEXTURE0 + 1);
                    glBindTexture(GL_TEXTURE_2D, texts[1]);
                    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf[1]);

                    // Upload the V plane to texture unit 2.
                    glActiveTexture(GL_TEXTURE0 + 2);
                    glBindTexture(GL_TEXTURE_2D, texts[2]);
                    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf[2]);

                    // Draw the quad.
                    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
                    // Present the frame.
                    eglSwapBuffers(display, winsurface);
#endif

#endif
                }
#if USE_FFMPEG_DECODER
#else
            AMediaCodec_releaseOutputBuffer(p_mediacodec, out_idx, false);
#endif
            }
        }

        if (play_op & 0x1)
        {
            // Stop requested via stopPlayVideo(): leave the read loop.
            ret2 = 0;
            break;
        }
    }

    LOG_INFO(PLAER_TAG, "#####");

    if (ret2 == AVERROR_EOF)
    {
        LOG_INFO(PLAER_TAG, "#####");
        //avformat_seek_file(pfmt_ctx, video_idx, 0, 0, 0, );
        // Seek back to the start and play again.
        av_seek_frame(pfmt_ctx, video_idx, 0, AVSEEK_FLAG_FRAME);
        LOG_WAR(PLAER_TAG, "重新播放");
        goto PLAY_AGAIN;
    }

    // Teardown.
    // NOTE(review): MediaCodec (p_mediacodec/p_codec_fmt), the bitstream
    // filter, the EGL/GL objects, and the rgb/pcm/buf[] allocations are never
    // released here — leaks on every stop/restart cycle.
    avcodec_free_context(&cc_ctx);
    // Release FFmpeg resources.
    av_frame_free(&frame);
    av_packet_free(&pkt);

    avformat_close_input(&pfmt_ctx);

    // pfmt_ctx is already NULL after avformat_close_input, so this is a no-op.
    avformat_free_context(pfmt_ctx);

    LOG_INFO(PLAER_TAG, "播放完毕");

    play_status = 0; // advertise "stopped"

    return NULL;
}

extern "C" JNIEXPORT jint JNICALL
Java_com_lstudio_mediademo_MainActivity_stopPlayVideo(JNIEnv *env, jobject thiz)
{
    // Signal the GL playback thread to stop: it polls play_op after every
    // demuxed packet and leaves its read loop when bit 0 is set.
    play_op = 0x1;
    return static_cast<jint>(0);
}

extern "C" JNIEXPORT jint JNICALL
Java_com_lstudio_mediademo_MainActivity_getPlayerStatus(JNIEnv *env, jobject thiz)
{
    // Report whether the GL playback thread is running: play_status is set to
    // 1 on thread entry and cleared to 0 when the thread exits.
    return static_cast<jint>(play_status);
}
               