//
// Created by colbert on 2018/4/9.
//
#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <EGL/egl.h>
#include <GLES2/gl2.h>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include <libavutil/avutil.h>
#include <libavformat/avformat.h>
#include <libavutil/frame.h>
#include <libavcodec/jni.h>
#include <libavutil/imgutils.h>
}

#define LOG_W(...) __android_log_print(ANDROID_LOG_WARN, "media_jni", __VA_ARGS__)

//extern "C"
//JNIEXPORT jint
//JNI_OnLoad(JavaVM *vm, void *reserved) {
////JNI_OnLoad(JavaVM *vm) {
//    // 给硬解码器设置虚拟机信息
//    av_jni_set_java_vm(vm, 0);
//    return JNI_VERSION_1_4;
//}

// 播放回调
void playCallback(SLAndroidSimpleBufferQueueItf buffer_queue, void *context) {

}

extern "C"
JNIEXPORT void
Java_xyz_xgqn_media_XPlayer_xOpen(JNIEnv *env, jobject _instance, jstring _url, jobject _surface) {
    // Demux + decode the media at _url, rendering video frames into _surface
    // (RGBA via ANativeWindow) and converting audio to s16 stereo PCM for an
    // OpenSL ES player. Runs synchronously until EOF or error, then releases
    // every resource via the single RELEASE cleanup block.
    //
    // Playback URL (UTF-8); released at the end of the function.
    const char *url = env->GetStringUTFChars(_url, 0);
    // Generic return code.
    int ret = 0;
    // Demuxer context.
    AVFormatContext *fmt_ctx = NULL;
    // Index of the video stream.
    int vs_index;
    // Video decoder.
    AVCodec *v_codec = NULL;
    // Video decoder context.
    AVCodecContext *v_ctx = NULL;
    // Video scale/pixel-format conversion context.
    SwsContext *sws_ctx = NULL;
    // Output video size.
    int dstWidth = 1280, dstHeight = 720;
    // Size of the RGBA video buffer.
    int v_buf_size = av_image_get_buffer_size(AV_PIX_FMT_RGBA, dstWidth, dstHeight, 1);
    // Video buffer.
    char *v_buf = NULL;
    // Index of the audio stream.
    int as_index;
    // Audio decoder.
    AVCodec *a_codec = NULL;
    // Audio decoder context.
    AVCodecContext *a_ctx = NULL;
    // Audio resampling context.
    SwrContext *swr_ctx = NULL;
    // Number of output audio channels.
    int out_nb_channels = 2;
    // Size of the PCM audio buffer (1 s of 48 kHz stereo s16).
    int a_buf_size = 48000 * out_nb_channels * 2;
    // Audio buffer.
    char *a_buf = NULL;
    // Demuxed packet and decoded frame.
    AVPacket *pkt = NULL;
    AVFrame *frame = NULL;
    // Native window for video display.
    ANativeWindow *window = ANativeWindow_fromSurface(env, _surface);
    ANativeWindow_Buffer window_buffer;
    // OpenSL ES objects.
    SLresult slResult;
    SLObjectItf slObject = NULL;
    SLEngineItf slEngine;
    SLObjectItf slMix = NULL; // output mix
    SLDataLocator_OutputMix slDataLocatorMix;
    SLDataSink slDataSink;
    SLDataLocator_AndroidSimpleBufferQueue slDataLocatorBuffer;
    SLDataFormat_PCM slDataFormat;
    SLDataSource slDataSource;
    SLObjectItf slPlayerObj = NULL; // audio player
    SLInterfaceID slPlayerIDs[1] = {SL_IID_BUFFERQUEUE};
    SLboolean slPlayerFlags[1] = {SL_BOOLEAN_FALSE};
    SLPlayItf slPlayer;
    SLAndroidSimpleBufferQueueItf slBuffer = NULL;

    // Register demuxers (no-op on FFmpeg >= 4.0, required on older builds).
    av_register_all();
    // Initialize network components (for http/rtmp/... URLs).
    avformat_network_init();
    // Register decoders.
    avcodec_register_all();

    // Open the input.
    ret = avformat_open_input(&fmt_ctx, url, 0, 0);
    if (ret != 0) {
        LOG_W("avformat_open_input open %s failed, error=%d", url, ret);
        goto RELEASE;
    }

    // Probe stream information.
    ret = avformat_find_stream_info(fmt_ctx, 0);
    if (ret != 0) {
        LOG_W("avformat_find_stream_info failed, error = %d", ret);
        goto RELEASE;
    }

    vs_index = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, 0, 0);
    LOG_W("av_find_best_stream video index = %d", vs_index);
    // av_find_best_stream returns a negative AVERROR when no stream is found;
    // using it unchecked would index streams[] out of bounds.
    if (vs_index < 0) {
        LOG_W("no video stream found, error = %d", vs_index);
        goto RELEASE;
    }
    v_codec = avcodec_find_decoder(fmt_ctx->streams[vs_index]->codecpar->codec_id); // software decoder
    //v_codec = avcodec_find_decoder_by_name("h264_mediacodec"); // hardware decoder
    if (!v_codec) {
        LOG_W("video avcodec_find_decoder failed");
        goto RELEASE;
    }
    v_ctx = avcodec_alloc_context3(v_codec);
    if (!v_ctx) {
        LOG_W("video avcodec_alloc_context3 failed");
        goto RELEASE;
    }
    ret = avcodec_parameters_to_context(v_ctx, fmt_ctx->streams[vs_index]->codecpar);
    if (ret != 0) {
        LOG_W("video avcodec_parameters_to_context failed, error = %d", ret);
        goto RELEASE;
    }
    // Number of decoding threads.
    v_ctx->thread_count = 1;
    // Open the video decoder.
    ret = avcodec_open2(v_ctx, v_codec, 0);
    if (ret != 0) {
        LOG_W("video avcodec_open2 failed, error = %d", ret);
        goto RELEASE;
    }

    // Index of the audio stream (negative AVERROR when absent — see above).
    as_index = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_AUDIO, -1, -1, 0, 0);
    LOG_W("av_find_best_stream audio index = %d", as_index);
    if (as_index < 0) {
        LOG_W("no audio stream found, error = %d", as_index);
        goto RELEASE;
    }
    // Audio decoder.
    a_codec = avcodec_find_decoder(fmt_ctx->streams[as_index]->codecpar->codec_id);
    if (!a_codec) {
        LOG_W("audio avcodec_find_decoder failed");
        goto RELEASE;
    }
    // Audio decoder context.
    a_ctx = avcodec_alloc_context3(a_codec);
    if (!a_ctx) {
        LOG_W("audio avcodec_alloc_context3 failed");
        goto RELEASE;
    }
    ret = avcodec_parameters_to_context(a_ctx, fmt_ctx->streams[as_index]->codecpar);
    if (ret != 0) {
        LOG_W("audio avcodec_parameters_to_context failed, error = %d", ret);
        goto RELEASE;
    }
    // Number of decoding threads.
    a_ctx->thread_count = 1;
    // Open the audio decoder.
    ret = avcodec_open2(a_ctx, a_codec, 0);
    if (ret != 0) {
        LOG_W("audio avcodec_open2 failed, error = %d", ret);
        goto RELEASE;
    }

    // Video buffer.
    v_buf = new char[v_buf_size];
    // Audio resampler: output is s16 stereo at the source sample rate.
    // Many demuxers leave channel_layout at 0, which would make swr_init
    // fail — fall back to the default layout for the channel count.
    swr_ctx = swr_alloc_set_opts(0, av_get_default_channel_layout(out_nb_channels),
                                 AV_SAMPLE_FMT_S16,
                                 a_ctx->sample_rate,
                                 a_ctx->channel_layout
                                 ? a_ctx->channel_layout
                                 : av_get_default_channel_layout(a_ctx->channels),
                                 a_ctx->sample_fmt,
                                 a_ctx->sample_rate,
                                 0, 0);
    if (!swr_ctx) {
        LOG_W("swr_alloc_set_opts failed");
        goto RELEASE;
    }
    ret = swr_init(swr_ctx);
    if (ret != 0) {
        LOG_W("swr_init failed, error = %d", ret);
        goto RELEASE;
    }
    a_buf = new char[a_buf_size];

    // Configure the display surface.
    ANativeWindow_setBuffersGeometry(window, dstWidth, dstHeight, WINDOW_FORMAT_RGBA_8888);

    // Play audio with OpenSL ES.
    slResult = slCreateEngine(&slObject, 0, 0, 0, 0, 0);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slCreateEngine failed, error = %d", slResult);
        goto RELEASE;
    }
    slResult = (*slObject)->Realize(slObject, SL_BOOLEAN_FALSE);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slObject Realize failed, error = %d", slResult);
        goto RELEASE;
    }
    slResult = (*slObject)->GetInterface(slObject, SL_IID_ENGINE, &slEngine);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slObject GetInterface SL_IID_ENGINE failed, error = %d", slResult);
        goto RELEASE;
    }
    // Output mix.
    slResult = (*slEngine)->CreateOutputMix(slEngine, &slMix, 0, 0, 0);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slEngine CreateOutputMix failed, error = %d", slResult);
        goto RELEASE;
    }
    slResult = (*slMix)->Realize(slMix, SL_BOOLEAN_FALSE);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slMix Realize failed, error = %d", slResult);
        goto RELEASE;
    }
    slDataLocatorMix = {SL_DATALOCATOR_OUTPUTMIX, slMix};
    slDataSink = {&slDataLocatorMix, 0};
    slDataLocatorBuffer = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 10};
    // samplesPerSec is in milliHz per the OpenSL ES spec; use the real
    // decoded rate rather than a hardcoded 44.1 kHz so playback speed
    // matches the source.
    slDataFormat = {
            SL_DATAFORMAT_PCM,
            (SLuint32) out_nb_channels,  // stereo
            (SLuint32) (a_ctx->sample_rate * 1000), // milliHz
            SL_PCMSAMPLEFORMAT_FIXED_16, // 16-bit samples
            SL_PCMSAMPLEFORMAT_FIXED_16,
            SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT, // stereo layout
            SL_BYTEORDER_LITTLEENDIAN // little-endian byte order
    };
    slDataSource = {&slDataLocatorBuffer, &slDataFormat};
    // Audio player.
    slResult = (*slEngine)->CreateAudioPlayer(slEngine,
                                              &slPlayerObj,
                                              &slDataSource,
                                              &slDataSink,
                                              1,
                                              slPlayerIDs,
                                              slPlayerFlags);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slEngine CreateAudioPlayer failed, error = %d", slResult);
        goto RELEASE;
    }
    slResult = (*slPlayerObj)->Realize(slPlayerObj, SL_BOOLEAN_FALSE);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slPlayerObj Realize failed, error = %d", slResult);
        goto RELEASE;
    }
    slResult = (*slPlayerObj)->GetInterface(slPlayerObj, SL_IID_PLAY, &slPlayer);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slPlayerObj GetInterface SL_IID_PLAY failed, error = %d", slResult);
        goto RELEASE;
    }
    slResult = (*slPlayerObj)->GetInterface(slPlayerObj, SL_IID_BUFFERQUEUE, &slBuffer);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slPlayerObj GetInterface SL_IID_BUFFERQUEUE failed, error = %d", slResult);
        goto RELEASE;
    }
    slResult = (*slBuffer)->RegisterCallback(slBuffer, playCallback, 0);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slBuffer RegisterCallback failed, error = %d", slResult);
        goto RELEASE;
    }
    // Start playing.
    slResult = (*slPlayer)->SetPlayState(slPlayer, SL_PLAYSTATE_PLAYING);
    if (slResult != SL_RESULT_SUCCESS) {
        LOG_W("slPlayer SetPlayState failed, error = %d", slResult);
        goto RELEASE;
    }

    // Packet / frame for the decode loop.
    pkt = av_packet_alloc();
    frame = av_frame_alloc();

    // Decode loop.
    for (;;) {
        ret = av_read_frame(fmt_ctx, pkt);
        if (ret < 0) {
            if (ret == AVERROR_EOF) {
                LOG_W("播放完毕!");
            } else {
                LOG_W("播放发生错误，error=%d", ret);
            }
            break;
        }
        AVCodecContext *av_ctx = NULL;
        if (pkt->stream_index == vs_index) {
            // Video packet.
            av_ctx = v_ctx;
        } else if (pkt->stream_index == as_index) {
            // Audio packet.
            av_ctx = a_ctx;
        } else {
            // Other streams (subtitles, data) are skipped; the packet is
            // still unref'd below on the next av_read_frame call's reuse.
            av_packet_unref(pkt);
            continue;
        }
        // Submit the packet to the decoder; unref immediately since
        // avcodec_send_packet makes its own reference.
        ret = avcodec_send_packet(av_ctx, pkt);
        av_packet_unref(pkt);
        if (ret != 0) {
            LOG_W("avcodec_send_packet failed, error=%d", ret);
            continue;
        }
        // Drain all frames produced by this packet.
        for (;;) {
            ret = avcodec_receive_frame(av_ctx, frame);
            if (ret != 0) {
                // AVERROR(EAGAIN): decoder needs more input; AVERROR_EOF: done.
                break;
            }
            if (av_ctx->codec_type == AVMEDIA_TYPE_VIDEO) {
                // Video frame: (re)build the conversion context.
                sws_ctx = sws_getCachedContext(sws_ctx,
                                               frame->width,
                                               frame->height,
                                               (AVPixelFormat) frame->format,
                                               dstWidth,
                                               dstHeight,
                                               AV_PIX_FMT_RGBA,
                                               SWS_FAST_BILINEAR,
                                               0, 0, 0);
                if (!sws_ctx) {
                    LOG_W("sws_getCachedContext failed");
                    break;
                }
                // Convert the frame to RGBA.
                uint8_t *dst[AV_NUM_DATA_POINTERS] = {0};
                dst[0] = (uint8_t *) v_buf;
                int lines[AV_NUM_DATA_POINTERS] = {0};
                lines[0] = dstWidth * 4;
                int h = sws_scale(sws_ctx,
                                  (const uint8_t *const *) frame->data,
                                  frame->linesize,
                                  0,
                                  frame->height,
                                  dst,
                                  lines);
                if (h > 0) {
                    // Display the frame. ANativeWindow_lock can fail, and the
                    // locked buffer's stride (in pixels) may exceed dstWidth,
                    // so copy row by row instead of one flat memcpy.
                    if (ANativeWindow_lock(window, &window_buffer, 0) == 0) {
                        uint8_t *v_dst = (uint8_t *) window_buffer.bits;
                        uint8_t *v_src = (uint8_t *) v_buf;
                        int row_bytes = dstWidth * 4;
                        for (int row = 0; row < dstHeight && row < window_buffer.height; row++) {
                            memcpy(v_dst + row * window_buffer.stride * 4,
                                   v_src + row * row_bytes,
                                   (size_t) row_bytes);
                        }
                        ANativeWindow_unlockAndPost(window);
                    }
                    LOG_W("decode video height=%d", h);
                }
            } else if (av_ctx->codec_type == AVMEDIA_TYPE_AUDIO) {
                // Audio frame: resample into a_buf. The third argument is the
                // OUTPUT capacity in samples per channel, not the input count.
                uint8_t *dst[AV_NUM_DATA_POINTERS] = {0};
                dst[0] = (uint8_t *) a_buf;
                int nb_samples = swr_convert(swr_ctx,
                                             (uint8_t **) dst,
                                             a_buf_size / (out_nb_channels * 2),
                                             (const uint8_t **) frame->data,
                                             frame->nb_samples);
                if (nb_samples > 0) {
                    LOG_W("decode audio nb_samples=%d", nb_samples);
                }
            }
        }
    }

    RELEASE:
    if (slBuffer) {
        (*slBuffer)->Clear(slBuffer);
    }
    if (slPlayerObj) {
        (*slPlayerObj)->Destroy(slPlayerObj);
    }
    if (slMix) {
        (*slMix)->Destroy(slMix);
    }
    if (slObject) {
        (*slObject)->Destroy(slObject);
    }
    // Release the display window.
    if (window) {
        ANativeWindow_release(window);
    }
    // Free the decoded frame.
    if (frame) {
        av_frame_free(&frame);
    }
    // Free the packet.
    if (pkt) {
        av_packet_free(&pkt);
    }
    if (swr_ctx) {
        // Free the audio resampling context.
        swr_close(swr_ctx);
        swr_free(&swr_ctx);
    }
    delete[] a_buf;
    if (sws_ctx) {
        // Free the video conversion context.
        sws_freeContext(sws_ctx);
    }
    delete[] v_buf;
    if (a_ctx) {
        // Free the audio decoder context.
        avcodec_free_context(&a_ctx);
    }
    if (v_ctx) {
        // Free the video decoder context.
        avcodec_free_context(&v_ctx);
    }
    if (fmt_ctx) {
        // avformat_close_input() both closes the input and frees the context
        // (calling avformat_free_context() afterwards would double-free).
        avformat_close_input(&fmt_ctx);
    }
    env->ReleaseStringUTFChars(_url, url);
}

#define GET_STR(x) #x // 自动将 x 转换为 "x"

// GLSL
// 1. 顶点着色器
static const char *vertexShader = GET_STR(
        attribute
        vec4 aPosition;    // vertex position
        attribute
        vec2 aTexCoord;    // texture coordinate attribute
        varying
        vec2 vTexCoord;      // texture coordinate passed to the fragment shader
        void main() {
            vTexCoord = vec2(aTexCoord.x, 1.0 - aTexCoord.y); // flip Y axis
            gl_Position = aPosition;
        }
);
// 2. 片元着色器，软解码和部分x86硬解码
// 2. Fragment shader for YUV420P input — used with software decoding and
// some x86 hardware decoders. Comments are stripped by the preprocessor
// before GET_STR stringizes the code, so they never reach the GL compiler.
static const char *fragmentShaderYUV420P = GET_STR(
        precision
        mediump float; // default float precision
        varying
        vec2 vTexCoord;  // texture coordinate from the vertex shader
        uniform
        sampler2D yTexture; // luma plane (single-channel grayscale)
        uniform
        sampler2D uTexture;
        uniform
        sampler2D vTexture;
        void main() {
            vec3 yuv;
            vec3 rgb;
            yuv.r = texture2D(yTexture, vTexCoord).r;
            yuv.g = texture2D(uTexture, vTexCoord).r - 0.5;
            yuv.b = texture2D(vTexture, vTexCoord).r - 0.5;
            // YUV -> RGB conversion (column-major mat3, BT.601-style coefficients)
            rgb = mat3(1.0, 1.0, 1.0,
                       0.0, -0.39465, 2.03211,
                       1.13983, -0.5806, 0.0) * yuv;
            // Output pixel color
            gl_FragColor = vec4(rgb, 1.0);
        }
);

// Compile a GLSL shader of the given type from source.
// Returns the shader handle, or 0 on failure (logging the driver's
// info log and deleting the shader object so it is not leaked).
GLuint init_Shader(const char *code, GLenum type) {
    // Create the shader object.
    GLuint shader = glCreateShader(type);
    if (shader == 0) {
        LOG_W("glCreateShader %d failed", type);
        return 0;
    }
    // Attach the source.
    glShaderSource(shader, 1, &code, 0);
    // Compile.
    glCompileShader(shader);
    // Check compile status.
    GLint status;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
    if (status == 0) {
        // Surface the compiler error and free the object (the original
        // leaked the shader handle on failure).
        char info[512] = {0};
        glGetShaderInfoLog(shader, sizeof(info) - 1, 0, info);
        LOG_W("glCompileShader %d failed: %s", type, info);
        glDeleteShader(shader);
        return 0;
    }
    LOG_W("glCompileShader %d success", type);
    return shader;
}

extern "C"
JNIEXPORT void
Java_xyz_xgqn_media_XPlayer_xOpenWithOpenGLES(JNIEnv *env, jobject _instance, jstring _url,
                                             jobject _surface) {
    // Render a raw YUV420P file at _url into _surface via EGL + OpenGL ES 2,
    // uploading Y/U/V planes as three luminance textures each frame.
    // Assumes the file's frame size matches width/height below.
    //
    // File path (UTF-8); released at the end of the function.
    const char *url = env->GetStringUTFChars(_url, 0);
    // Native display window.
    ANativeWindow *window = NULL;
    // EGL state. Handles are initialized to their "none" values so the
    // RELEASE block can tear down only what was actually created.
    EGLDisplay eglDisplay = EGL_NO_DISPLAY;
    EGLConfig eglConfig;
    EGLint eglConfigNum;
    EGLint eglConfigSpec[] = {
            EGL_RED_SIZE, 8,
            EGL_GREEN_SIZE, 8,
            EGL_BLUE_SIZE, 8,
            EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
            EGL_NONE
    };
    EGLSurface eglSurface = EGL_NO_SURFACE;
    const EGLint eglContextSpec[] = {
            EGL_CONTEXT_CLIENT_VERSION, 2,
            EGL_NONE
    };
    EGLContext eglContext = EGL_NO_CONTEXT;
    // Shaders.
    GLuint vShader = 0;
    GLuint fShader = 0;
    // Shader program.
    GLuint program = 0;
    GLint linkStatus;
    // Vertex positions (triangle strip forming a full-screen quad).
    static float vertex[] = {
            1.0f, -1.0f, 0.0f,
            -1.0f, -1.0f, 0.0f,
            1.0, 1.0, 0.0f,
            -1.0f, 1.0, 0.0f
    };
    GLuint aPosition;
    // Texture coordinates.
    static float aText[] = {
            1.0f, 0.0f, // bottom-right
            0.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 1.0f
    };
    GLuint aTexCoord;
    GLuint textures[3] = {0};
    // Frame dimensions of the raw YUV file.
    int width = 424, height = 240;
    // One buffer per plane (Y full size, U/V quarter size for 4:2:0).
    unsigned char *buf[3] = {0};
    // Input file.
    FILE *fp = NULL;

    fp = fopen(url, "rb");
    if (!fp) {
        LOG_W("文件打开失败, url=%s", url);
        goto RELEASE;
    }

    // Obtain the display window.
    window = ANativeWindow_fromSurface(env, _surface);

    // EGL
    // 1. display
    eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (EGL_NO_DISPLAY == eglDisplay) {
        LOG_W("eglGetDisplay failed");
        goto RELEASE;
    }
    if (EGL_TRUE != eglInitialize(eglDisplay, 0, 0)) {
        LOG_W("eglInitialize failed");
        goto RELEASE;
    }
    // 2. surface
    // 2.1 choose a window config
    if (EGL_TRUE != eglChooseConfig(eglDisplay, eglConfigSpec, &eglConfig, 1, &eglConfigNum)) {
        LOG_W("eglChooseConfig failed");
        goto RELEASE;
    }
    // 2.2 create the surface
    eglSurface = eglCreateWindowSurface(eglDisplay, eglConfig, window, 0);
    if (EGL_NO_SURFACE == eglSurface) {
        LOG_W("eglCreateWindowSurface failed");
        goto RELEASE;
    }
    // 3 context
    // 3.1 create the GLES2 context
    eglContext = eglCreateContext(eglDisplay, eglConfig, EGL_NO_CONTEXT, eglContextSpec);
    if (EGL_NO_CONTEXT == eglContext) {
        LOG_W("eglCreateContext failed");
        goto RELEASE;
    }
    // 3.2 bind context + surface to this thread
    if (EGL_TRUE != eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
        LOG_W("eglMakeCurrent failed");
        goto RELEASE;
    }

    LOG_W("egl init success");

    // Shader setup.
    // Vertex shader.
    vShader = init_Shader(vertexShader, GL_VERTEX_SHADER);
    // YUV420P fragment shader.
    fShader = init_Shader(fragmentShaderYUV420P, GL_FRAGMENT_SHADER);
    // init_Shader returns 0 on failure; linking with a zero handle would
    // only fail later with a less useful error.
    if (vShader == 0 || fShader == 0) {
        goto RELEASE;
    }

    // Create the program.
    program = glCreateProgram();
    if (program == 0) {
        LOG_W("glCreateProgram failed");
        goto RELEASE;
    }
    // Attach the shaders.
    glAttachShader(program, vShader);
    glAttachShader(program, fShader);
    // Link.
    glLinkProgram(program);
    // Check link status.
    glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
    if (GL_TRUE != linkStatus) {
        LOG_W("glLinkProgram failed");
        goto RELEASE;
    }
    glUseProgram(program);
    LOG_W("glLinkProgram success");

    // Vertex positions: two triangles (strip) forming a quad.
    aPosition = (GLuint) glGetAttribLocation(program, "aPosition");
    glEnableVertexAttribArray(aPosition);
    // 3 floats per vertex, tightly packed (stride 12 bytes).
    glVertexAttribPointer(aPosition, 3, GL_FLOAT, GL_FALSE, 12, vertex);

    // Texture coordinates.
    aTexCoord = (GLuint) glGetAttribLocation(program, "aTexCoord");
    glEnableVertexAttribArray(aTexCoord);
    // 2 floats per vertex, tightly packed (stride 8 bytes).
    glVertexAttribPointer(aTexCoord, 2, GL_FLOAT, GL_FALSE, 8, aText);

    // Texture setup: bind sampler uniforms to texture units 0/1/2.
    glUniform1i(glGetUniformLocation(program, "yTexture"), 0);
    glUniform1i(glGetUniformLocation(program, "uTexture"), 1);
    glUniform1i(glGetUniformLocation(program, "vTexture"), 2);
    // Create the three plane textures.
    glGenTextures(3, textures);
    // Y plane: full resolution luminance texture.
    glBindTexture(GL_TEXTURE_2D, textures[0]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D,
                 0, // mip level
                 GL_LUMINANCE, // internal format: single-channel grayscale
                 width,
                 height,
                 0, // border (must be 0)
                 GL_LUMINANCE, // pixel format — must match internal format in ES2
                 GL_UNSIGNED_BYTE, // component type
                 NULL // no initial data; uploaded per frame via glTexSubImage2D
    );
    // U plane: half resolution in each dimension (4:2:0 subsampling).
    glBindTexture(GL_TEXTURE_2D, textures[1]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_LUMINANCE,
                 width / 2,
                 height / 2,
                 0,
                 GL_LUMINANCE,
                 GL_UNSIGNED_BYTE,
                 NULL
    );
    // V plane: half resolution in each dimension.
    glBindTexture(GL_TEXTURE_2D, textures[2]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_LUMINANCE,
                 width / 2,
                 height / 2,
                 0,
                 GL_LUMINANCE,
                 GL_UNSIGNED_BYTE,
                 NULL
    );
    // Per-frame upload and draw.
    buf[0] = new unsigned char[width * height];
    buf[1] = new unsigned char[width * height / 4];
    buf[2] = new unsigned char[width * height / 4];
    for (;;) {
        // Read one complete frame (Y, then U, then V plane). Checking the
        // byte counts (instead of feof before reading) stops cleanly at
        // EOF without rendering a stale or partial final frame.
        size_t y_size = (size_t) width * height;
        size_t uv_size = y_size / 4;
        if (fread(buf[0], 1, y_size, fp) != y_size ||
            fread(buf[1], 1, uv_size, fp) != uv_size ||
            fread(buf[2], 1, uv_size, fp) != uv_size) {
            LOG_W("已读到文件结尾。");
            break;
        }

        // Upload the Y plane to texture unit 0.
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, textures[0]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
                        GL_UNSIGNED_BYTE,
                        buf[0]);

        // Upload the U plane to texture unit 1.
        glActiveTexture(GL_TEXTURE0 + 1);
        glBindTexture(GL_TEXTURE_2D, textures[1]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE,
                        GL_UNSIGNED_BYTE,
                        buf[1]);

        // Upload the V plane to texture unit 2.
        glActiveTexture(GL_TEXTURE0 + 2);
        glBindTexture(GL_TEXTURE_2D, textures[2]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE,
                        GL_UNSIGNED_BYTE,
                        buf[2]);

        // Draw the quad.
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

        // Present.
        eglSwapBuffers(eglDisplay, eglSurface);
    }

    RELEASE:
    // GL objects need the context current to be deleted.
    if (eglContext != EGL_NO_CONTEXT) {
        if (program) {
            glDeleteProgram(program);
        }
        if (vShader) {
            glDeleteShader(vShader);
        }
        if (fShader) {
            glDeleteShader(fShader);
        }
    }
    // Tear down EGL: unbind, destroy context and surface, terminate.
    if (eglDisplay != EGL_NO_DISPLAY) {
        eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        if (eglContext != EGL_NO_CONTEXT) {
            eglDestroyContext(eglDisplay, eglContext);
        }
        if (eglSurface != EGL_NO_SURFACE) {
            eglDestroySurface(eglDisplay, eglSurface);
        }
        eglTerminate(eglDisplay);
    }
    // Free the plane buffers (delete[] NULL is a no-op).
    delete[] buf[0];
    delete[] buf[1];
    delete[] buf[2];
    // Close the input file.
    if (fp) {
        fclose(fp);
    }
    // Release the display window.
    if (window) {
        ANativeWindow_release(window);
    }
    env->ReleaseStringUTFChars(_url, url);
}