#include <jni.h>
#include <string>
#include <cstdio>       // FILE, fopen/fread
#include <cstring>      // memcpy (was relied on transitively)
#include <sys/time.h>   // gettimeofday (was relied on transitively)
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <EGL/egl.h>
#include <GLES2/gl2.h>
/*
 * Audio: OpenSL ES headers.
 * */
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
/**
 * Logging helpers: per-level Android logcat macros with fixed tags.
 */
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN,"ff_warn",__VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,"ff_info",__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,"ff_error",__VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,"ff_debug",__VA_ARGS__)

extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavcodec/jni.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
}

// Convert an AVRational to a double ratio.
// Returns 0 when either the numerator or the denominator is 0.
static double r2d(AVRational rational){
    if (rational.num == 0 || rational.den == 0){
        return 0;
    }
    return (double) rational.num / (double) rational.den;
}
/**
 * Millisecond clock for measuring short intervals.
 * Wraps every 100 hours (tv_sec % 360000), so it is NOT an absolute time —
 * only differences spanning less than the wrap period are meaningful.
 */
long long GetNowMs(){
    struct timeval tv;
    gettimeofday(&tv, NULL);
    // Keep only the low-order seconds so the result stays small; use a
    // long long multiply explicitly rather than relying on int range.
    int sec = tv.tv_sec % 360000;
    long long t = sec * 1000LL + tv.tv_usec / 1000;
    return t;
}

// Return a fixed greeting to Java as a UTF-8 jstring (template sanity check).
extern "C" JNIEXPORT jstring JNICALL
Java_com_example_openplaydemo_MainActivity_stringFromJNI(
        JNIEnv* env,
        jobject /* this */) {
    return env->NewStringUTF("Hello from C++");
}
extern "C"
JNIEXPORT jstring JNICALL
Java_com_example_openplaydemo_MainActivity_testFFmpegCodecOutput(JNIEnv *env, jobject thiz) {
    // Report the FFmpeg build configuration string to Java so the
    // integration can be verified from the UI.
    std::string info(" ffmpeg codec == ");
    info.append(avcodec_configuration());
    return env->NewStringUTF(info.c_str());
}
extern "C"
JNIEXPORT jstring JNICALL
Java_com_example_openplaydemo_MainActivity_testFFmpegFlow(JNIEnv *env, jobject thiz) {
    /**
     * Demo of the full FFmpeg decode flow: open /sdcard/test/test.mp4,
     * decode video (HW MediaCodec if available) and audio, convert video to
     * 1280x720 RGBA and audio to S16 PCM, and log an average decode fps
     * every 3 seconds. Returns a fixed status string to Java.
     */
    std::string ffmpegFlow = "ffmpeg flow";

    // Register muxers/demuxers and codecs, and init networking.
    // Skipping avcodec_register_all() on older FFmpeg can make
    // avcodec_open2() fail with -22 (EINVAL).
    av_register_all();
    avformat_network_init();
    avcodec_register_all();

    // Open the input file.
    AVFormatContext *ic = NULL;
    const char *path = "/sdcard/test/test.mp4";
    int re = avformat_open_input(&ic, path, 0, 0);
    if (re != 0){
        LOGE("avformat_open_input failed");
        // BUGFIX: previously fell through and dereferenced the NULL context.
        return env->NewStringUTF(ffmpegFlow.c_str());
    }
    LOGD("avformat_open_input success");

    // Probe stream information.
    re = avformat_find_stream_info(ic, 0);
    if (re != 0){
        LOGE("avformat_find_stream_info failed");
    }
    // BUGFIX: was logging the ic->streams pointer with %d; use nb_streams.
    LOGD("avformat_find_stream_info success, stream count === %d, total time == %lld",
         ic->nb_streams, ic->duration);

    // Walk the streams and log video/audio parameters.
    int fps = 0;
    for (int i = 0; i < ic->nb_streams; ++i) {
        AVStream *stream = ic->streams[i];
        if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){
            fps = (int) r2d(stream->avg_frame_rate);
            LOGD("video data -----fps = %d,width=%d height=%d codecid=%d pixformat=%d",
                 fps,
                 stream->codecpar->width,
                 stream->codecpar->height,
                 stream->codecpar->codec_id,
                 stream->codecpar->format);
        } else if (stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO){
            LOGD("audio data ----- sample_rate =  %d channels = %d sample_format = %d",
                 stream->codecpar->sample_rate,
                 stream->codecpar->channels,
                 stream->codecpar->format);
        }
    }

    // Resolve the best audio/video stream indices.
    int audioStream = av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
    LOGD("av_find_best_stream audiostream = %d", audioStream);
    int videoStream = av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    LOGD("av_find_best_stream videostream = %d", videoStream);
    if (audioStream < 0 || videoStream < 0){
        // BUGFIX: a negative index was previously used to index ic->streams.
        LOGE("missing audio or video stream");
        avformat_close_input(&ic);
        return env->NewStringUTF(ffmpegFlow.c_str());
    }

    // Video decoder: prefer the MediaCodec HW decoder, fall back to software.
    AVCodec *vcodec = avcodec_find_decoder_by_name("h264_mediacodec");
    if (!vcodec){
        // BUGFIX: the NULL HW decoder used to be passed on regardless.
        LOGE("h264_mediacodec not found, falling back to software decoder");
        vcodec = avcodec_find_decoder(ic->streams[videoStream]->codecpar->codec_id);
    }
    if (!vcodec){
        LOGE("avcodec_find_decoder video failed");
        avformat_close_input(&ic);
        return env->NewStringUTF(ffmpegFlow.c_str());
    }

    AVCodecContext *vctx = avcodec_alloc_context3(vcodec);
    avcodec_parameters_to_context(vctx, ic->streams[videoStream]->codecpar);
    vctx->thread_count = 1;
    re = avcodec_open2(vctx, vcodec, 0);
    if (re != 0){
        LOGE("avcodec_open2 video failed");
    }

    // Audio decoder (software).
    AVCodec *acodec = avcodec_find_decoder(ic->streams[audioStream]->codecpar->codec_id);
    if (!acodec){
        LOGE("avcodec_find_decoder audio failed");
    }
    AVCodecContext *actx = avcodec_alloc_context3(acodec);
    avcodec_parameters_to_context(actx, ic->streams[audioStream]->codecpar);
    actx->thread_count = 1;
    re = avcodec_open2(actx, acodec, 0);
    if (re != 0){
        LOGE("avcodec_open2 audio failed");
    }

    AVPacket *packet = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();

    // Pixel-format conversion target: 1280x720 RGBA.
    SwsContext *swsContext = NULL;
    int sws_width = 1280;
    int sws_height = 720;
    char *rgb = new char[sws_width * sws_height * 4];

    // Audio resample target: stereo S16 at the source sample rate.
    SwrContext *swrContext = swr_alloc();
    swrContext = swr_alloc_set_opts(swrContext,
                                    av_get_default_channel_layout(2),
                                    AV_SAMPLE_FMT_S16,
                                    actx->sample_rate,
                                    // BUGFIX: the input layout must describe the
                                    // source, not a hard-coded stereo layout
                                    // (matches playSDCardVideo below).
                                    av_get_default_channel_layout(actx->channels),
                                    actx->sample_fmt,
                                    actx->sample_rate,
                                    0,
                                    0);
    re = swr_init(swrContext);
    if (re != 0){
        LOGE("swr_init failed");
    }
    LOGD("swr_init success");
    char *pcm = new char[48000 * 4 * 2]; // generous: > 1s of 48kHz stereo S16

    // Decode/convert loop; logs an average decode fps every 3 seconds.
    long long start = GetNowMs();
    int frameCount = 0;
    for (;;){
        if (GetNowMs() - start >= 3000){
            LOGD("now decode fps is %d", frameCount / 3);
            start = GetNowMs();
            frameCount = 0;
        }
        re = av_read_frame(ic, packet);
        if (re != 0){
            // NOTE: a seek-and-loop benchmark variant used to live here but
            // was unreachable after the break; removed.
            LOGD(" read frame end");
            break;
        }
        LOGD("stream = %d size = %d pts = %lld flag=%d",
             packet->stream_index,
             packet->size,
             packet->pts,
             packet->flags);
        AVCodecContext *ctx = (packet->stream_index == audioStream) ? actx : vctx;

        // Hand the packet to the decoder, then drain every available frame.
        re = avcodec_send_packet(ctx, packet);
        av_packet_unref(packet);
        if (re != 0){
            continue;
        }
        for (;;){
            re = avcodec_receive_frame(ctx, frame);
            if (re != 0){
                break;
            }
            if (ctx == vctx){
                frameCount++;
                // Convert the decoded frame to 1280x720 RGBA.
                swsContext = sws_getCachedContext(swsContext,
                                                  frame->width,
                                                  frame->height,
                                                  (AVPixelFormat) frame->format,
                                                  sws_width,
                                                  sws_height,
                                                  AV_PIX_FMT_RGBA,
                                                  SWS_FAST_BILINEAR,
                                                  0,
                                                  0,
                                                  0);
                if (!swsContext){
                    LOGD("sws_getCachedContext failed");
                } else {
                    uint8_t *data[AV_NUM_DATA_POINTERS] = {0};
                    data[0] = (uint8_t *) rgb;
                    int lines[AV_NUM_DATA_POINTERS] = {0};
                    lines[0] = sws_width * 4;
                    int h = sws_scale(swsContext,
                                      (const uint8_t **) frame->data,
                                      frame->linesize,
                                      0,
                                      frame->height,
                                      data,
                                      lines);
                    LOGD("sws_scale = %d", h);
                }
            } else if (ctx == actx){
                // Resample to interleaved S16.
                uint8_t *out[2] = {0};
                out[0] = (uint8_t *) pcm;
                int len = swr_convert(swrContext,
                                      out,
                                      frame->nb_samples,
                                      (const uint8_t **) frame->data,
                                      frame->nb_samples);
                LOGD("swr_convert === %d", len);
            }
        }
    }

    // Cleanup — delete[] matches new[] (plain delete was undefined behavior),
    // and every FFmpeg context is released alongside the format context.
    delete[] rgb;
    delete[] pcm;
    av_packet_free(&packet);
    av_frame_free(&frame);
    sws_freeContext(swsContext);
    swr_free(&swrContext);
    avcodec_free_context(&vctx);
    avcodec_free_context(&actx);
    avformat_close_input(&ic);

    return env->NewStringUTF(ffmpegFlow.c_str());
}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_openplaydemo_RGBPlayer_playSDCardVideo(JNIEnv *env, jobject thiz, jstring path,
                                                        jobject surface) {
    /**
     * Decode the given file and render its video into the Surface.
     * Video frames are converted to 1280x720 RGBA and blitted through
     * ANativeWindow; audio is decoded and resampled to S16 (not played here).
     */
    const char *play_path = env->GetStringUTFChars(path, 0);

    // Register muxers/demuxers and codecs, and init networking.
    // Skipping avcodec_register_all() on older FFmpeg can make
    // avcodec_open2() fail with -22 (EINVAL).
    av_register_all();
    avformat_network_init();
    avcodec_register_all();

    // Open the input file.
    AVFormatContext *ic = NULL;
    int re = avformat_open_input(&ic, play_path, 0, 0);
    if (re != 0){
        LOGE("avformat_open_input failed");
        // BUGFIX: previously fell through and dereferenced the NULL context,
        // and the jstring chars were leaked.
        env->ReleaseStringUTFChars(path, play_path);
        return;
    }
    LOGD("avformat_open_input success");

    // Probe stream information.
    re = avformat_find_stream_info(ic, 0);
    if (re != 0){
        LOGE("avformat_find_stream_info failed");
    }
    // BUGFIX: was logging the ic->streams pointer with %d; use nb_streams.
    LOGD("avformat_find_stream_info success, stream count === %d, total time == %lld",
         ic->nb_streams, ic->duration);

    // Walk the streams and log video/audio parameters.
    int fps = 0;
    for (int i = 0; i < ic->nb_streams; ++i) {
        AVStream *stream = ic->streams[i];
        if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){
            fps = (int) r2d(stream->avg_frame_rate);
            LOGD("video data -----fps = %d,width=%d height=%d codecid=%d pixformat=%d",
                 fps,
                 stream->codecpar->width,
                 stream->codecpar->height,
                 stream->codecpar->codec_id,
                 stream->codecpar->format);
        } else if (stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO){
            LOGD("audio data ----- sample_rate =  %d channels = %d sample_format = %d",
                 stream->codecpar->sample_rate,
                 stream->codecpar->channels,
                 stream->codecpar->format);
        }
    }

    // Resolve the best audio/video stream indices.
    int audioStream = av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
    LOGD("av_find_best_stream audiostream = %d", audioStream);
    int videoStream = av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    LOGD("av_find_best_stream videostream = %d", videoStream);
    if (audioStream < 0 || videoStream < 0){
        // BUGFIX: a negative index was previously used to index ic->streams.
        LOGE("missing audio or video stream");
        avformat_close_input(&ic);
        env->ReleaseStringUTFChars(path, play_path);
        return;
    }

    // Video decoder: prefer the MediaCodec HW decoder, fall back to software.
    AVCodec *vcodec = avcodec_find_decoder_by_name("h264_mediacodec");
    if (!vcodec){
        // BUGFIX: the NULL HW decoder used to be passed on regardless.
        LOGE("h264_mediacodec not found, falling back to software decoder");
        vcodec = avcodec_find_decoder(ic->streams[videoStream]->codecpar->codec_id);
    }
    if (!vcodec){
        LOGE("avcodec_find_decoder video failed");
        avformat_close_input(&ic);
        env->ReleaseStringUTFChars(path, play_path);
        return;
    }

    AVCodecContext *vctx = avcodec_alloc_context3(vcodec);
    avcodec_parameters_to_context(vctx, ic->streams[videoStream]->codecpar);
    vctx->thread_count = 1;
    re = avcodec_open2(vctx, vcodec, 0);
    if (re != 0){
        LOGE("avcodec_open2 video failed");
    }

    // Audio decoder (software).
    AVCodec *acodec = avcodec_find_decoder(ic->streams[audioStream]->codecpar->codec_id);
    if (!acodec){
        LOGE("avcodec_find_decoder audio failed");
    }
    AVCodecContext *actx = avcodec_alloc_context3(acodec);
    avcodec_parameters_to_context(actx, ic->streams[audioStream]->codecpar);
    actx->thread_count = 1;
    re = avcodec_open2(actx, acodec, 0);
    if (re != 0){
        LOGE("avcodec_open2 audio failed");
    }

    AVPacket *packet = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();

    // Pixel-format conversion target: 1280x720 RGBA.
    SwsContext *swsContext = NULL;
    int sws_width = 1280;
    int sws_height = 720;
    char *rgb = new char[sws_width * sws_height * 4];

    // Audio resample target: stereo S16 at the source sample rate.
    SwrContext *swrContext = swr_alloc();
    swrContext = swr_alloc_set_opts(swrContext,
                                    av_get_default_channel_layout(2),
                                    AV_SAMPLE_FMT_S16,
                                    actx->sample_rate,
                                    av_get_default_channel_layout(actx->channels),
                                    actx->sample_fmt,
                                    actx->sample_rate,
                                    0,
                                    0);
    re = swr_init(swrContext);
    if (re != 0){
        LOGE("swr_init failed");
    }
    LOGD("swr_init success");
    char *pcm = new char[48000 * 4 * 2]; // generous: > 1s of 48kHz stereo S16

    // Window setup: fixed-size RGBA buffers matching the sws output.
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    ANativeWindow_setBuffersGeometry(nativeWindow, sws_width, sws_height, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer nativeWindowBuffer;

    // Decode/render loop; logs an average decode fps every 3 seconds.
    long long start = GetNowMs();
    int frameCount = 0;
    for (;;){
        if (GetNowMs() - start >= 3000){
            LOGD("now decode fps is %d", frameCount / 3);
            start = GetNowMs();
            frameCount = 0;
        }
        re = av_read_frame(ic, packet);
        if (re != 0){
            LOGD(" read frame end");
            break;
        }
        LOGD("stream = %d size = %d pts = %lld flag=%d",
             packet->stream_index,
             packet->size,
             packet->pts,
             packet->flags);
        AVCodecContext *ctx = (packet->stream_index == audioStream) ? actx : vctx;

        // Hand the packet to the decoder, then drain every available frame.
        re = avcodec_send_packet(ctx, packet);
        av_packet_unref(packet);
        if (re != 0){
            continue;
        }
        for (;;){
            re = avcodec_receive_frame(ctx, frame);
            if (re != 0){
                break;
            }
            if (ctx == vctx){
                frameCount++;
                swsContext = sws_getCachedContext(swsContext,
                                                  frame->width,
                                                  frame->height,
                                                  (AVPixelFormat) frame->format,
                                                  sws_width,
                                                  sws_height,
                                                  AV_PIX_FMT_RGBA,
                                                  SWS_FAST_BILINEAR,
                                                  0,
                                                  0,
                                                  0);
                if (!swsContext){
                    LOGD("sws_getCachedContext failed");
                } else {
                    uint8_t *data[AV_NUM_DATA_POINTERS] = {0};
                    data[0] = (uint8_t *) rgb;
                    int lines[AV_NUM_DATA_POINTERS] = {0};
                    lines[0] = sws_width * 4;
                    int h = sws_scale(swsContext,
                                      (const uint8_t **) frame->data,
                                      frame->linesize,
                                      0,
                                      frame->height,
                                      data,
                                      lines);
                    LOGD("sws_scale = %d", h);
                    if (h > 0){
                        // BUGFIX: check the lock result, and copy row by row —
                        // the window stride (in pixels) may exceed sws_width,
                        // so one flat memcpy can corrupt or overflow.
                        if (ANativeWindow_lock(nativeWindow, &nativeWindowBuffer, 0) == 0){
                            uint8_t *dst = (uint8_t *) nativeWindowBuffer.bits;
                            for (int row = 0; row < sws_height; ++row){
                                memcpy(dst + row * nativeWindowBuffer.stride * 4,
                                       rgb + row * sws_width * 4,
                                       (size_t) sws_width * 4);
                            }
                            ANativeWindow_unlockAndPost(nativeWindow);
                        }
                    }
                }
            } else if (ctx == actx){
                // Resample to interleaved S16.
                uint8_t *out[2] = {0};
                out[0] = (uint8_t *) pcm;
                int len = swr_convert(swrContext,
                                      out,
                                      frame->nb_samples,
                                      (const uint8_t **) frame->data,
                                      frame->nb_samples);
                LOGD("swr_convert === %d", len);
            }
        }
    }

    // Cleanup — delete[] matches new[] (plain delete was undefined behavior);
    // the native window and all FFmpeg contexts are released too.
    delete[] rgb;
    delete[] pcm;
    ANativeWindow_release(nativeWindow);
    av_packet_free(&packet);
    av_frame_free(&frame);
    sws_freeContext(swsContext);
    swr_free(&swrContext);
    avcodec_free_context(&vctx);
    avcodec_free_context(&actx);
    avformat_close_input(&ic);
    env->ReleaseStringUTFChars(path, play_path);
}



/*
 * Test: play raw PCM data through the OpenSL ES library.
 * */
// OpenSL ES engine object; kept at file scope so it outlives the helper call.
static SLObjectItf engineSL = NULL;

/**
 * Step 1: create and realize the OpenSL ES engine.
 * @return the engine interface, or NULL if any step fails.
 */
SLEngineItf CreateSL(){
    SLEngineItf engine = NULL;
    if (slCreateEngine(&engineSL, 0, 0, 0, 0, 0) != SL_RESULT_SUCCESS) return NULL;
    if ((*engineSL)->Realize(engineSL, SL_BOOLEAN_FALSE) != SL_RESULT_SUCCESS) return NULL;
    if ((*engineSL)->GetInterface(engineSL, SL_IID_ENGINE, &engine) != SL_RESULT_SUCCESS) return NULL;
    return engine;
}
/**
 * OpenSL ES buffer-queue callback: feed the next 1KB of PCM from
 * /sdcard/test/test.pcm into the player queue. OpenSL invokes this again
 * each time the previously enqueued buffer finishes playing.
 */
void PcmCall(SLAndroidSimpleBufferQueueItf audioBuf, void *context){
    LOGD("pcm play start");
    static FILE *fp = NULL;
    static char *buf = NULL;
    if (!buf){
        // BUGFIX: was 10224*1024 — almost certainly a typo; 1MB is already
        // far more than the 1KB read below needs.
        buf = new char[1024 * 1024];
    }
    if (!fp){
        fp = fopen("/sdcard/test/test.pcm", "rb");
    }
    if (!fp) return;
    if (feof(fp) == 0){
        int len = fread(buf, 1, 1024, fp);
        if (len > 0){
            (*audioBuf)->Enqueue(audioBuf, buf, len);
        } else {
            LOGD("pcm play end");
            // BUGFIX: was `delete buf` (new[] requires delete[]) and
            // `free(fp)` (a FILE* must be fclose'd). Also reset the statics
            // so a later callback cannot use dangling pointers.
            delete[] buf;
            buf = NULL;
            fclose(fp);
            fp = NULL;
        }
    }
}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_openplaydemo_PcmPlayer_playPcm(JNIEnv *env, jobject thiz) {
    // Play raw PCM through OpenSL ES.
    // Pipeline: engine -> output mix -> buffer-queue audio player; PcmCall
    // refills the queue each time an enqueued buffer finishes playing.

    //1. Create the engine.
    SLEngineItf engineItf = CreateSL();
    if (engineItf){
        LOGD("CreateSL success");
    } else{
        LOGE("CreateSL failed");
    }

    //2. Create and realize the output mix (the sink the player feeds).
    SLObjectItf mix = NULL;
    SLresult sLresult = 0;
    sLresult = (*engineItf)->CreateOutputMix(engineItf,&mix,0,0,0);
    if (sLresult != SL_RESULT_SUCCESS){
        LOGE("(*engineItf)->CreateOutputMix failed");
    } else{
        LOGD("(*engineItf)->CreateOutputMix success");
    }
    sLresult = (*mix)->Realize(mix,SL_BOOLEAN_FALSE);
    if (sLresult != SL_RESULT_SUCCESS){
        LOGE("(*mix)->Realize failed");
    } else{
        LOGD("(*mix)->Realize success");
    }
    SLDataLocator_OutputMix outputMix = {SL_DATALOCATOR_OUTPUTMIX,mix};
    SLDataSink audioSink = {&outputMix,0};

    //3. Describe the audio source.
    // Buffer queue allowing up to 10 pending buffers.
    SLDataLocator_AndroidSimpleBufferQueue queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,10};
    // PCM format: 2 channels, 44.1 kHz, 16-bit samples.
    SLDataFormat_PCM pcm = {
            SL_DATAFORMAT_PCM,
            2,
            SL_SAMPLINGRATE_44_1,
            SL_PCMSAMPLEFORMAT_FIXED_16,
            SL_PCMSAMPLEFORMAT_FIXED_16,
            SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
            SL_BYTEORDER_LITTLEENDIAN // byte order: little-endian
    };
    SLDataSource dataSource = {&queue,&pcm};

    //4. Create the player and fetch its interfaces.
    SLObjectItf player = NULL;
    SLPlayItf  playItf = NULL;
    SLAndroidSimpleBufferQueueItf pcmQueue = NULL;
    const SLInterfaceID ids[] = {SL_IID_BUFFERQUEUE};
    const SLboolean req[] = {SL_BOOLEAN_TRUE};
    sLresult = (*engineItf)->CreateAudioPlayer(engineItf,&player,&dataSource,&audioSink,sizeof(ids)/sizeof (SLInterfaceID),ids,req);
    if (sLresult != SL_RESULT_SUCCESS){
        LOGE("(*engineItf)->CreateAudioPlayer failed");
    } else{
        LOGD("(*engineItf)->CreateAudioPlayer success");
    }
    (*player)->Realize(player,SL_BOOLEAN_FALSE);
    // Play-state interface.
    sLresult = (*player)->GetInterface(player,SL_IID_PLAY,&playItf);
    if (sLresult != SL_RESULT_SUCCESS){
        LOGE("(*player)->GetInterface SL_IID_PLAY failed");
    }
    // Buffer-queue interface used for enqueuing PCM.
    sLresult = (*player)->GetInterface(player,SL_IID_BUFFERQUEUE,&pcmQueue);
    if (sLresult != SL_RESULT_SUCCESS){
        LOGE("(*player)->GetInterface SL_IID_BUFFERQUEUE failed");
    }

    // Register the refill callback.
    (*pcmQueue)->RegisterCallback(pcmQueue,PcmCall,0);

    // Start playing.
    (*playItf)->SetPlayState(playItf,SL_PLAYSTATE_PLAYING);
    // Enqueue one dummy byte to kick off the callback chain.
    (*pcmQueue)->Enqueue(pcmQueue,"",1);

    // To stop: set the STOPPED state and clear the queue.
    //(*playItf)->SetPlayState(playItf,SL_PLAYSTATE_STOPPED);
    //(*pcmQueue)->Clear(pcmQueue);
    // NOTE(review): engine/mix/player objects are never Destroy()ed here —
    // playback continues via callbacks after this JNI call returns.
}

/*
 * Render YUV data with OpenGL ES 2.
 * */
#define GET_STR(x) #x // stringify a token sequence so GLSL can be written inline
// Vertex shader: pass the position through unchanged; flip the texture Y
// coordinate (GL's texture origin is bottom-left, image data is top-down).
static const char *vertexShader = GET_STR(
        attribute vec4 aPosition;
        attribute vec2 aTexCoord;
        varying vec2 vTexCoord;
        void main(){
            vTexCoord = vec2(aTexCoord.x,1.0-aTexCoord.y);
            gl_Position = aPosition;
        });

// Fragment shader for yuv420p: sample Y/U/V from three single-channel
// (luminance) textures, recenter the chroma around 0, and convert to RGB
// with a standard YUV->RGB coefficient matrix.
static const char *fragmentShader = GET_STR(
        precision mediump float;
        varying vec2 vTexCoord;
        uniform sampler2D yTexture;
        uniform sampler2D uTexture;
        uniform sampler2D vTexture;
        void main(){
            vec3 yuv;
            vec3 rgb;
            yuv.r = texture2D(yTexture,vTexCoord).r;
            yuv.g = texture2D(uTexture,vTexCoord).r - 0.5;
            yuv.b = texture2D(vTexture,vTexCoord).r - 0.5;
            rgb = mat3(1.0,     1.0,    1.0,
                       0.0,-0.39465,2.03211,
                       1.13983,-0.58060,0.0)*yuv;
            gl_FragColor = vec4(rgb,1.0);
        });

/**
 * Compile one shader stage.
 * @param code GLSL source text
 * @param type GL_VERTEX_SHADER or GL_FRAGMENT_SHADER
 * @return the shader handle, or 0 on failure
 */
GLint InitShader(const char *code, GLint type){
    // 1. create the shader object
    GLint sh = glCreateShader(type);
    if (sh == 0){
        LOGE("glCreateShader failed");
        return 0;
    }

    // 2. attach the source (single string, NUL-terminated)
    glShaderSource(sh, 1, &code, 0);

    // 3. compile and check the result
    glCompileShader(sh);

    GLint status;
    glGetShaderiv(sh, GL_COMPILE_STATUS, &status);
    if (status == 0){
        LOGE("glCompileShader failed");
        // BUGFIX: the failed shader object was leaked.
        glDeleteShader(sh);
        return 0;
    }
    return sh;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_openplaydemo_YuvView_playSDCardYuvPlayer(JNIEnv *env, jobject thiz, jstring path,
                                                          jobject surface) {
    /**
     * Play a raw yuv420p file on the given Surface with OpenGL ES 2.
     * Steps: native window -> EGL display/surface/context -> shader program ->
     * three luminance textures (Y, U, V) updated per frame and drawn as a quad.
     *
     * Test-asset notes (ffmpeg CLI):
     *   ffprobe test.mp4                                          -- check size
     *   ffmpeg -i test.mp4 -s 640x480 -pix_fmt yuv420p test.yuv   -- extract yuv
     *   ffplay -f rawvideo -video_size 640x480 -pix_fmt yuv420p test.yuv
     * yuv420p stores Y, U, V as three separate planes (unlike NV12/NV21,
     * which interleave the chroma). The asset here is assumed 640x480.
     */
    const char *url = env->GetStringUTFChars(path, 0);

    LOGD("open url is %s", url);
    FILE *fp = fopen(url, "rb");
    if (!fp){
        LOGE("open file %s failed", url);
        // BUGFIX: the jstring chars were leaked on every early return.
        env->ReleaseStringUTFChars(path, url);
        return;
    }

    // Native window backing the Java Surface; EGL bridges GLES to it.
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);

    // Shared teardown for everything acquired before the EGL surface exists.
    // BUGFIX: fp / nativeWindow / url were leaked on every early return.
    auto cleanupBase = [&](){
        fclose(fp);
        ANativeWindow_release(nativeWindow);
        env->ReleaseStringUTFChars(path, url);
    };

    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (display == EGL_NO_DISPLAY){
        LOGE("eglGetDisplay failed");
        cleanupBase();
        return;
    }

    EGLBoolean result = eglInitialize(display, 0, 0);
    if (result != EGL_TRUE){
        LOGE("eglInitialize failed");
        cleanupBase();
        return;
    }

    // Choose an RGB888 window-renderable config.
    EGLint configSpec[] = {
          EGL_RED_SIZE, 8,
          EGL_GREEN_SIZE, 8,
          EGL_BLUE_SIZE, 8,
          EGL_SURFACE_TYPE, EGL_WINDOW_BIT, EGL_NONE
    };
    EGLint configNum;
    EGLConfig config;
    result = eglChooseConfig(display, configSpec, &config, 1, &configNum);
    if (result != EGL_TRUE){
        LOGE("eglChooseConfig failed");
        cleanupBase();
        return;
    }

    EGLSurface eglSurface = eglCreateWindowSurface(display, config, nativeWindow, 0);
    if (eglSurface == EGL_NO_SURFACE){
        LOGE("eglCreateWindowSurface failed");
        cleanupBase();
        return;
    }

    const EGLint ctxAttr[] = {
            EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE
    };
    EGLContext eglContext = eglCreateContext(display, config, EGL_NO_CONTEXT, ctxAttr);
    if (eglContext == EGL_NO_CONTEXT){
        LOGE("eglCreateContext failed");
        eglDestroySurface(display, eglSurface);
        cleanupBase();
        return;
    }

    // Full teardown once the surface and context exist.
    // NOTE(review): eglTerminate is deliberately not called — the default
    // display may be shared with other views in the app; confirm if needed.
    auto cleanupEGL = [&](){
        eglMakeCurrent(display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        eglDestroyContext(display, eglContext);
        eglDestroySurface(display, eglSurface);
        cleanupBase();
    };

    result = eglMakeCurrent(display, eglSurface, eglSurface, eglContext);
    if (result != EGL_TRUE){
        LOGE("eglMakeCurrent failed");
        cleanupEGL();
        return;
    }

    // Compile and link the YUV->RGB shader program.
    GLint vsh = InitShader(vertexShader, GL_VERTEX_SHADER);
    GLint fsh = InitShader(fragmentShader, GL_FRAGMENT_SHADER);
    if (vsh == 0 || fsh == 0){
        // BUGFIX: shader compile failures were previously ignored.
        LOGE("InitShader failed");
        cleanupEGL();
        return;
    }

    GLint program = glCreateProgram();
    if (program == 0){
        LOGE("glCreateProgram failed");
        cleanupEGL();
        return;
    }
    glAttachShader(program, vsh);
    glAttachShader(program, fsh);
    glLinkProgram(program);
    GLint status = 0;
    glGetProgramiv(program, GL_LINK_STATUS, &status);
    if (status != GL_TRUE){
        LOGE("glLinkProgram failed");
        cleanupEGL();
        return;
    }
    glUseProgram(program);

    // Full-screen quad (triangle strip) vertex positions.
    static float vers[] = {
            1.0f, -1.0f, 0.0f,
            -1.0f, -1.0f, 0.0f,
            1.0f, 1.0f, 0.0f,
            -1.0f, 1.0f, 0.0f,
    };
    GLuint apos = (GLuint) glGetAttribLocation(program, "aPosition");
    glEnableVertexAttribArray(apos);
    glVertexAttribPointer(apos, 3, GL_FLOAT, GL_FALSE, 12, vers);

    // Matching texture coordinates (the Y flip happens in the vertex shader).
    static float txts[] = {
            1.0, 0.0,
            0.0, 0.0,
            1.0, 1.0,
            0.0, 1.0,
    };
    GLuint atext = (GLuint) glGetAttribLocation(program, "aTexCoord");
    glEnableVertexAttribArray(atext);
    glVertexAttribPointer(atext, 2, GL_FLOAT, GL_FALSE, 8, txts);

    // Hard-coded test-asset dimensions (see the header comment).
    int width = 640;
    int height = 480;

    // Bind the three sampler uniforms to texture units 0/1/2.
    glUniform1i(glGetUniformLocation(program, "yTexture"), 0);
    glUniform1i(glGetUniformLocation(program, "uTexture"), 1);
    glUniform1i(glGetUniformLocation(program, "vTexture"), 2);

    // One luminance texture per plane; chroma planes are half-size each way.
    GLuint texts[3] = {0};
    glGenTextures(3, texts);

    glBindTexture(GL_TEXTURE_2D, texts[0]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);

    glBindTexture(GL_TEXTURE_2D, texts[1]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width >> 1, height >> 1, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);

    glBindTexture(GL_TEXTURE_2D, texts[2]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width >> 1, height >> 1, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);

    // Per-plane staging buffers for one frame of yuv420p.
    unsigned char *buf[3] = {0};
    buf[0] = new unsigned char[width * height];
    buf[1] = new unsigned char[width * height / 4];
    buf[2] = new unsigned char[width * height / 4];

    // Read one frame per iteration, upload the three planes, draw, swap.
    for (;;) {
        if (feof(fp) != 0){
            LOGD("play yuv file end");
            break;
        }
        // BUGFIX: check the read sizes so a truncated trailing frame is not
        // uploaded and drawn with stale plane data.
        size_t ny = fread(buf[0], 1, (size_t) (width * height), fp);
        size_t nu = fread(buf[1], 1, (size_t) (width * height) >> 2, fp);
        size_t nv = fread(buf[2], 1, (size_t) (width * height) >> 2, fp);
        if (ny < (size_t) (width * height) ||
            nu < (size_t) (width * height) >> 2 ||
            nv < (size_t) (width * height) >> 2){
            LOGD("play yuv file end");
            break;
        }
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, texts[0]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf[0]);

        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, texts[1]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width >> 1, height >> 1, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf[1]);

        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, texts[2]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width >> 1, height >> 1, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf[2]);

        // BUGFIX: each frame was drawn and swapped twice; once is enough.
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
        eglSwapBuffers(display, eglSurface);
    }

    // BUGFIX: the staging buffers and GL objects were leaked.
    delete[] buf[0];
    delete[] buf[1];
    delete[] buf[2];
    glDeleteTextures(3, texts);
    glDeleteProgram(program);
    glDeleteShader(vsh);
    glDeleteShader(fsh);
    cleanupEGL();
}

#include "./Player/Demux/OpFFDemux.h"
#include "./Player/Log/OpLog.h"
#include "./Player/Observer/IOpObserver.h"
#include "./Player/Demux/OpThread.h"
#include <thread>
using namespace std;
// Block the calling thread for the given number of milliseconds.
// NOTE(review): shadows the POSIX sleep(seconds) signature — callers in this
// file pass milliseconds.
void sleep(int millseconds){
    std::this_thread::sleep_for(std::chrono::milliseconds(millseconds));
}
// Test observer: receives demuxed packets and logs their size.
class TObservers:public IOpObserver{
public: void update(OpData data) {
         OPLOGI("Read data size is %d", data.size);
    }
};
extern "C"
JNIEXPORT jobject JNICALL
Java_com_example_openplaydemo_MainActivity_testDemuxMp4File(JNIEnv *env, jobject thiz,
                                                            jstring file_path) {
    /**
     * Demux test: open the given mp4 and start the demux worker.
     * NOTE(review): `demux` is intentionally not freed — Start() presumably
     * keeps reading from it on another thread (see OpThread); confirm before
     * adding a delete.
     * An observer can be attached before Start() via addObservers() to log
     * packet sizes (see TObservers above).
     */
    const char *url = env->GetStringUTFChars(file_path, 0);
    OpFFDemux *demux = new OpFFDemux();
    demux->Open(url);
    demux->Start();
    env->ReleaseStringUTFChars(file_path, url);

    // BUGFIX: the function is declared to return jobject but had no return
    // statement (undefined behavior in C++); return NULL to Java.
    return NULL;
}

#include "./Player/Video/OpEGL.h"
#include "./Player/Shader/OpShader.h"
#include "./Player/Video/IOpVideoView.h"
#include "./Player/Video/OpVideoView.h"
// Shared native video view; created in testAV() below and used by the
// JNI Init*View entry points to attach an ANativeWindow.
IOpVideoView *view = NULL;
/**
 * Earlier direct EGL/shader setup, kept for reference:
 *   ANativeWindow *window = ANativeWindow_fromSurface(env,surface);
 *   OpEGL::Get()->Init(window);
 *   OpShader shader;
 *   shader.Init();
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_example_openplaydemo_RGBPlayer_InitView(JNIEnv *env, jobject thiz, jobject surface) {
    // Bind the Android Surface to the native renderer.
    ANativeWindow *window = ANativeWindow_fromSurface(env, surface);
    // BUG FIX: `view` is only assigned in testAV(); calling InitView() before
    // the player pipeline is built dereferenced a NULL pointer. Guard it.
    if (view) {
        view->SetRender(window);
    }
}
#include "../Player/Decode/IOpDecode.h"
#include "../Player/Decode/OpFFDecode.h"
#include "../Player/Audio/IOpResample.h"
#include "../Player/Audio/OpFFResample.h"

extern "C"
JNIEXPORT void JNICALL
Java_com_example_openplaydemo_OpenVideoView_InitVideoVView(JNIEnv *env, jobject thiz,
                                                           jobject surface) {
    // Bind the Android Surface to the native renderer.
    ANativeWindow *window = ANativeWindow_fromSurface(env, surface);
    // BUG FIX: `view` is only assigned in testAV(); calling this entry point
    // before the player pipeline is built dereferenced a NULL pointer.
    if (view) {
        view->SetRender(window);
    }
}
#include "../Player/Audio/IOpAudioPlay.h"
#include "../Player/Audio/OpSLAudioPlay.h"
extern "C"
JNIEXPORT void JNICALL
Java_com_example_openplaydemo_OpenPlayAVAcrivity_testAV(JNIEnv *env, jobject thiz) {
    // Manually wires up the whole playback pipeline:
    //   demux -> {video decode -> view, audio decode -> resample -> SL audio}
    // All objects are heap-allocated and never freed: they live for the
    // duration of playback. Observers MUST be registered before Start() so no
    // packets are dropped.

    // Demux source: hard-coded test file on external storage.
    IOpDemux *demux = new OpFFDemux();
    demux->Open("/sdcard/test.mp4");

    // Video decoder, configured from the demuxer's video stream parameters.
    IOpDecode *vdecode = new OpFFDecode();
    vdecode->Open(demux->GetVideoParam());

    // Audio decoder, configured from the demuxer's audio stream parameters.
    IOpDecode *adecode = new OpFFDecode();
    adecode->Open(demux->GetAudioParam());

    // Demuxed packets fan out to both decoders.
    demux->addObservers(vdecode);
    demux->addObservers(adecode);

    // Decoded video frames go to the global view (also used by Init*View JNI hooks).
    view = new OpVideoView();
    vdecode->addObservers(view);

    // Resampler: input and output parameters are both the demuxer's audio
    // params here (identity resample) — output format tweaks would go in outParam.
    IOpResample *resample = new OpFFResample();
    OpParameter outParam = demux->GetAudioParam();
    resample->Open(demux->GetAudioParam(),outParam);
    adecode->addObservers(resample);

    // OpenSL ES audio sink consumes the resampled PCM.
    IOpAudioPlay *audioPlay = new OpSLAudioPlay();
    audioPlay->startPlay(outParam);
    resample->addObservers(audioPlay);

    // Start threads only after the observer graph is fully wired.
    demux->Start();
    vdecode->Start();
    adecode->Start();
}
/**
 * OpPlayer测试函数
 */
#include "../Player/IPlayer/IOpPlayer.h"


// Globals for the IOpPlayer-based tests below (currently only referenced
// from commented-out experiment code).
IOpVideoView *iview = NULL;
static IOpPlayer* player = NULL;

/**
 * JNI_OnLoad() is invoked automatically when the Java side loads this native
 * library via System.loadLibrary().
 */
#include "../Player/IPlayer/IOpPlayerBuilder.h"
#include "../Player/IPlayer/OpPlayerBuilder.h"

#include "../Player/IPlayer/IOpPlayerBuilder.h"
#include "../Player/IPlayer/IOpPlayProxy.h"
// Library load hook: caches the JavaVM so native code (e.g. hardware
// decoding via MediaCodec) can attach to the JVM later. The commented lines
// show the three wiring approaches explored in this demo; only the proxy
// variant is active.
extern "C" JNIEXPORT jint JNI_OnLoad(JavaVM *vm,void *res){
    //av_jni_set_java_vm(vm,0);
   // OpFFDecode::InitHard(vm);
   //OpPlayerBuilder::initHard(vm);// 2. builder pattern

   IOpPlayProxy::getInstance()->Init(vm);// 3. proxy pattern


//    player = OpPlayerBuilder::Get()->BuilderPlayer();
//    player->open("/sdcard/test.mp4");
//    player->start();

    // Minimum JNI version this library requires.
    return JNI_VERSION_1_4;
}
// Player instance for the builder-pattern experiment (see commented code below).
static IOpPlayer *play = NULL;
// Attaches the Android Surface to the player through the proxy singleton.
// Earlier variants (direct view, IOpPlayer singleton, builder) are kept as
// commented alternatives.
extern "C"
JNIEXPORT void JNICALL
Java_com_example_openplaydemo_IOpenVideoView_InitOpVideoView(JNIEnv *env, jobject thiz,
                                                             jobject surface) {
    // Convert the Java Surface into a native window handle.
    ANativeWindow* window = ANativeWindow_fromSurface(env,surface);
    //iview->SetRender(window);
    //IOpPlayer::get()->videoView->SetRender(window);
    //if (play) play->InitView(window);// 2. builder pattern
    IOpPlayProxy::getInstance()->InitView(window);// 3. proxy pattern
}
// Playback test showing three wiring styles; only the proxy variant (3) is
// active. Variants 1 (manual wiring) and 2 (builder) are kept as reference.
extern "C"
JNIEXPORT void JNICALL
Java_com_example_openplaydemo_TestOpenAVActivity_testOpenAV(JNIEnv *env, jobject thiz) {
    // TODO: implement testOpenAV()
//    IOpDemux*  demux = new OpFFDemux();
//
//    IOpDecode* vdecode = new OpFFDecode();
//
//    IOpDecode* adecode = new OpFFDecode();
//
//    demux->addObservers(vdecode);
//    demux->addObservers(adecode);
//
//    iview = new OpVideoView();
//    vdecode->addObservers(iview);
//
//    IOpResample* resample = new OpFFResample();
//    adecode->addObservers(resample);
//
//    IOpAudioPlay* audioPlay = new OpSLAudioPlay();
//    resample->addObservers(audioPlay);
//
//    IOpPlayer::get()->demux = demux;
//    IOpPlayer::get()->adecode = adecode;
//    IOpPlayer::get()->vdecode = vdecode;
//    IOpPlayer::get()->videoView = iview;
//    IOpPlayer::get()->resample = resample;
//    IOpPlayer::get()->audioPlay = audioPlay;
//
//    IOpPlayer::get()->open("/sdcard/test.mp4");
//    IOpPlayer::get()->start();



        // 1. Above: the plain, manually-wired approach.

        // 2. Builder pattern:
//        play = OpPlayerBuilder::get()->BuilderPlayer();
//        play->open("/sdcard/test.mp4");
//        play->start();

        // 3. Proxy pattern (active): open and start via the proxy singleton.
        IOpPlayProxy::getInstance()->open("/sdcard/test.mp4");
        IOpPlayProxy::getInstance()->start();
}