#include <jni.h>
#include <cstdio>
#include <cstring>
#include <string>
#include <sys/time.h>
#include <android/log.h>
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
static SLObjectItf engineSL=NULL;
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN,"LOG",__VA_ARGS__)

//顶点着色器glsl
#define GET_STR(x) #x
static const char *vertexShader = GET_STR(
        attribute vec4 aPosition; //顶点坐标
        attribute vec2 aTexCoord; //材质顶点坐标
        varying vec2 vTexCoord;   //输出的材质坐标
        void main(){
            vTexCoord = vec2(aTexCoord.x,1.0-aTexCoord.y);
            gl_Position = aPosition;
        }
);

//片元着色器,软解码和部分x86硬解码
static const char *fragYUV420P = GET_STR(
        precision mediump float;    //精度
        varying vec2 vTexCoord;     //顶点着色器传递的坐标
        uniform sampler2D yTexture; //输入的材质（不透明灰度，单像素）
        uniform sampler2D uTexture;
        uniform sampler2D vTexture;
        void main(){
            vec3 yuv;
            vec3 rgb;
            yuv.r = texture2D(yTexture,vTexCoord).r;
            yuv.g = texture2D(uTexture,vTexCoord).r - 0.5;
            yuv.b = texture2D(vTexture,vTexCoord).r - 0.5;
            //yuv转rgb
            rgb = mat3(1.0,     1.0,    1.0,
                       0.0,-0.39465,2.03211,
                       1.13983,-0.58060,0.0)*yuv;
            //输出像素颜色
            gl_FragColor = vec4(rgb,1.0);
        }
);
GLint InitShader(const char *code,GLint type)
{
    //创建shader
    GLint sh = glCreateShader(type);
    if(sh == 0)
    {
        LOGW("glCreateShader %d failed!",type);
        return 0;
    }
    //加载shader
    glShaderSource(sh,
                   1,    //shader数量
                   &code, //shader代码
                   0);   //代码长度
    //编译shader
    glCompileShader(sh);

    //获取编译情况
    GLint status;
    glGetShaderiv(sh,GL_COMPILE_STATUS,&status);
    if(status == 0)
    {
        LOGW("glCompileShader failed!");
        return 0;
    }
    LOGW("glCompileShader success!");
    return sh;
}



//创建引擎
SLEngineItf CreateSL()
{
   SLresult re;
   SLEngineItf  en;
    re = slCreateEngine(&engineSL, 0, 0, 0, 0,0);
    if (re != SL_RESULT_SUCCESS) {
        return NULL;
    }
    re = (*engineSL)->Realize(engineSL, SL_BOOLEAN_FALSE);
    if (re != SL_RESULT_SUCCESS) {
        return NULL;
    }
    re = (*engineSL)->GetInterface(engineSL, SL_IID_ENGINE,&en);
    if (re != SL_RESULT_SUCCESS) {
        return NULL;
    }
    return en;
}

void PcmCall(SLAndroidSimpleBufferQueueItf bg, void *contex)
{
    LOGW("PcmCall!");
    static FILE *fp=NULL;
    static char *buf=NULL;
    if (!buf) {
        buf=new char[1024*1024];
    }
    if (!fp) {
        //打开资源路径
        fp = fopen("/sdcard/text.pcm", "rb");
    }
    if (!fp) {
        return;
    }
    if (feof(fp)==0) {
        //没到结尾
        int len=fread(buf, 1, 1024, fp);
        if (len > 0) {
            (*bg)->Enqueue(bg, buf, len);
        }
    }
}


// extern "C": give these FFmpeg/NDK headers C linkage (FFmpeg is a C library).
extern "C"{
   #include<libavcodec/avcodec.h>
   #include<libavformat/avformat.h>
   #include<libavutil/avutil.h>
   #include <libavcodec/jni.h>
   #include <libswscale/swscale.h>
   #include <libswresample/swresample.h>
   #include <android/native_window.h>
   #include<android/native_window_jni.h>
}


extern "C"
JNIEXPORT
jint JNI_OnLoad(JavaVM *vm,void *res){
    //手动设置 检索虚拟机环境
    av_jni_set_java_vm(vm,0);
    return JNI_VERSION_1_4;
}

//检查0运算 分数转换为浮点数
static double r2d(AVRational avRational){
    return avRational.num==0||avRational.den==0?0:(double)avRational.num/(double)avRational.den;
}

//当前时间戳

long long getNowMs()
{
    struct timeval tv;
    gettimeofday(&tv, NULL);
    int sec=tv.tv_sec%360000;
    long long t=sec*1000+tv.tv_usec/1000;
    return t;
}

extern "C" JNIEXPORT jstring JNICALL
Java_com_example_ffmpeg_MainActivity_stringFromJNI(
        JNIEnv* env,
        jobject /* this */) {
    std::string hello = "Hello from C++";
    //1 OpenSLEL引擎初始化 引擎对象是OpenSl ES提供api的唯一入口
    SLEngineItf eng=CreateSL();
    if (eng) {
        LOGW("Create engineItf success!");
    }else{
        LOGW("Create engineItf failed!");
    }
    //2 创建混音器
    SLObjectItf mix=NULL;
    SLresult re=0;
    re=(*eng)->CreateOutputMix(eng, &mix, 0, 0, 0);
    if (re != SL_RESULT_SUCCESS) {
        LOGW("CreateOutputMix failed!");
    }
    re = (*mix)->Realize(mix, SL_BOOLEAN_FALSE);
    if (re != SL_RESULT_SUCCESS) {
        LOGW("(*mix)->Realize failed!");
    }
    SLDataLocator_OutputMix  outputMix={SL_DATALOCATOR_OUTPUTMIX,mix};
    SLDataSink audioSink={&outputMix,0};
    //3 配置音频信息
    //缓冲队列
    SLDataLocator_AndroidSimpleBufferQueue que = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 10};
    //音频格式
    SLDataFormat_PCM pcm={
            SL_DATAFORMAT_PCM,
            2,//声道数
            SL_SAMPLINGRATE_44_1,//采样率
            SL_PCMSAMPLEFORMAT_FIXED_16,
            SL_PCMSAMPLEFORMAT_FIXED_16,
            SL_SPEAKER_FRONT_LEFT|SL_SPEAKER_FRONT_RIGHT,//声道
            SL_BYTEORDER_LITTLEENDIAN//字节序 小端（地位字节在前）
    };
    SLDataSource  ds={&que,&pcm};

    //4 创建播放器
    SLObjectItf player;//播放器对象存放
    //获取play接口
    SLPlayItf iplayer=NULL;
    //缓冲队列
    SLAndroidSimpleBufferQueueItf pcmQue=NULL;
    const SLInterfaceID  ids[]={SL_IID_BUFFERQUEUE};
    const SLboolean  req[]={SL_BOOLEAN_TRUE};
    // p6 添加接口后面才能够获取到
    re=(*eng)->CreateAudioPlayer(eng, &player, &ds, &audioSink, sizeof(ids)/sizeof(SLInterfaceID), ids, req);
    if (re != SL_RESULT_SUCCESS) {
        LOGW("CreateAudioPlayer failed!");
    }else{
        LOGW("CreateAudioPlayer success!");
    }
    //实例化 任何对象接口取出后都要实例化
    (*player)->Realize(player, SL_BOOLEAN_FALSE);

    re=(*player)->GetInterface(player,SL_IID_PLAY,&iplayer);
    if (re != SL_RESULT_SUCCESS) {
        LOGW("GetInterface SL_IID_PLAY failed!");
    }
    re=(*player)->GetInterface(player, SL_IID_BUFFERQUEUE, &pcmQue);
    if (re != SL_RESULT_SUCCESS) {
        LOGW("GetInterface SL_IID_BUFFERQUEUE failed!");
    }

    //设置回调函数 播放队列空调用
    (*pcmQue)->RegisterCallback(pcmQue,PcmCall,0);
    (*iplayer)->SetPlayState(iplayer, SL_PLAYSTATE_PLAYING);
    //启动队列回调
    (*pcmQue)->Enqueue(pcmQue, "", 1);

    return env->NewStringUTF(hello.c_str());

}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_ffmpeg_PlayView_Open(JNIEnv *env, jobject thiz, jstring url, jobject surface) {
    std::string hello = "Hello from C++";
    hello+=avcodec_configuration();
    //解封装
    //注册解封装器
    av_register_all();
    //注册解码器
    avcodec_register_all();
    avformat_network_init();
    //文件格式上下文
    AVFormatContext *ic=NULL;
    const char *path=env->GetStringUTFChars(url,0);
    //打开输入流并读取头部信息  第一步解封装
    int re = avformat_open_input(&ic, path, 0, 0);
    if (re == 0) {
        LOGW("avformat_open_input %s success", path);
        //时长和流的数量
        LOGW("duration= %lld nb_streams=%d", ic->duration,ic->nb_streams);
        //参数
        int fps=0;
        int width=0;
        int height=0;
        int codec_id=0;//编码格式
        int videoFormat=0;//像素格式
        int videoStream=0;
        int audioStream=0;
        //遍历取流
        for (int i = 0; i < ic->nb_streams; i++) {
            //资源格式没有头部信息或者头部信息不同 可以通过此方法读取部分流探测流信息
           //    int re1 = avformat_find_stream_info(ic, 0);
            //遍历流的数量 比如音频和视频
            AVStream *as = ic->streams[i];
            if (as->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
                //判断媒体类型是否是视频
                LOGW("视频数据");
                videoStream=i;
                fps=r2d(as->avg_frame_rate);
                LOGW("fps=%d,width=%d height=%d codeid=%d pix_format=%d",fps,
                     as->codecpar->width,as->codecpar->height,as->codecpar->codec_id,as->codecpar->format);
            }
            else if (as->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
                LOGW("音频数据");
                audioStream=i;
                //音频参数 采样率 频道数  音频格式
                LOGW("sample_rate=%d,channel=%d codeid=%d sample_format=%d",
                     as->codecpar->sample_rate,as->codecpar->channels,as->codecpar->codec_id,as->codecpar->format);
            }
        }
        //通过方法获取音视频流索引
//        audioStream=av_find_best_stream(ic,AVMEDIA_TYPE_AUDIO,-1,-1,NULL,0);
        LOGW("av_find_best_stream=%d", audioStream);
        //音频解码器
        AVCodec *acodec = avcodec_find_decoder(ic->streams[audioStream]->codecpar->codec_id);
//        acodec = avcodec_find_decoder_by_name("h264_mediacodec");
        if (!acodec) {
            LOGW("avcodec find audiocodec failed!");
            return;
        }else{
            LOGW("avcodec find audioAVCodec success!");
        }
        //音频解码上下文
        AVCodecContext *ac = avcodec_alloc_context3(acodec);
        //复制参数
        avcodec_parameters_to_context(ac,ic->streams[audioStream]->codecpar);
        //线程数设置成1
        ac->thread_count=8;
        //打开解码器
        re=avcodec_open2(ac, 0, 0);
        if (re!=0) {
            LOGW("avcodec_open2 audio failed!");
        }else{
            LOGW("avcodec_open2 video success!%d",re);
        }

        AVCodec *codec = avcodec_find_decoder(ic->streams[videoStream]->codecpar->codec_id);
        //硬解码
        codec = avcodec_find_decoder_by_name("h264_mediacodec");

        if (!codec) {
            LOGW("avcodec find failed!");
            return;
        }else{
            LOGW("avcodec find videoAVCodec success!");
        }
        //视频解码上下文
        AVCodecContext *vc = avcodec_alloc_context3(codec);
        //复制参数
        avcodec_parameters_to_context(vc,ic->streams[videoStream]->codecpar);
        //线程数设置成1
        vc->thread_count=8;
        //打开解码器
        re=avcodec_open2(vc, 0, 0);
        if (re != 0) {
            LOGW("avcodec_open2 video failed!%d",re);
            return;
        }else{
            LOGW("avcodec_open2 video success!%d",re);
        }

        //读取帧数据
        AVPacket *pkt=av_packet_alloc();
        AVFrame  *frame=av_frame_alloc();
        long long start=getNowMs();
        int frameCount=0;
        //初始化像素格式转换的上下文
        SwsContext *vctx=NULL;
        //输出宽高
        LOGW("the video with:%d",vc->width);
        int outWidth=1080;
        int outHeight=720;
        char *rgb = new char[1920*1080*4];

        //音频重采样上下文初始化
        SwrContext *actx=swr_alloc();
        //2p:通道格式默认 声道数 3p:输出格式
        actx = swr_alloc_set_opts(actx, av_get_default_channel_layout(2), AV_SAMPLE_FMT_S16,ac->sample_rate,
                                  av_get_default_channel_layout(ac->channels), ac->sample_fmt, ac->sample_rate,0,0);
        re = swr_init(actx);
        char *pcm = new char[48000*4*2];
        if(re!=0){
            LOGW("swr_init failed");
        }else{
            LOGW("swr_init success");
        }

        //初始化窗口客服端传递的容器 Android 相关的底层库
        ANativeWindow *nwin = ANativeWindow_fromSurface(env, surface);
        ANativeWindow_setBuffersGeometry(nwin, outWidth, outHeight, WINDOW_FORMAT_RGBA_8888);
        ANativeWindow_Buffer wbuffer;


        for (;;) {
            //检测单线程下解码速度
            if (getNowMs() - start >= 3000) {
                LOGW("now decode fps is %d", frameCount / 3);
                start = getNowMs();
                frameCount = 0;
            }

            int re = av_read_frame(ic, pkt);
            if (re != 0) {

                LOGW("读取到结尾处!");
                int pos = 20 * r2d(ic->streams[videoStream]->time_base);
                av_seek_frame(ic, videoStream, pos, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);
                continue;
            }

            AVCodecContext *cc = vc;
            if (pkt->stream_index == audioStream){
                cc = ac;
            }


            //发送到线程中解码
            re = avcodec_send_packet(cc, pkt);
            //清理
            int p = pkt->pts;
            av_packet_unref(pkt);

            if (re != 0) {
                LOGW("avcodec_send_packet failed!");
                continue;
            }else{
                LOGW("avcodec_send_packet success!");
            }
            for (;;) {
                re = avcodec_receive_frame(cc, frame);
                if (re != 0) {
                    //LOGW("avcodec_receive_frame failed!");
                    break;
                }
                LOGW("avcodec_receive_frame %lld", frame->pts);
                if (cc == vc) {
                    //视频
                    frameCount++;
                    //格式转化
                    //宽高比格式不变的情况下只初始化一次
                    vctx = sws_getCachedContext(vctx,frame->width,frame->height,(AVPixelFormat) frame->format,
                                                outWidth,
                                                outHeight,
                                                AV_PIX_FMT_RGBA,
                                                SWS_FAST_BILINEAR,
                                                0, 0, 0);
                    if (!vctx) {
                        LOGW("sws_getCachedContext failed!");
                    } else {
                        //这里可能受目标资源的影响 目前两个视频有一个转化失败
                        //转换失败的：原视频vc->width=1280 转换尺寸为1280 转换尺寸换成1080成功
                        //转化成功的：原视频vc->width=1080 转换尺寸为1280 转换尺寸换成1080成功
                        uint8_t *data[AV_NUM_DATA_POINTERS] = {0};
                        data[0] = (uint8_t *) rgb;
                        int lines[AV_NUM_DATA_POINTERS] = {0};
                        lines[0] = outWidth * 4;
                        int h = sws_scale(vctx,
                                          (const uint8_t **) frame->data,
                                          frame->linesize, 0,
                                          frame->height,
                                          data, lines);
                        //h  转换后的高度
                        LOGW("sws_scale = %d", h);
                        if (h > 0) {
                            ANativeWindow_lock(nwin,&wbuffer,0);
                            uint8_t *dst=(uint8_t*)wbuffer.bits;
                            //vWidth * vHeight * 4 RGBA8888 四个字节
                            //拷贝数据
                            memcpy(dst, rgb, outWidth * outHeight * 4);
                            ANativeWindow_unlockAndPost(nwin);
                        }
                    }
                }else{
                    //音频解码成功重采样
                    uint8_t *out[2];
                    out[0]=(uint8_t *)pcm;
                    //out 输出数据 p3:一个frame中有多少个音频样板
                    int len=swr_convert(actx,out,frame->nb_samples,
                                        (const uint8_t **)frame->data,frame->nb_samples);
                    //通道输出样品数
                    LOGW("swr_convert = %d", len);

                }
            }

        }
        delete rgb;
        avformat_close_input(&ic);
    }else{
        LOGW("avformat_open_input %s error", av_err2str(re));
    }

}extern "C"
JNIEXPORT void JNICALL
Java_com_example_ffmpeg_PlayView_Open2(JNIEnv *env, jobject thiz, jstring url, jobject surface) {
    //获取原始窗口
    ANativeWindow *nwin = ANativeWindow_fromSurface(env, surface);
    //EGL（Android使用EGL提供本地平台对OpenGL ES的实现）
    //1.display创建初始化
    EGLDisplay  display=eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (display == EGL_NO_DISPLAY) {
        LOGW("eglGetDisplay failed");
        return;
    }
    //初始化
    if (EGL_TRUE != eglInitialize(display, 0, 0)) {
        LOGW("eglInitialize failed");
        return;
    }
    //2 surface 窗口配置
    EGLConfig  config;
    EGLint configNum;
    EGLint configSpec[]={
            EGL_RED_SIZE,8,
            EGL_GREEN_SIZE,8,
            EGL_BLUE_SIZE,8,
            EGL_SURFACE_TYPE,EGL_WINDOW_BIT,EGL_NONE
    };
    if(EGL_TRUE != eglChooseConfig(display, configSpec,&config, 1, &configNum)){
        LOGW("eglChooseConfig failed");
        return;
    }
    //创建surface
    EGLSurface  eglSurface=eglCreateWindowSurface(display, config, nwin, 0);
    if (eglSurface == EGL_NO_SURFACE) {
        LOGW("eglCreateWindowSurface failed");
        return;
    }

    //3 context 创建上下文
    const EGLint  ctx[]={
            EGL_CONTEXT_CLIENT_VERSION,2,EGL_NONE
    };
    EGLContext context = eglCreateContext(display, config, EGL_NO_CONTEXT, ctx);
    if (context == EGL_NO_CONTEXT) {
        LOGW("eglCreateContext failed");
        return;
    }

    if(EGL_TRUE != eglMakeCurrent(display,eglSurface,eglSurface,context))
    {
        LOGW("eglMakeCurrent failed!");
        return;
    }

    LOGW("EGL Init Success!");

    //顶点和片元shader初始化
    //顶点shader初始化
    GLint vsh = InitShader(vertexShader,GL_VERTEX_SHADER);
    //片元yuv420 shader初始化
    GLint fsh = InitShader(fragYUV420P,GL_FRAGMENT_SHADER);


    /////////////////////////////////////////////////////////////
    //创建渲染程序
    GLint program = glCreateProgram();
    if(program == 0)
    {
        LOGW("glCreateProgram failed!");
        return;
    }
    //渲染程序中加入着色器代码
    glAttachShader(program,vsh);
    glAttachShader(program,fsh);

    //链接程序
    glLinkProgram(program);
    GLint status = 0;
    glGetProgramiv(program,GL_LINK_STATUS,&status);
    if(status != GL_TRUE)
    {
        LOGW("glLinkProgram failed!");
        return;
    }
    glUseProgram(program);
    LOGW("glLinkProgram success!");
    /////////////////////////////////////////////////////////////


    //加入三维顶点数据 两个三角形组成正方形
    static float vers[] = {
            1.0f,-1.0f,0.0f,
            -1.0f,-1.0f,0.0f,
            1.0f,1.0f,0.0f,
            -1.0f,1.0f,0.0f,
    };
    GLuint apos = (GLuint)glGetAttribLocation(program,"aPosition");
    glEnableVertexAttribArray(apos);
    //传递顶点
    glVertexAttribPointer(apos,3,GL_FLOAT,GL_FALSE,12,vers);

    //加入材质坐标数据
    static float txts[] = {
            1.0f,0.0f , //右下
            0.0f,0.0f,
            1.0f,1.0f,
            0.0,1.0
    };
    GLuint atex = (GLuint)glGetAttribLocation(program,"aTexCoord");
    glEnableVertexAttribArray(atex);
    glVertexAttribPointer(atex,2,GL_FLOAT,GL_FALSE,8,txts);

}

