#include <jni.h>
#include <string>
#include <vector>

#include <android/native_window_jni.h>
#include "ffmpeg/MNQueue.h"
#include "util/LogUtil.h"
#include "MyGLRenderContext.h"
#include "TSFFmpeg.h"
#include "TSAudio.h"
#include "TsCallJava.h"
#include "TSPlayerStatus.h"
#define NATIVE_RENDER_CLASS_NAME "com/ts/view/MyGlSurfaceView"
extern "C"{
#include "include/libavformat/avformat.h"
#include "include/libavcodec/avcodec.h"
#include "include/libswscale/swscale.h"
#include "include/libswresample/swresample.h"
#include "include/libavutil/imgutils.h"
}
// Demuxer context shared by the JNI entry points and the worker threads below.
AVFormatContext *avFormatContext;

// Log tag macros, grouped by subsystem (all emitted at INFO level despite the
// I/D/E/V suffixes — the suffix only selects the tag string).
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,"初始化层",__VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_INFO,"h264层",__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_INFO,"解码层",__VA_ARGS__)
#define LOGV(...) __android_log_print(ANDROID_LOG_INFO,"同步层",__VA_ARGS__)
#define LOGQ(...) __android_log_print(ANDROID_LOG_INFO,"队列层",__VA_ARGS__)
#define LOGA(...) __android_log_print(ANDROID_LOG_INFO,"音频",__VA_ARGS__)

// Cached JavaVM pointer, set in JNI_OnLoad; used by native threads to attach.
JavaVM *jvm=NULL;
JNIEXPORT void JNICALL native_OnSurfaceCreated(JNIEnv *env, jobject instance)
{
    // Forward the GLSurfaceView "surface created" callback to the singleton
    // render context.
    MyGLRenderContext *context = MyGLRenderContext::GetInstance();
    context->OnSurfaceCreated();
}

/*
 * Class:     com_byteflow_app_MyNativeRender
 * Method:    native_OnDrawFrame
 * Signature: ()V
 */
JNIEXPORT void JNICALL native_OnDrawFrame(JNIEnv *env, jobject instance)
{
    // Per-frame render callback — delegate to the render context singleton.
    MyGLRenderContext *context = MyGLRenderContext::GetInstance();
    context->OnDrawFrame();
}
// BUG FIX: this function is registered via RegisterNatives with signature
// "(II)V", so the JVM invokes it as (JNIEnv*, jobject receiver, jint, jint).
// The original declaration omitted the jobject parameter, which made `width`
// receive the object reference and `height` receive the real width.
JNIEXPORT void JNICALL native_OnSurfaceChanged(JNIEnv *env, jobject instance, jint width, jint height)
{
    // Propagate the new viewport dimensions to the render context.
    MyGLRenderContext::GetInstance()->OnSurfaceChanged(width, height);
}
/*
 * Class:     com_byteflow_app_MyNativeRender
 * Method:    native_Init
 * Signature: ()V
 */
JNIEXPORT void JNICALL native_Init(JNIEnv *env, jobject instance)
{
    // Force-construct the render context singleton; the returned pointer is
    // intentionally discarded — only the side effect of creation matters.
    (void) MyGLRenderContext::GetInstance();
}

// Copies a Java byte[] image into a native buffer and hands it to the render
// context.
//
// @param format  image pixel format id (interpreted by MyGLRenderContext)
// @param width   image width in pixels
// @param height  image height in pixels
// @param imageData  packed pixel data from Java
JNIEXPORT void JNICALL native_SetImageData
(JNIEnv *env, jobject instance, jint format, jint width, jint height, jbyteArray imageData)
{
    int len = env->GetArrayLength(imageData);
    // RAII buffer: the original raw new[]/delete[] would leak if SetImageData
    // ever threw between the allocation and the delete.
    std::vector<uint8_t> buf(static_cast<size_t>(len));
    env->GetByteArrayRegion(imageData, 0, len, reinterpret_cast<jbyte *>(buf.data()));
    MyGLRenderContext::GetInstance()->SetImageData(format, width, height, buf.data());
    // Eagerly drop the local ref (harmless here, but keeps parity with the
    // original's cleanup for long-running callers).
    env->DeleteLocalRef(imageData);
}
// Native method table registered against NATIVE_RENDER_CLASS_NAME in
// JNI_OnLoad. Each entry: Java method name, JNI signature, C function pointer.
// Commented-out rows are placeholders for methods not yet implemented.
static JNINativeMethod g_RenderMethods[] = {
        {"native_Init",                      "()V",       (void *)(native_Init)},
//        {"native_UnInit",                    "()V",       (void *)(native_UnInit)},
         {"native_SetImageData",              "(III[B)V",  (void *)(native_SetImageData)},
//        {"native_SetImageDataWithIndex",     "(IIII[B)V", (void *)(native_SetImageDataWithIndex)},
//        {"native_SetParamsInt",              "(III)V",    (void *)(native_SetParamsInt)},
//        {"native_SetParamsFloat",            "(IFF)V",    (void *)(native_SetParamsFloat)},
//        {"native_SetAudioData",              "([S)V",     (void *)(native_SetAudioData)},
//        {"native_UpdateTransformMatrix",     "(FFFF)V",   (void *)(native_UpdateTransformMatrix)},
        {"native_OnSurfaceCreated",          "()V",       (void *)(native_OnSurfaceCreated)},
        {"native_OnSurfaceChanged",          "(II)V",     (void *)(native_OnSurfaceChanged)},
        {"native_OnDrawFrame",               "()V",       (void *)(native_OnDrawFrame)},
};



// Registers `methodNum` native methods from `methods` on class `className`.
// Returns JNI_TRUE on success, JNI_FALSE if the class is missing or
// RegisterNatives rejects the table.
static int RegisterNativeMethods(JNIEnv *env, const char *className, JNINativeMethod *methods, int methodNum)
{
    LOGCATE("RegisterNativeMethods");
    jclass clazz = env->FindClass(className);
    if (clazz == NULL) {
        LOGCATE("RegisterNativeMethods fail. clazz == NULL");
        return JNI_FALSE;
    }
    jint rc = env->RegisterNatives(clazz, methods, methodNum);
    if (rc < 0) {
        LOGCATE("RegisterNativeMethods fail");
        return JNI_FALSE;
    }
    return JNI_TRUE;
}

// Library entry point: caches the JavaVM and registers the renderer's native
// method table. Returns the required JNI version, or JNI_ERR on failure.
extern "C" jint JNI_OnLoad(JavaVM* vm, void* reserved) {
    JNIEnv *env = NULL;
    jvm = vm;
    if (vm->GetEnv((void **) &env, JNI_VERSION_1_6) != JNI_OK) {
        return JNI_ERR;
    }
    const int methodCount = sizeof(g_RenderMethods) / sizeof(g_RenderMethods[0]);
    if (RegisterNativeMethods(env, NATIVE_RENDER_CLASS_NAME,
                              g_RenderMethods, methodCount) != JNI_TRUE) {
        return JNI_ERR;
    }
    return JNI_VERSION_1_6;
}

// Unregisters all native methods previously bound to `className`.
// BUG FIX: the original dereferenced `env` via FindClass and only THEN
// checked `env != NULL` — the guard now runs before any use.
static void UnregisterNativeMethods(JNIEnv *env, const char *className)
{
    LOGCATE("UnregisterNativeMethods");
    if (env == NULL)
    {
        return;
    }
    jclass clazz = env->FindClass(className);
    if (clazz == NULL)
    {
        LOGCATE("UnregisterNativeMethods fail. clazz == NULL");
        return;
    }
    env->UnregisterNatives(clazz);
}


// Library teardown: undo the method registration done in JNI_OnLoad.
extern "C" void JNI_OnUnload(JavaVM *jvm, void *p)
{
    JNIEnv *env = NULL;
    jint rc = jvm->GetEnv((void **) (&env), JNI_VERSION_1_6);
    if (rc != JNI_OK) {
        return;
    }
    UnregisterNativeMethods(env, NATIVE_RENDER_CLASS_NAME);
}
// Video packet queue (filled by decodePacket, drained by decodeVideo).
MNQueue *videoQueue;
// Audio packet queue (filled by decodePacket, drained by decodeAudio).
MNQueue *audioQueue;
// Run flag for all three worker threads; set true before they are spawned.
bool isStart = false;




// Video decoder context (opened in splayer_play).
AVCodecContext *videoContext;
// Index of the video stream within avFormatContext, -1 if none found.
int videoIndex = -1;
// Index of the audio stream within avFormatContext, -1 if none found.
int audioIndex = -1;

// Target window obtained from the Java Surface; frames are blitted into it.
ANativeWindow *nativeWindow;
// Converter from the decoder's pixel format to RGBA.
SwsContext *swsContext;
// Destination frame whose data planes point into `outbuffer` (RGBA).
AVFrame *rgbFrame;

// Backing storage for rgbFrame, sized for width x height RGBA pixels.
uint8_t *outbuffer;

// Video width in pixels (from the decoder context).
int width;

// Video height in pixels (from the decoder context).
int height;

void *decodePacket(void *pVoid) {
//子线程中
    LOGI("==========读取线程");

    while (isStart) {
        if (videoQueue->size() > 100) {
            usleep(100 * 1000);
        }

        if (audioQueue->size() > 100) {
            usleep(100 * 1000);
        }
        AVPacket *avPacket = av_packet_alloc();
        int ret = av_read_frame(avFormatContext, avPacket);//压缩数据
        if (ret < 0) {
//            文件末尾
            break;
        }
        if (avPacket->stream_index == videoIndex) {
//视频包
            LOGD("视频包 %d", avPacket->size);
            videoQueue->push(avPacket);
        }else  if(avPacket->stream_index == audioIndex) {
//视频包
         audioQueue->push(avPacket);


        }
    }

    return NULL;
}
// Audio decoder context (opened in splayer_play).
AVCodecContext *audioContext;
// Scratch buffer descriptor filled by ANativeWindow_lock in decodeVideo.
ANativeWindow_Buffer windowBuffer;
// Video decode thread: drains the video packet queue, decodes each packet,
// converts the frame to RGBA via swsContext and blits it into nativeWindow.
// Returns NULL (pthread convention); pVoid is unused.
void *decodeVideo(void *pVoid) {
    LOGI("==========解码线程");
    while (isStart) {
        AVPacket *videoPacket = av_packet_alloc();
        // Blocks until a packet is available.
        videoQueue->get(videoPacket);
        int ret = avcodec_send_packet(videoContext, videoPacket);
        if (ret != 0) {
            // av_packet_free() both frees the packet and NULLs the pointer;
            // the original's extra av_free() on the NULLed pointer is dropped.
            av_packet_free(&videoPacket);
            continue;
        }
        AVFrame *videoFrame = av_frame_alloc();
        // BUG FIX: the original discarded this return value and re-tested the
        // stale result of avcodec_send_packet(), so receive failures
        // (e.g. EAGAIN before the first output frame) fell through and
        // rendered an empty/garbage frame. The dead `videoFrame->data;`
        // statement is removed as well.
        ret = avcodec_receive_frame(videoContext, videoFrame);
        if (ret != 0) {
            av_frame_free(&videoFrame);
            av_packet_free(&videoPacket);
            LOGE("=================");
            continue;
        }
        // Convert from the decoder's pixel format into rgbFrame/outbuffer (RGBA).
        sws_scale(swsContext, videoFrame->data,
                  videoFrame->linesize,
                  0, videoContext->height,
                  rgbFrame->data,
                  rgbFrame->linesize
        );
        ANativeWindow_lock(nativeWindow, &windowBuffer, NULL);
        uint8_t *dstWindow = static_cast<uint8_t *>(windowBuffer.bits);
        // Copy row by row: the window stride (in pixels, hence *4 bytes) may
        // differ from the RGBA frame's linesize, so one big memcpy is unsafe.
        for (int i = 0; i < height; ++i) {
            memcpy(dstWindow + i * windowBuffer.stride * 4,
                   outbuffer + i * rgbFrame->linesize[0],
                   rgbFrame->linesize[0]);
        }
        ANativeWindow_unlockAndPost(nativeWindow);
        av_frame_free(&videoFrame);
        av_packet_free(&videoPacket);
    }
    // BUG FIX: the function is declared `void *` but the original fell off the
    // end without returning — undefined behavior.
    return NULL;
}
// Java method id for playTrack([BI)V, resolved in splayer_play.
jmethodID playTrack;
// Global ref to the Java player object; used by decodeAudio's callbacks.
jobject mInstance;
void *decodeAudio(void *pVoid){
    //分配一个解码后的数据
    AVFrame  *frame=av_frame_alloc();
    // 分配重采样的上下文
    SwrContext *swrContext=swr_alloc();

    uint64_t out_ch_layout=AV_CH_LAYOUT_STEREO;
    enum AVSampleFormat out_formart=AV_SAMPLE_FMT_S16;
    int out_sample_rate = audioContext->sample_rate;
    //out_sample_rate 不变
    //申请初始化重采样内存
    swr_alloc_set_opts(swrContext,out_ch_layout,
                       out_formart,
                       out_sample_rate,
                       audioContext->channel_layout,
                       audioContext->sample_fmt,
                       audioContext->sample_rate,
                       0,NULL);
    //实例化重采样对象
    swr_init(swrContext);

    //分配输出的内存大小
    uint8_t  *outbuffer= (uint8_t *) av_malloc(44100*2);
    //获得对应的声道
    int out_channer_nb= av_get_default_channel_layout(AV_CH_LAYOUT_STEREO);
    while (isStart){
        //音频解码数据分配
          AVPacket *audioPacket=av_packet_alloc();
        //队列获取音频解码数据
          audioQueue->get(audioPacket);

          //送原数据去解码
          int ret= avcodec_send_packet(audioContext,
                                       audioPacket);
          if (ret != 0) {
            av_packet_free(&audioPacket);
            av_free(audioPacket);
            audioPacket = NULL;
            continue;
         }
          //获取解码后的数据丢到
         ret = avcodec_receive_frame(audioContext, frame);
         LOGD("receive_frame  音频数据%d", ret);
         if (ret != 0) {
            av_packet_free(&audioPacket);
            av_free(audioPacket);
            audioPacket = NULL;
            continue;
        }
        if (ret >= 0) {

            // 开始转换音频
            swr_convert(swrContext,&outbuffer,44100*2,
                        (const uint8_t**)(frame->data),
                        frame->nb_samples);

            //获取按照参数给的音频缓存大小
            int size = av_samples_get_buffer_size(NULL,
                                                  out_channer_nb,
                                                  frame->nb_samples,
                                                  AV_SAMPLE_FMT_S16, 1);
            //如果在主线程
            JNIEnv *jniEnv;
            if (jvm ->AttachCurrentThread(&jniEnv, 0) != JNI_OK) {
                continue;
            }
            jbyteArray byteArrays = jniEnv->NewByteArray(size);

            jniEnv->SetByteArrayRegion(byteArrays, 0,
                                       size,
                                       reinterpret_cast<const jbyte *>(outbuffer));
            jniEnv->CallVoidMethod(mInstance, playTrack, byteArrays, size);

//               回调  能 1  不能2  jni  的子线程
//            回调应用
            LOGD("--------2----------------");
            jniEnv->DeleteLocalRef(byteArrays);
            LOGD("--------3----------------");
            jvm->DetachCurrentThread();
        }


    }
    av_frame_free(&frame);
    swr_free(&swrContext);
    avcodec_close(audioContext);
    avformat_close_input(&avFormatContext);
    return NULL;
}

// Opens `url_`, finds and opens the video/audio decoders, sets up the RGBA
// conversion pipeline and the ANativeWindow, then spawns the demux, video
// and audio worker threads.
extern "C"
JNIEXPORT void JNICALL
Java_com_ts_utils_splayer_play(JNIEnv *env, jobject instance, jstring url_, jobject surface) {

    LOGI("---splayer_play");
    jclass palyer = env->GetObjectClass(instance);

    jmethodID onSizeChange = env->GetMethodID(palyer, "onSizeChange", "(II)V");
    jmethodID createAudio = env->GetMethodID(palyer, "createTrack", "(II)V");
    const char *url = env->GetStringUTFChars(url_, 0);
    // Global ref so the audio thread can call back into this object.
    mInstance = env->NewGlobalRef(instance);
    avformat_network_init();

    avFormatContext = avformat_alloc_context();
    // BUG FIX: failures used to fall through into stream probing with an
    // unopened context; bail out instead.
    if (avformat_open_input(&avFormatContext, url, NULL, NULL) < 0) {
        env->ReleaseStringUTFChars(url_, url);
        return;
    }

    int code = avformat_find_stream_info(avFormatContext, NULL);
    if (code < 0) {
        // BUG FIX: the original released the string here but then KEPT GOING
        // with an unprobed context.
        avformat_close_input(&avFormatContext);
        env->ReleaseStringUTFChars(url_, url);
        return;
    }

    // Locate the first video and audio streams and open a decoder for each.
    for (int i = 0; i < avFormatContext->nb_streams; ++i) {
        LOGI("index %d", avFormatContext->nb_streams);
        if (avFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoIndex = i;
            AVCodecParameters *parameters = avFormatContext->streams[i]->codecpar;
            LOGI("视频%d", i);
            LOGI("宽度width:%d ", parameters->width);
            LOGI("高度height:%d ", parameters->height);
            // Pick the decoder from the stream's codec id (not hard-coded).
            AVCodec *dec = avcodec_find_decoder(parameters->codec_id);
            videoContext = avcodec_alloc_context3(dec);
            // Copy the stream parameters into the decoder context, then open it.
            avcodec_parameters_to_context(videoContext, parameters);
            avcodec_open2(videoContext, dec, 0);
        } else if (avFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            audioIndex = i;
            AVCodecParameters *parameters = avFormatContext->streams[i]->codecpar;
            AVCodec *dec = avcodec_find_decoder(parameters->codec_id);
            audioContext = avcodec_alloc_context3(dec);
            avcodec_parameters_to_context(audioContext, parameters);
            avcodec_open2(audioContext, dec, 0);
        }
    }

    // BUG FIX: guard against input with no video stream — the original would
    // dereference a NULL videoContext below.
    if (videoContext == NULL) {
        env->ReleaseStringUTFChars(url_, url);
        return;
    }

    width = videoContext->width;
    height = videoContext->height;
    env->CallVoidMethod(instance, onSizeChange, width, height);
    env->CallVoidMethod(instance, createAudio, 44100,
                        av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO));
    playTrack = env->GetMethodID(palyer, "playTrack", "([BI)V");

    nativeWindow = ANativeWindow_fromSurface(env, surface);
    ANativeWindow_setBuffersGeometry(nativeWindow, width, height, WINDOW_FORMAT_RGBA_8888);

    // RGBA destination frame backed by outbuffer.
    rgbFrame = av_frame_alloc();
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 1);
    outbuffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    // Point rgbFrame's data planes / linesizes into outbuffer.
    av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize,
                         outbuffer,
                         AV_PIX_FMT_RGBA, width, height, 1);

    // Converter: decoder pix_fmt -> RGBA at the same resolution.
    swsContext = sws_getContext(width,
                                height,
                                videoContext->pix_fmt,
                                width,
                                height,
                                AV_PIX_FMT_RGBA,
                                SWS_BICUBIC,
                                NULL,
                                NULL,
                                NULL);

    // BUG FIX: the queue allocations were commented out, so decodePacket /
    // decodeVideo / decodeAudio dereferenced NULL queues immediately.
    audioQueue = new MNQueue();
    videoQueue = new MNQueue();

    pthread_t thread_decode;
    pthread_t thread_vidio;
    isStart = true;

    pthread_t thread_audio;

    pthread_create(&thread_decode, NULL, decodePacket, NULL);
    pthread_create(&thread_vidio, NULL, decodeVideo, NULL);
    pthread_create(&thread_audio, NULL, decodeAudio, NULL);

    env->ReleaseStringUTFChars(url_, url);
}
// Builds a multi-line report of the FFmpeg component versions this binary was
// linked against, logs it, and returns it to Java as a jstring.
extern "C"
JNIEXPORT jstring JNICALL
Java_com_ts_audiocamera_MainActivity_stringFromJNI(JNIEnv *env, jobject thiz) {
    std::string report;
    report += "libavcodec : ";
    report += AV_STRINGIFY(LIBAVCODEC_VERSION);
    report += "\nlibavformat : ";
    report += AV_STRINGIFY(LIBAVFORMAT_VERSION);
    report += "\nlibavutil : ";
    report += AV_STRINGIFY(LIBAVUTIL_VERSION);
    report += "\nlibavfilter : ";
    report += AV_STRINGIFY(LIBAVFILTER_VERSION);
    report += "\nlibswresample : ";
    report += AV_STRINGIFY(LIBSWRESAMPLE_VERSION);
    report += "\nlibswscale : ";
    report += AV_STRINGIFY(LIBSWSCALE_VERSION);
    report += "\navcodec_configure : \n";
    report += avcodec_configuration();
    report += "\navcodec_license : ";
    report += avcodec_license();
    LOGCATE("GetFFmpegVersion\n%s", report.c_str());

    return env->NewStringUTF(report.c_str());
}
// Player engine instance; created lazily in TsPlayer_prepare.
TSFFmpeg *mTSFFmpeg= nullptr;
// Native-to-Java callback bridge shared with the engine.
TsCallJava *mTsCallJava;
// Player status flags shared with the engine.
TSPlayerStatus *mTSPlayerStatus;

// Seeks the player engine to `secds` seconds; no-op if not yet prepared.
extern "C"
JNIEXPORT void JNICALL
Java_com_ts_audioplayer_TsPlayer_n_1seek(JNIEnv *env, jobject thiz, jint secds) {
    TSFFmpeg *player = mTSFFmpeg;
    if (player == nullptr) {
        return;
    }
    player->seek(secds);
}
// Resumes playback; no-op if the engine has not been created.
extern "C"
JNIEXPORT void JNICALL
Java_com_ts_audioplayer_TsPlayer_n_1resume(JNIEnv *env, jobject thiz) {
    TSFFmpeg *player = mTSFFmpeg;
    if (player == nullptr) {
        return;
    }
    player->resume();
}
// Sets the mute mode (value interpreted by the engine); no-op before prepare.
extern "C"
JNIEXPORT void JNICALL
Java_com_ts_audioplayer_TsPlayer_n_1mute(JNIEnv *env, jobject thiz, jint mute) {
    TSFFmpeg *player = mTSFFmpeg;
    if (player == nullptr) {
        return;
    }
    player->setMute(mute);
}
// Intended to set the playback volume to `percent`; the engine call is
// currently disabled, so this is a no-op.
// TODO(review): re-enable once TSFFmpeg::setVolume is available.
extern "C"
JNIEXPORT void JNICALL
Java_com_ts_audioplayer_TsPlayer_n_1volume(JNIEnv *env, jobject thiz, jint percent) {
    if (mTSFFmpeg!= nullptr){
//        mTSFFmpeg->setVolume(percent);
    }
}


// Lazily creates the player engine (callback bridge, status object, TSFFmpeg)
// and kicks off asynchronous preparation of `source_`. Repeated calls while
// an engine exists are ignored.
extern "C"
JNIEXPORT void JNICALL
Java_com_ts_audioplayer_TsPlayer_prepare(JNIEnv *env, jobject thiz, jstring source_) {
    const char *source = env->GetStringUTFChars(source_, nullptr);
    // FIX: corrected "urel" typo in the log message. NOTE(review): this logs
    // on every call, not only on failure — consider demoting/removing.
    LOGE("can not open url :%s", source);
    if (mTSFFmpeg == nullptr) {

        if (mTsCallJava == nullptr) {
            // Bridge used by native worker threads to call back into Java.
            mTsCallJava = new TsCallJava(jvm, env, thiz);
        }

        mTSPlayerStatus = new TSPlayerStatus();

        // NOTE(review): `source` is released below — presumably TSFFmpeg
        // copies the string in its constructor; verify, or it will dangle.
        mTSFFmpeg = new TSFFmpeg(mTSPlayerStatus, mTsCallJava, source);

        mTSFFmpeg->callJava = mTsCallJava;
        mTSFFmpeg->prepare();
    }

    env->ReleaseStringUTFChars(source_, source);
}
// Pauses playback; no-op if the engine has not been created.
// (Engine method name "pasue" is a typo in the TSFFmpeg API itself.)
extern "C"
JNIEXPORT void JNICALL
Java_com_ts_audioplayer_TsPlayer_n_1pause(JNIEnv *env, jobject thiz) {
    TSFFmpeg *player = mTSFFmpeg;
    if (player == nullptr) {
        return;
    }
    player->pasue();
}
// Starts playback; no-op if the engine has not been created.
extern "C"
JNIEXPORT void JNICALL
Java_com_ts_audioplayer_TsPlayer_n_1start(JNIEnv *env, jobject thiz) {
    LOGE("Java_com_ts_audioplayer_TsPlayer_n_1start");
    TSFFmpeg *player = mTSFFmpeg;
    if (player == nullptr) {
        return;
    }
    LOGE("Java_com_ts_audioplayer_TsPlayer_n_1start111");
    player->start();
}






// Opens `path`, probes its streams and opens a decoder for the video stream.
// The `surface` parameter is currently unused (rendering is not wired up yet).
extern "C"
JNIEXPORT void JNICALL
Java_com_ts_audiocamera_ffmepg_FFmpegVideoActivity_prepare(JNIEnv *env, jobject thiz, jstring path,
                                                           jobject surface) {
    const char *url = env->GetStringUTFChars(path, nullptr);

    avFormatContext = avformat_alloc_context();

    int ret = avformat_open_input(&avFormatContext, url, nullptr, nullptr);
    if (ret < 0) {
        env->ReleaseStringUTFChars(path, url);
        return;
    }

    ret = avformat_find_stream_info(avFormatContext, nullptr);
    if (ret < 0) {
        // BUG FIX: release the UTF chars and close the demuxer on this path
        // too (both were leaked).
        avformat_close_input(&avFormatContext);
        env->ReleaseStringUTFChars(path, url);
        return;
    }

    for (int i = 0; i < avFormatContext->nb_streams; ++i) {

        AVCodecParameters *mAVCodecParameters = avFormatContext->streams[i]->codecpar;
        if (mAVCodecParameters->codec_type == AVMEDIA_TYPE_VIDEO) {
            // Pick the decoder matching the stream and configure its context.
            AVCodec *pAvCodec = avcodec_find_decoder(mAVCodecParameters->codec_id);
            AVCodecContext *mAVCodecContext = avcodec_alloc_context3(pAvCodec);
            ret = avcodec_parameters_to_context(mAVCodecContext, mAVCodecParameters);
            if (ret < 0) {
                // BUG FIX: free the context and the UTF chars before bailing.
                avcodec_free_context(&mAVCodecContext);
                env->ReleaseStringUTFChars(path, url);
                return;
            }
            ret = avcodec_open2(mAVCodecContext, pAvCodec, nullptr);
            if (ret < 0) {
                avcodec_free_context(&mAVCodecContext);
                env->ReleaseStringUTFChars(path, url);
                return;
            }
            // NOTE(review): the opened context is never stored or used —
            // presumably later code will consume it; as written it leaks.
            // TODO: stash it in a global/member or free it here.
        } else if (mAVCodecParameters->codec_type == AVMEDIA_TYPE_AUDIO) {
            // Audio handling not implemented yet.
        }
    }

    // BUG FIX: the UTF chars were never released on the success path.
    env->ReleaseStringUTFChars(path, url);
}