#include <unistd.h>
#include <pthread.h>
#include "ggl.h"
#include "utils.h"
#include "scene.h"
#include "Section501.cpp"
#include "Section502.cpp"
#include "Section503.cpp"
#include "Section601.cpp"
#include "Section802.cpp"
#include "ffplayer/FFPlayerImpl.cpp"
#include "utils/FormatUtils.h"
#include "ffplayer/FFPlayer.h"

// In C++, import C headers with the extern "C" wrapper below; otherwise the
// compiler/linker reports scattered "undefined reference" errors that never
// point at the offending include, which makes debugging miserable.
#ifdef __cplusplus
extern "C" {
#endif
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavcodec/codec.h>
#include <libavutil/imgutils.h>
#include <libavutil/avutil.h>
#include <libavutil/log.h>
#include <libswscale/swscale.h>
#include <libavcodec/jni.h>
#include <libswresample/swresample.h>
#include <chrono>
#ifdef __cplusplus
} // endof extern "C"
#endif

/**
 * Callback for av_log output: formats one FFmpeg log line and forwards it
 * to the Android error log via LOGE.
 *
 * @param ptr   AVClass context pointer supplied by FFmpeg (may be NULL)
 * @param level FFmpeg log level of this message
 * @param fmt   printf-style format string
 * @param vl    variadic arguments for fmt
 */
void log_callback_test2(void *ptr, int level, const char *fmt, va_list vl) {
    // Fixed-size stack buffer replaces the old malloc/free pair, which was
    // never checked for NULL and paid a heap round trip on every log line.
    char line[128];
    static int print_prefix = 1;
    va_list vl2;
    va_copy(vl2, vl);
    av_log_format_line(ptr, level, fmt, vl2, line, sizeof line, &print_prefix);
    va_end(vl2);
    // av_log_format_line does not guarantee termination on truncation.
    line[sizeof line - 1] = '\0';
    LOGE("%s", line);
}

/**
 * Standard JNI entry point, invoked when this native library is loaded.
 * Hands the JavaVM to FFmpeg so hardware decoders such as h264_mediacodec
 * can be resolved (without it the lookup fails with -542398533).
 *
 * @param vm       the Java virtual machine handle
 * @param reserved unused
 * @return JNI_VERSION_1_6 on success, -1 if the JNIEnv lookup fails
 */
jint JNI_OnLoad(JavaVM *vm, void *reserved) {
    JNIEnv *env = nullptr;
    if (vm->GetEnv(reinterpret_cast<void **>(&env), JNI_VERSION_1_6) != JNI_OK) {
        return -1;
    }
    // Give FFmpeg the JVM handle for MediaCodec-backed decoders.
    av_jni_set_java_vm(vm, nullptr);
    // FFmpeg log callback / log level setup is intentionally left disabled;
    // re-enable via av_log_set_callback(log_callback_test2) when needed.
    return JNI_VERSION_1_6;
}

// Currently active render scene; created in setScene() below and consumed by
// the nativeOnSurface*/nativeOnDrawFrame callbacks (GL thread).
Scene *scene = nullptr;

extern "C"
JNIEXPORT void JNICALL
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_setScene(JNIEnv *env, jobject thiz,
                                                              jint sceneType,
                                                              jobject asset_manager) {
    // Resolve the native asset manager so scenes can load their resources.
    AAssetManager *aAssetManager = AAssetManager_fromJava(env, asset_manager);
    // NOTE(review): the previously assigned scene is never deleted, so each
    // call leaks it. Adding `delete scene;` needs confirmation that Scene has
    // a virtual destructor and that the GL thread is not mid-draw.
    switch (sceneType) {
        case 502:
            scene = new Section502(aAssetManager);
            break;
        case 503:
            scene = new Section503(aAssetManager);
            break;
        case 601:
            scene = new Section601(aAssetManager);
            break;
        case 802:
            scene = new Section802(aAssetManager);
            break;
        case 902:
            scene = new Section902(aAssetManager);
            break;
        case 501:
        default:
            // Unknown scene types fall back to Section501.
            scene = new Section501(aAssetManager);
            break;
    }
}

extern "C"
JNIEXPORT void JNICALL
// GLSurfaceView.Renderer.onSurfaceCreated hook: initializes the active scene.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_nativeOnSurfaceCreated(JNIEnv *env,
                                                                            jobject thiz) {
    // Guard against the renderer firing before setScene() was called; the old
    // code dereferenced a null `scene` in that case and crashed.
    if (scene != nullptr) {
        scene->init();
    }
}

extern "C"
JNIEXPORT void JNICALL
// GLSurfaceView.Renderer.onSurfaceChanged hook: forwards the new size.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_nativeOnSurfaceChanged(JNIEnv *env,
                                                                            jobject thiz,
                                                                            jint width,
                                                                            jint height) {
    // Null guard: surface callbacks can arrive before setScene() runs.
    if (scene != nullptr) {
        scene->setViewPortSize(width, height);
    }
}

extern "C"
JNIEXPORT void JNICALL
// GLSurfaceView.Renderer.onDrawFrame hook: renders one frame of the scene.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_nativeOnDrawFrame(JNIEnv *env, jobject thiz) {
    // Null guard: draw callbacks can arrive before setScene() runs.
    if (scene != nullptr) {
        scene->draw();
    }
}

extern "C"
JNIEXPORT void JNICALL
// Unpacks a packed 0xAARRGGBB color and installs it as the GL clear color.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_clearColor(JNIEnv *env, jobject thiz,
                                                                jint color) {
    // Normalize each 8-bit channel to the [0, 1] range OpenGL expects.
    GLfloat alpha = ((color >> 24) & 0xFF) / 255.0f;
    GLfloat red = ((color >> 16) & 0xFF) / 255.0f;
    GLfloat green = ((color >> 8) & 0xFF) / 255.0f;
    GLfloat blue = (color & 0xFF) / 255.0f;
    // Set the color used when the color buffer is cleared.
    glClearColor(red, green, blue, alpha);
}

/**
 * Thin JNI wrapper around glViewport: maps rendering output to the given
 * window-space rectangle (origin x/y, size width/height in pixels).
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_setViewPort(JNIEnv *env, jobject thiz,
                                                                 jint x, jint y, jint width,
                                                                 jint height) {
    glViewport(x, y, width, height);
}

extern "C"
JNIEXPORT void JNICALL
// Clears GL buffers selected by `mash`. Only value 0 is supported: it clears
// the color buffer using the color set via clearColor(). Anything else is a
// deliberate no-op.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_clear(JNIEnv *env, jobject thiz, jint mash) {
    if (mash == 0) {
        glClear(GL_COLOR_BUFFER_BIT);
    }
}

// Intentionally empty: asset-manager wiring happens in setScene() instead.
// Kept so the Java-side native method declaration keeps resolving.
extern "C"
JNIEXPORT void JNICALL
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_setAssetManager(JNIEnv *env, jobject thiz,
                                                                     jobject asset_manager) {
}
extern "C"
JNIEXPORT void JNICALL
// Pushes an RGB image from Java into the scene matching scene_type.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_setRGBPicture(JNIEnv *env, jobject thiz,
                                                                   jint scene_type,
                                                                   jint width,
                                                                   jint height,
                                                                   jbyteArray rgb_bytes) {
    // Copy the Java byte[] into a native buffer.
    // NOTE(review): ownership of byteData is unclear — if setPictureData does
    // not take ownership (or scene_type != 601) the buffer leaks; verify the
    // contract of ConvertJByteArrayToChars.
    unsigned char *byteData = ConvertJByteArrayToChars(env, rgb_bytes);
    if (scene_type == 601) {
        ((Section601 *) scene)->setPictureData(width, height, byteData);
    }
}
extern "C"
JNIEXPORT void JNICALL
// Pushes three YUV planes from Java into the scene matching scene_type.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_setYUVPicture(JNIEnv *env, jobject thiz,
                                                                   jint scene_type, jint width,
                                                                   jint height, jbyteArray y_data,
                                                                   jint y_pixel_stride,
                                                                   jbyteArray u_data,
                                                                   jint u_pixel_stride,
                                                                   jbyteArray v_data,
                                                                   jint v_pixel_stride) {
    // Copy each Java plane into its own native buffer.
    // NOTE(review): as in setRGBPicture, these buffers leak unless
    // setPictureData takes ownership — confirm ConvertJByteArrayToChars's
    // contract.
    unsigned char *yData = ConvertJByteArrayToChars(env, y_data);
    unsigned char *uData = ConvertJByteArrayToChars(env, u_data);
    unsigned char *vData = ConvertJByteArrayToChars(env, v_data);
    if (scene_type == 802) {
        ((Section802 *) scene)->setPictureData(width, height, yData, y_pixel_stride, uData,
                                               u_pixel_stride, vData, v_pixel_stride);
    }
}

extern "C"
JNIEXPORT void JNICALL
// Forwards a rotation angle (degrees, per Scene::setRotate's contract) to the
// active scene.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_setRotate(JNIEnv *env, jobject thiz,
                                                               jfloat angle) {
    // Null guard: the call can arrive before setScene(); the old code
    // dereferenced a null `scene` in that case.
    if (scene != nullptr) {
        scene->setRotate(angle);
    }
}

extern "C"
JNIEXPORT void JNICALL
// Logs every codec compiled into this FFmpeg build, tagged by media type.
Java_com_csw_android_ffmpegtest_utils_FFmpegUtils_printSupportFormat(JNIEnv *env, jobject thiz) {
    // av_codec_iterate() returns const pointers; keep the const instead of
    // casting it away as the old code did.
    const AVCodec *codec;
    void *iter = nullptr;
    while ((codec = av_codec_iterate(&iter)) != nullptr) {
        switch (codec->type) {
            case AVMEDIA_TYPE_VIDEO:
                LOGI("[Video]:%s", codec->name);
                break;
            case AVMEDIA_TYPE_AUDIO:
                LOGI("[Audio]:%s", codec->name);
                break;
            default:
                LOGI("[Other]:%s", codec->name);
                break;
        }
    }
}
// Pause flag for the playVideo() decode loop: pauseVideo() sets it to 1 to
// break the packet-reading loop.
// NOTE(review): written and read from different threads without atomics or a
// lock — consider std::atomic<int> if visibility ever matters here.
int isPause = 0;

void sendToScreen(AVFrame *avFrame, int width, int height);

/**
 * Copies one decoded YUV420P frame into freshly allocated Y/U/V planes and
 * hands them to the Section902 scene for rendering.
 *
 * NOTE(review): ownership of y/u/v appears to transfer to setPictureData —
 * confirm Section902 frees them, otherwise every frame leaks.
 */
void sendToScreen(AVFrame *avFrame, int width, int height) {
    // Allocate tightly packed planes (YUV420P: chroma is quarter-sized).
    auto *y = new unsigned char[width * height];
    auto *u = new unsigned char[width * height / 4];
    auto *v = new unsigned char[width * height / 4];
    // Decoder frames are often padded: rows in data[] are linesize[] bytes
    // apart, which can exceed the visible width. The old flat memcpy of
    // width*height bytes picked up the wrong pixels (and over-read the plane)
    // whenever linesize != width, so copy row by row instead.
    const int chromaW = width / 2;
    const int chromaH = height / 2;
    for (int row = 0; row < height; row++) {
        memcpy(y + row * width, avFrame->data[0] + row * avFrame->linesize[0], width);
    }
    for (int row = 0; row < chromaH; row++) {
        memcpy(u + row * chromaW, avFrame->data[1] + row * avFrame->linesize[1], chromaW);
        memcpy(v + row * chromaW, avFrame->data[2] + row * avFrame->linesize[2], chromaW);
    }
    ((Section902 *) scene)->setPictureData(width, height, y, u, v);
}

/**
 * Demo pthread entry point: logs a message once a second, three times.
 * Referenced only by the commented-out threading example in playVideo().
 */
void *thread(void *ptr) {
    int remaining = 3;
    while (remaining-- > 0) {
        sleep(1);
        LOGI(" a pthread");
    }
    return nullptr;
}

extern "C"
JNIEXPORT void JNICALL
/**
 * Blocking demo player: demuxes `path`, decodes video + audio on the calling
 * thread, pushes video frames to the Section902 scene via sendToScreen() and
 * PCM to the Java AudioTrack wrapper `audio_track_util`. Returns when the
 * file ends, an error occurs, or pauseVideo() sets isPause.
 *
 * NOTE(review): the early `return`s below leak avFormatContext, the codec
 * contexts and the buffers allocated up to that point — a goto-cleanup block
 * would fix this without changing the happy path.
 */
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_playVideo(JNIEnv *env, jobject thiz,
                                                               jstring path,
                                                               jobject audio_track_util) {
    // C++ multithreading example (kept disabled for reference)
//    pthread_t id;
//    int ret = pthread_create(&id, nullptr, thread, nullptr);
//    if(ret) {
//        LOGE("Create pthread error!");
//        return;
//    }
//    for(int i = 0;i < 3;i++) {
//        LOGI("This is the main process.");
//        sleep(1);
//    }
//    pthread_join(id, nullptr);
//    pthread_exit(nullptr);

    // Emit a marker log line.
    av_log(nullptr, AV_LOG_INFO, "LOG: begin!");
    // Convert the Java path string into a C string.
    // NOTE(review): never released with ReleaseStringUTFChars — leaks per call.
    const char *filePath = env->GetStringUTFChars(path, 0);
    av_log(nullptr, AV_LOG_INFO, "filePath: %s", filePath);
    // avformat handles container (de)muxing; allocate a context used to
    // demux the input and read the file header.
    AVFormatContext *avFormatContext = avformat_alloc_context();
    // Open the input stream; must be balanced by avformat_close_input().
    int result = avformat_open_input(&avFormatContext, filePath, nullptr, nullptr);
    if (result < 0) {// 0 means opened successfully
        av_log(nullptr, AV_LOG_ERROR, "can\'t open input %d", result);
        return;
    }
    // Log the media duration read from the header (AV_TIME_BASE units).
    av_log(nullptr, AV_LOG_INFO, "video duration: %" PRId64, avFormatContext->duration);
    // Probe stream info — needed especially for header-less formats, where
    // this derives frame rate and friends by reading packets.
    result = avformat_find_stream_info(avFormatContext, nullptr);
    if (result < 0) {// 0 means success
        av_log(nullptr, AV_LOG_ERROR, "can\'t find stream info");
        return;
    }
    // After demuxing, locate the video and audio streams in the container.
    int video_index = -1;
    int audio_index = -1;
    for (int i = 0; i < avFormatContext->nb_streams; i++) {
        // Select streams by codec type.
        if (avFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
            video_index = i;
        if (avFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
            audio_index = i;
    }
    if (video_index == -1 || audio_index == -1) {
        // Bail out unless BOTH a video and an audio stream were found.
        av_log(nullptr, AV_LOG_ERROR, "can\'t find video stream");
        return;
    }
    // Initialize the video decoder ----------------------------------------------------------------
    // avcodec does the decoding; allocate a decoder context.
    AVCodecContext *videoCodecContext = avcodec_alloc_context3(nullptr);
    // Fill codec parameters from the demuxed video stream.
    avcodec_parameters_to_context(videoCodecContext,
                                  avFormatContext->streams[video_index]->codecpar);
    // Look up the decoder matching the stream's codec id.
    const AVCodec *videoCodec = avcodec_find_decoder(videoCodecContext->codec_id);
//    const AVCodec *videoCodec = avcodec_find_decoder_by_name("h264_mediacodec");
    if (videoCodec == nullptr) {
        av_log(nullptr, AV_LOG_ERROR, "can\'t find decoder");
        return;
    } else {
        av_log(nullptr, AV_LOG_ERROR, "find decoder %s %s", videoCodec->name,
               videoCodec->wrapper_name);
    }
    // Open the codec context with the chosen decoder.
    result = avcodec_open2(videoCodecContext, videoCodec, nullptr);
    if (result < 0) {
        av_log(nullptr, AV_LOG_ERROR, "can\'t open video decoder %d", result);
        return;
    }
    AVPixelFormat avPixelFormat = videoCodecContext->pix_fmt;
    int vWidth = videoCodecContext->width;
    int vHeight = videoCodecContext->height;

    // Target frame + buffer used when converting non-YUV420P sources.
    AVFrame *yuv420pFrame = av_frame_alloc();
    int buffer_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, vWidth, vHeight, 1);
    uint8_t *out_buffer = (uint8_t *) av_malloc(buffer_size * sizeof(uint8_t));
    av_image_fill_arrays(
            yuv420pFrame->data,
            yuv420pFrame->linesize,
            out_buffer,
            AV_PIX_FMT_YUV420P,
            vWidth,
            vHeight,
            1
    );
    // Used to convert frames whose pixel format is not already YUV420P.
    SwsContext *swsContext = sws_getContext(
            vWidth, vHeight, videoCodecContext->pix_fmt,
            vWidth, vHeight, AV_PIX_FMT_YUV420P,
            SWS_BICUBIC, nullptr, nullptr, nullptr);

    // Initialize the audio decoder ----------------------------------------------------------------
    // Allocate a decoder context.
    AVCodecContext *audioCodecContext = avcodec_alloc_context3(nullptr);
    // Fill codec parameters from the demuxed audio stream.
    avcodec_parameters_to_context(audioCodecContext,
                                  avFormatContext->streams[audio_index]->codecpar);
    // Look up the decoder matching the stream's codec id.
    const AVCodec *audioCodec = avcodec_find_decoder(audioCodecContext->codec_id);
    if (audioCodec == nullptr) {
        av_log(nullptr, AV_LOG_ERROR, "can\'t find decoder");
        return;
    } else {
        av_log(nullptr, AV_LOG_ERROR, "find decoder %s %s", audioCodec->name,
               audioCodec->wrapper_name);
    }
    // Open the codec context with the chosen decoder.
    result = avcodec_open2(audioCodecContext, audioCodec, nullptr);
    if (result < 0) {
        av_log(nullptr, AV_LOG_ERROR, "can\'t open audio decoder %d", result);
        return;
    }
    // Unified output audio parameters: stereo, 16-bit PCM, source sample rate.
    SwrContext *swrContext = swr_alloc();
    uint8_t *out_v_buffer = (uint8_t *) av_malloc(44100 * 2);
    uint64_t out_ch_layout = AV_CH_LAYOUT_STEREO;
    enum AVSampleFormat out_format = AV_SAMPLE_FMT_S16;
    int out_sample_rate = audioCodecContext->sample_rate;
    swr_alloc_set_opts(
            swrContext,
            out_ch_layout,
            out_format,
            out_sample_rate,
            audioCodecContext->channel_layout,
            audioCodecContext->sample_fmt,
            audioCodecContext->sample_rate,
            0,
            NULL
    );
    swr_init(swrContext);

    int out_channel_nb = av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);


    // Packet buffer (compressed data read from the demuxer).
    AVPacket *avPacket = av_packet_alloc();
    // Frame buffer (decoded data), shared by the video and audio paths.
    AVFrame *avFrame = av_frame_alloc();
    int isLog = -1;
    int frames = 30;
//    jclass seakbar = jniEnv->GetObjectClass(seekbar);
//    jmethodID set_seekbar_progress = jniEnv->GetMethodID(seakbar, "setProgress", "(I)V");
    isPause = 0;

    // Wall-clock start time used as the presentation clock for A/V pacing.
    long startTime = std::chrono::duration_cast<std::chrono::milliseconds>(
            std::chrono::system_clock::now().time_since_epoch()).count();
    long currTime;
    double showTime;
    double sleepTime;

    // Reflection-style lookups: resolve the Java AudioTrack helper's methods
    // so PCM can be pushed back up to Java.
    jclass david_player = env->GetObjectClass(audio_track_util);
    jmethodID createAudio = env->GetMethodID(david_player, "createAudioTrack", "(II)V");
    env->CallVoidMethod(audio_track_util, createAudio, 44100, out_channel_nb);
    jmethodID audio_write = env->GetMethodID(david_player, "playBuffer", "([BI)V");
    jmethodID destroyAudio = env->GetMethodID(david_player, "destroyAudioTrack", "()V");
    av_log(nullptr, AV_LOG_INFO, "audioindex : %d  videoindex %d", audio_index, video_index);
    // Read the input packet by packet.
    while (av_read_frame(avFormatContext, avPacket) >= 0) {
        // dts is when the packet should be decoded; a fuller player would
        // delay submission to the decoder until that time.
//        avPacket->dts;
        if (avPacket->stream_index == video_index) {
            // Video packet: feed it to the video decoder.
            result = avcodec_send_packet(videoCodecContext, avPacket);
            // Log frame rate / time base once, on the first video packet.
            if (isLog == -1) {
                frames = avFormatContext->streams[video_index]->avg_frame_rate.num /
                         avFormatContext->streams[video_index]->avg_frame_rate.den;
                av_log(nullptr, AV_LOG_INFO, "frames : %d", frames);
                av_log(nullptr, AV_LOG_INFO, "timebase : %f", av_q2d(videoCodecContext->time_base));
                isLog = 1;
            }
            // Decoder error (EAGAIN/EOF are expected flow control, not errors).
            if (result < 0 && result != AVERROR(EAGAIN) && result != AVERROR_EOF) {
                av_log(nullptr, AV_LOG_ERROR, "PLAYER ERROR");
                break;
            }
            // NOTE(review): statement below has no effect — leftover debugging?
            avPacket->dts;
            while (avcodec_receive_frame(videoCodecContext, avFrame) == 0) {
                // Got one decoded frame.
                // pts says when this frame should be displayed; convert it to
                // seconds via the stream time base and sleep until then.
                showTime = avFrame->pts * av_q2d(avFormatContext->streams[video_index]->time_base);
                LOGI("avFrame showTime %lf", showTime);
                currTime = std::chrono::duration_cast<std::chrono::milliseconds>(
                        std::chrono::system_clock::now().time_since_epoch()).count();
                LOGI("avFrame playTime %ld", currTime - startTime);
                sleepTime = showTime * 1000 - (currTime - startTime);
                LOGI("avFrame sleepTime %lf", sleepTime);
                // Sleep (microseconds) until the frame's presentation time.
                // NOTE(review): if decoding falls behind, sleepTime goes
                // negative and the conversion to usleep's unsigned argument
                // yields a huge value — guard with sleepTime > 0.
                usleep(sleepTime * 1000);
                // Convert to YUV420P if necessary, then hand to the renderer.
                if (avPixelFormat == AV_PIX_FMT_YUV420P) {
                    sendToScreen(avFrame, vWidth, vHeight);
                } else {
                    result = sws_scale(
                            swsContext,
                            (const uint8_t *const *) avFrame->data,
                            avFrame->linesize,
                            0,
                            avFrame->height,
                            yuv420pFrame->data,
                            yuv420pFrame->linesize
                    );
                    if (result < 0) {
                        av_log(nullptr, AV_LOG_ERROR, "player error 4");
                        return;
                    }
                    sendToScreen(yuv420pFrame, vWidth, vHeight);
                }
            }
        }
        if (avPacket->stream_index == audio_index) {
            result = avcodec_send_packet(audioCodecContext, avPacket);
            if (result < 0 && result != AVERROR(EAGAIN) && result != AVERROR_EOF) {
                av_log(nullptr, AV_LOG_ERROR, "PLAYER ERROR BY audio");
                break;
            }
            result = avcodec_receive_frame(audioCodecContext, avFrame);
            // For audio, one packet generally yields one frame, so receive once.
            if (result == AVERROR(EAGAIN)) {
                av_log(nullptr, AV_LOG_ERROR, "player error 2");
                continue;
            }
            if (result < 0) {
                av_log(nullptr, AV_LOG_ERROR, "player error 4");
                return;
            } else {
                // NOTE(review): swr_convert's out_count is in samples per
                // channel, but 44100*2 here is the buffer's BYTE size; it
                // works only because the real output is bounded by
                // nb_samples. Verify against the swr_convert docs.
                swr_convert(swrContext, &out_v_buffer, 44100 * 2,
                            (const uint8_t **) avFrame->data, avFrame->nb_samples);
                int size = av_samples_get_buffer_size(nullptr, out_channel_nb, avFrame->nb_samples,
                                                      AV_SAMPLE_FMT_S16, 1);
                jbyteArray audio_sample_array = env->NewByteArray(size);
                env->SetByteArrayRegion(audio_sample_array, 0, size,
                                        (const jbyte *) out_v_buffer);
                env->CallVoidMethod(audio_track_util, audio_write, audio_sample_array, size);
                env->DeleteLocalRef(audio_sample_array);
            }
        }

        av_packet_unref(avPacket);
        // Observe the pause flag between packets.
        if (isPause) {
//            pauseIndex = pVFrame->best_effort_timestamp;
            break;
        }
    }
    env->CallVoidMethod(audio_track_util, destroyAudio);

    // Tear down in reverse order of acquisition (happy path only; see the
    // leak note at the top regarding early returns).
    swr_free(&swrContext);
    av_free(out_v_buffer);
    avcodec_close(audioCodecContext);

    sws_freeContext(swsContext);
    av_free(out_buffer);
    av_frame_free(&avFrame);
    av_frame_free(&yuv420pFrame);
    av_packet_free(&avPacket);
    avcodec_close(videoCodecContext);
    avformat_close_input(&avFormatContext);
}

/**
 * Requests that the playVideo() decode loop stop. Only sets a flag; the loop
 * observes it between packets, so playback ends after the current packet.
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_pauseVideo(JNIEnv *env, jobject thiz) {
    isPause = 1;
}

// Active FFPlayer instance driving the Section902 scene; created in
// ffPrepare() and controlled by ffPplay/ffPause/ffStop below.
FFPlayerImpl *ffPlayer = nullptr;
/**
 * Creates the player and points it at the media file.
 * NOTE(review): a previously created ffPlayer is never deleted here, so
 * repeated prepare calls leak. filePath from GetStringUTFChars is also never
 * released — safe only if setMedia() copies the string; confirm in
 * FFPlayerImpl.
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_ffPrepare(JNIEnv *env, jobject thiz,
                                                               jstring path,
                                                               jobject audioTrackUtil) {
    ffPlayer = new FFPlayerImpl((Section902 *) scene, env, audioTrackUtil);
    // Convert the Java path string into a C string.
    const char *filePath = env->GetStringUTFChars(path, 0);
    ffPlayer->setMedia(filePath);
}
extern "C"
JNIEXPORT void JNICALL
// Starts (or resumes) playback on the prepared player.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_ffPplay(JNIEnv *env, jobject thiz) {
    // Null guard: calling play before ffPrepare() previously crashed on a
    // null ffPlayer.
    if (ffPlayer != nullptr) {
        ffPlayer->play();
    }
}
extern "C"
JNIEXPORT void JNICALL
// Pauses playback on the prepared player.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_ffPause(JNIEnv *env, jobject thiz) {
    // Null guard: calling pause before ffPrepare() previously crashed on a
    // null ffPlayer.
    if (ffPlayer != nullptr) {
        ffPlayer->pause();
    }
}
extern "C"
JNIEXPORT void JNICALL
// Stops playback on the prepared player.
Java_com_csw_android_ffmpegtest_gl_base_GLV3Renderer_ffStop(JNIEnv *env, jobject thiz) {
    // Null guard: calling stop before ffPrepare() previously crashed on a
    // null ffPlayer.
    if (ffPlayer != nullptr) {
        ffPlayer->stop();
    }
}