#include <jni.h>
#include <string>
#include <android/native_window_jni.h>
#include <zconf.h>
#include "FFMPegControl.h"
#include "JavaCallback.h"

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
    // libswresample: audio resampling
#include "libswresample/swresample.h"
}

// Render target shared between the JNI thread that installs the surface and
// the decode thread that draws into it; renderFrame() accesses it under `mutex`.
ANativeWindow *window = 0;
// Created in nativePrepare: native playback controller and the bridge used to
// call back into Java.
FFMPegControl *ffmPegControl;
JavaCallback *javaCallback;
pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;

// Cached at load time so native worker threads can attach to the VM and call
// back into Java (passed to JavaCallback in nativePrepare).
JavaVM *javaVM = NULL;
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
    javaVM = vm;
    return JNI_VERSION_1_4;
}

/**
 * Render one RGBA frame into the shared ANativeWindow.
 *
 * @param data     start of the RGBA pixel buffer
 * @param linesize bytes per source row (may exceed w*4 due to alignment)
 * @param w        frame width in pixels
 * @param h        frame height in pixels
 *
 * Runs on the decode thread; `window` is accessed under `mutex` so the surface
 * can be swapped/released safely from the JNI thread.
 */
void renderFrame(uint8_t *data, int linesize, int w, int h) {
    pthread_mutex_lock(&mutex);
    if (!window) {
        pthread_mutex_unlock(&mutex);
        return;
    }
    // Configure the window buffer geometry to match the frame.
    ANativeWindow_setBuffersGeometry(window, w, h, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer window_buffer;
    if (ANativeWindow_lock(window, &window_buffer, 0)) {
        // Lock failed: the surface is unusable — drop it.
        ANativeWindow_release(window);
        window = 0;
        pthread_mutex_unlock(&mutex);
        return;
    }
    uint8_t *window_data = static_cast<uint8_t *>(window_buffer.bits);
    int window_linesize = window_buffer.stride * 4;  // window row pitch in bytes

    // Fix: copy only as many bytes/rows as BOTH buffers actually have.
    // The original copied window_linesize bytes from every source row, reading
    // past the end of the source row whenever the window stride exceeded the
    // frame's linesize.
    int copy_bytes = window_linesize < linesize ? window_linesize : linesize;
    int copy_rows = window_buffer.height < h ? window_buffer.height : h;
    for (int i = 0; i < copy_rows; ++i) {
        memcpy(window_data + i * window_linesize, data + i * linesize, copy_bytes);
    }
    ANativeWindow_unlockAndPost(window);
    pthread_mutex_unlock(&mutex);
}

extern "C" JNIEXPORT jstring JNICALL
Java_com_zsd_android_ffmpegdemo_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    // Returns the linked FFmpeg version string — a simple smoke test that the
    // native FFmpeg libraries loaded correctly.
    // Fix: removed the unused `std::string hello` local left over from the
    // Android Studio project template.
    return env->NewStringUTF(av_version_info());
}

extern "C"
JNIEXPORT void JNICALL
Java_com_zsd_android_ffmpegdemo_VideoPlayer_nativeStart(JNIEnv *env, jobject instance,
                                                          jstring path_, jobject surface) {
    // Demo decode loop: open `path_`, decode the first video stream, convert
    // each frame YUV -> RGBA with libswscale and blit it into `surface`.
    //
    // Key FFmpeg objects:
    //   AVFormatContext - demuxer context holding all streams (video/audio/subtitle)
    //   AVCodecContext  - decoder context (width/height/pix_fmt, etc.)
    //   AVCodec         - the decoder itself; produces YUV frames
    //   SwsContext      - pixel-format/scale converter (YUV -> RGBA here)
    //
    // Fixes vs. the original: every error path now releases what it acquired
    // (opts dict, window, UTF chars, packet/frame/sws/codec/format contexts),
    // missing-stream/decoder failures are detected instead of dereferencing
    // index -1, packets are unref'd each iteration (the original leaked one
    // buffer per packet), non-video packets are no longer fed to the video
    // decoder, ANativeWindow_lock's result is checked, and the row memcpy is
    // clamped so it cannot read past the end of a source row.
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    const char *path = env->GetStringUTFChars(path_, 0);

    // Declared up front so the single cleanup section below can free whatever
    // was actually acquired, regardless of where the do/while(0) body bailed.
    AVFormatContext *formatContext = NULL;
    AVCodecContext *codecContext = NULL;
    SwsContext *swsContext = NULL;
    AVPacket *packet = NULL;
    AVFrame *frame = NULL;
    uint8_t *dst_data[4] = {NULL, NULL, NULL, NULL};  // RGBA destination planes
    int dst_lineSize[4] = {0};                        // bytes per row, per plane

    do {
        avformat_network_init();
        formatContext = avformat_alloc_context();
        AVDictionary *opts = NULL;
        av_dict_set(&opts, "timeout", "3000000", 0);  // 3 s network timeout (microseconds)
        int ret = avformat_open_input(&formatContext, path, NULL, &opts);
        av_dict_free(&opts);  // unconsumed entries would otherwise leak
        if (ret) {
            break;  // on failure avformat_open_input already freed formatContext
        }
        if (avformat_find_stream_info(formatContext, NULL) < 0) {
            break;
        }
        // Locate the first video stream.
        int video_stream_dex = -1;
        for (unsigned int i = 0; i < formatContext->nb_streams; ++i) {
            if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
                video_stream_dex = (int) i;
                break;
            }
        }
        if (video_stream_dex < 0) {
            break;  // no video stream — nothing to render
        }

        // Set up the decoder from the stream's codec parameters.
        AVCodecParameters *codecpar = formatContext->streams[video_stream_dex]->codecpar;
        AVCodec *dec = avcodec_find_decoder(codecpar->codec_id);
        if (!dec) {
            break;
        }
        codecContext = avcodec_alloc_context3(dec);
        if (!codecContext) {
            break;
        }
        if (avcodec_parameters_to_context(codecContext, codecpar) < 0 ||
            avcodec_open2(codecContext, dec, NULL) < 0) {
            break;
        }

        packet = av_packet_alloc();
        frame = av_frame_alloc();
        // YUV -> RGBA converter at the stream's native resolution.
        swsContext = sws_getContext(codecContext->width, codecContext->height, codecContext->pix_fmt,
                                    codecContext->width, codecContext->height, AV_PIX_FMT_RGBA,
                                    SWS_BILINEAR, 0, 0, 0);
        if (!packet || !frame || !swsContext) {
            break;
        }

        ANativeWindow_setBuffersGeometry(nativeWindow, codecContext->width, codecContext->height,
                                         WINDOW_FORMAT_RGBA_8888);
        ANativeWindow_Buffer outBuffer;

        if (av_image_alloc(dst_data, dst_lineSize, codecContext->width, codecContext->height,
                           AV_PIX_FMT_RGBA, 1) < 0) {
            break;
        }

        // Demux -> decode -> convert -> blit.
        while (av_read_frame(formatContext, packet) >= 0) {
            if (packet->stream_index != video_stream_dex) {
                av_packet_unref(packet);  // not ours — drop it
                continue;
            }
            avcodec_send_packet(codecContext, packet);
            av_packet_unref(packet);  // decoder keeps its own reference
            int rec = avcodec_receive_frame(codecContext, frame);
            if (rec == AVERROR(EAGAIN)) {
                continue;  // decoder needs more input before producing a frame
            } else if (rec < 0) {
                break;
            }

            sws_scale(swsContext, frame->data, frame->linesize, 0, frame->height,
                      dst_data, dst_lineSize);

            if (ANativeWindow_lock(nativeWindow, &outBuffer, NULL) != 0) {
                break;  // surface gone — stop rendering
            }
            uint8_t *firstWindow = static_cast<uint8_t *>(outBuffer.bits);
            uint8_t *src_data = dst_data[0];
            int destStride = outBuffer.stride * 4;  // window row pitch in bytes
            int src_linesize = dst_lineSize[0];
            // Clamp to the smaller pitch so we never over-read the RGBA buffer.
            int copyBytes = destStride < src_linesize ? destStride : src_linesize;
            for (int i = 0; i < outBuffer.height; ++i) {
                memcpy(firstWindow + i * destStride, src_data + i * src_linesize, copyBytes);
            }
            ANativeWindow_unlockAndPost(nativeWindow);
            usleep(16 * 1000);  // crude ~60 fps pacing; a real player paces by pts
        }
    } while (0);

    // Single cleanup path — frees only what was acquired.
    if (dst_data[0]) {
        av_freep(&dst_data[0]);
    }
    sws_freeContext(swsContext);       // NULL-safe
    av_frame_free(&frame);             // NULL-safe
    av_packet_free(&packet);           // NULL-safe
    if (codecContext) {
        avcodec_free_context(&codecContext);
    }
    if (formatContext) {
        avformat_close_input(&formatContext);  // pairs with avformat_open_input
    }
    ANativeWindow_release(nativeWindow);
    env->ReleaseStringUTFChars(path_, path);
}

// Upper bound for one second of resampled audio: 48 kHz * 4 bytes (stereo S16).
// Fix: the original "#define NAME = value;" form would expand to the invalid
// token sequence "= 48000 * 4;" at any use site; a macro takes no '=' or ';'.
#define MAX_AUDIO_FRAME_SIZE (48000 * 4)
extern "C"
JNIEXPORT void JNICALL
Java_com_zsd_android_ffmpegdemo_VideoPlayer_nativeSound(JNIEnv *env, jobject instance,
                                                        jstring input_, jstring output_) {
    // Decode the first audio stream of `input_`, resample to 44.1 kHz stereo
    // signed 16-bit with libswresample, and dump the raw PCM to `output_`.
    //
    // Fixes vs. the original:
    //  - swr_convert's out-capacity argument is a SAMPLE count per channel, not
    //    bytes; passing 2*44100 overstated the 2*44100-byte buffer's real
    //    capacity of 22050 frames (possible buffer overflow).
    //  - the written byte count now uses the samples swr_convert actually
    //    produced, not the input frame's nb_samples (wrong when resampling).
    //  - only audio packets are sent to the audio decoder.
    //  - packets are unref'd/freed; avcodec_close was called twice and
    //    avformat_free_context followed avformat_close_input; fopen was
    //    unchecked; output_ chars leaked on the early-return path; the opts
    //    dictionary leaked.
    const char *input = env->GetStringUTFChars(input_, 0);
    const char *output = env->GetStringUTFChars(output_, 0);

    // Declared up front so one cleanup section handles every exit.
    AVFormatContext *formatContext = NULL;
    AVCodecContext *codecContext = NULL;
    SwrContext *swRContext = NULL;
    AVPacket *packet = NULL;
    AVFrame *frame = NULL;
    uint8_t *out_buffer = NULL;
    FILE *fp_pcm = NULL;

    do {
        avformat_network_init();
        formatContext = avformat_alloc_context();
        AVDictionary *opts = NULL;
        av_dict_set(&opts, "timeout", "3000000", 0);
        int ret = avformat_open_input(&formatContext, input, NULL, &opts);
        av_dict_free(&opts);
        if (ret) {
            break;  // open_input freed formatContext on failure
        }
        if (avformat_find_stream_info(formatContext, NULL) < 0) {
            break;
        }
        // Locate the first audio stream.
        int audio_stream_dex = -1;
        for (unsigned int i = 0; i < formatContext->nb_streams; ++i) {
            if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
                audio_stream_dex = (int) i;
                break;
            }
        }
        if (audio_stream_dex < 0) {
            break;
        }

        AVCodecParameters *codecpar = formatContext->streams[audio_stream_dex]->codecpar;
        AVCodec *dec = avcodec_find_decoder(codecpar->codec_id);
        if (!dec) {
            break;
        }
        codecContext = avcodec_alloc_context3(dec);
        if (!codecContext) {
            break;
        }
        if (avcodec_parameters_to_context(codecContext, codecpar) < 0 ||
            avcodec_open2(codecContext, dec, NULL) < 0) {
            break;
        }

        packet = av_packet_alloc();
        frame = av_frame_alloc();
        if (!packet || !frame) {
            break;
        }

        // Fixed output format: 44.1 kHz, stereo, signed 16-bit interleaved.
        AVSampleFormat out_sample = AV_SAMPLE_FMT_S16;
        int out_sample_rate = 44100;
        uint64_t out_ch_layout = AV_CH_LAYOUT_STEREO;
        int out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout);

        // Input parameters come from the opened decoder.
        swRContext = swr_alloc_set_opts(NULL, out_ch_layout, out_sample, out_sample_rate,
                                        codecContext->channel_layout, codecContext->sample_fmt,
                                        codecContext->sample_rate, 0, NULL);
        if (!swRContext || swr_init(swRContext) < 0) {
            break;
        }

        // Capacity of one second of output, expressed in samples per channel.
        const int max_out_samples = out_sample_rate;
        out_buffer = (uint8_t *) av_malloc((size_t) max_out_samples * out_channel_nb *
                                           av_get_bytes_per_sample(out_sample));
        fp_pcm = fopen(output, "wb");
        if (!out_buffer || !fp_pcm) {
            break;
        }

        while (av_read_frame(formatContext, packet) >= 0) {
            if (packet->stream_index != audio_stream_dex) {
                av_packet_unref(packet);  // not audio — skip without decoding
                continue;
            }
            avcodec_send_packet(codecContext, packet);
            av_packet_unref(packet);
            ret = avcodec_receive_frame(codecContext, frame);
            if (ret == AVERROR(EAGAIN)) {
                continue;
            } else if (ret < 0) {
                break;
            }
            // Capacity is in samples per channel; returns samples produced.
            int converted = swr_convert(swRContext, &out_buffer, max_out_samples,
                                        (const uint8_t **) frame->data, frame->nb_samples);
            if (converted < 0) {
                break;
            }
            int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb,
                                                             converted, out_sample, 1);
            fwrite(out_buffer, 1, (size_t) out_buffer_size, fp_pcm);
        }
    } while (0);

    // Single cleanup path.
    if (fp_pcm) {
        fclose(fp_pcm);
    }
    if (out_buffer) {
        av_free(out_buffer);
    }
    if (swRContext) {
        swr_free(&swRContext);
    }
    av_frame_free(&frame);
    av_packet_free(&packet);
    if (codecContext) {
        avcodec_free_context(&codecContext);
    }
    if (formatContext) {
        avformat_close_input(&formatContext);
    }
    env->ReleaseStringUTFChars(input_, input);
    env->ReleaseStringUTFChars(output_, output);
}

//-------------------- Below: refactored FFmpeg player wiring intended for real projects (the standalone demo functions above are not production-ready) --------------------
extern "C"
JNIEXPORT void JNICALL
Java_com_zsd_android_ffmpegdemo_player_FFMPegPlayer_nativePrepare(JNIEnv *env, jobject instance,
                                                                  jstring dataSource_) {
    const char *dataSource = env->GetStringUTFChars(dataSource_, 0);

    // Build the Java callback bridge and the native controller, hook up the
    // frame-render callback, and start preparation of the data source.
    // NOTE(review): calling this twice leaks the previous javaCallback /
    // ffmPegControl instances — consider deleting any existing ones first.
    javaCallback = new JavaCallback(javaVM, env, instance);
    ffmPegControl = new FFMPegControl(javaCallback, dataSource);
    ffmPegControl->setRenderCallback(renderFrame);
    ffmPegControl->prepare();

    // NOTE(review): releasing dataSource here is only safe if FFMPegControl
    // copied the string in its constructor rather than storing the pointer —
    // TODO confirm against FFMPegControl's implementation.
    env->ReleaseStringUTFChars(dataSource_, dataSource);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_zsd_android_ffmpegdemo_player_FFMPegPlayer_nativeStart(JNIEnv *env, jobject instance) {
    // Enter the playing state; a no-op until nativePrepare has created the controller.
    if (ffmPegControl == 0) {
        return;
    }
    ffmPegControl->start();
}

extern "C"
JNIEXPORT void JNICALL
Java_com_zsd_android_ffmpegdemo_player_FFMPegPlayer_nativeSetSurface(JNIEnv *env, jobject instance,
                                                                     jobject mSurface) {
    // Install a new render window for video display.
    // Fix: swap the window under the same mutex renderFrame() holds, so the
    // decode thread can never be drawing into a window that is concurrently
    // released here (the original mutated `window` with no synchronization).
    pthread_mutex_lock(&mutex);
    if (window) {
        ANativeWindow_release(window);
        window = 0;
    }
    window = ANativeWindow_fromSurface(env, mSurface);
    pthread_mutex_unlock(&mutex);
}
//--------------------------------------------------------------------------------------------------------
extern "C"
JNIEXPORT jint JNICALL
Java_com_zsd_android_ffmpegdemo_player_FFMPegPlayer_nativeGetDuration(JNIEnv *env,
                                                                      jobject instance) {
    // Duration of the prepared media as reported by the controller.
    // Fix: the original fell off the end with no return value when the
    // controller was not yet created — undefined behavior for a function
    // returning jint. Report 0 ("not prepared") instead.
    if (ffmPegControl) {
        return ffmPegControl->getDuration();
    }
    return 0;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_zsd_android_ffmpegdemo_player_FFMPegPlayer_nativeSeek(JNIEnv *env, jobject instance,
                                                               jint progress) {
    // Forward the seek request to the controller; ignored until prepared.
    if (ffmPegControl == 0) {
        return;
    }
    ffmPegControl->seek(progress);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_zsd_android_ffmpegdemo_player_FFMPegPlayer_nativeStop(JNIEnv *env, jobject instance) {

    // Ask the controller to stop playback, then drop the Java callback bridge.
    if (ffmPegControl) {
        ffmPegControl->stop();
    }
    if (javaCallback) {
        // NOTE(review): ffmPegControl received this pointer in its constructor
        // (see nativePrepare); deleting it here leaves the controller with a
        // dangling reference unless stop() guarantees it is no longer used.
        // Also, ffmPegControl itself is never deleted or nulled — TODO confirm
        // the controller's intended lifetime/ownership contract.
        delete javaCallback;
        javaCallback = 0;
    }
}
extern "C"
JNIEXPORT void JNICALL
Java_com_zsd_android_ffmpegdemo_player_FFMPegPlayer_nativeRelease(JNIEnv *env, jobject instance) {

    // Drop the render window.
    // Fix: take the same mutex renderFrame() holds so the decode thread cannot
    // be mid-draw while the window is released (the original released the
    // shared `window` with no synchronization).
    pthread_mutex_lock(&mutex);
    if (window) {
        ANativeWindow_release(window);
        window = 0;
    }
    pthread_mutex_unlock(&mutex);
}