#include <jni.h>
#include "com_zagj_videocomparess_utils_AudioPlayer.h"
#include "FFmpegMusic.cpp"
// for native asset manager
#include <sys/types.h>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>
#include <malloc.h>

extern "C" {
#include <libavformat/avformat.h>
#include <android/log.h>
#include <libavdevice/avdevice.h>
#include <libavfilter/avcodec.h>
#include <libswresample/swresample.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <cstdio>
}
#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format,  ##__VA_ARGS__)
//
// Created by Chen qin lang on 2018/6/21.
//
/**
 * Decode the audio stream of the file at |path| with FFmpeg, resample every
 * frame to interleaved 16-bit stereo PCM via libswresample, and hand each PCM
 * chunk to the Java side (AudioPlayer.playTrack([BI)V) for AudioTrack playback.
 *
 * @return 1 when the whole stream was played, -1 on any setup failure.
 */
JNIEXPORT jint JNICALL Java_com_zagj_videocomparess_utils_AudioPlayer_playMusic
        (JNIEnv *env, jobject instance, jstring path) {
    char audio_str[512] = {0};
    AVFormatContext *pFormatCtx;
    AVCodec *pCodec;
    AVFrame *frame;
    AVPacket *packet;
    int ret;
    int audio_stream_idx = -1;  // was uninitialized: the == -1 check below read garbage (UB)

    // Copy the Java string into a local buffer and release the JVM pin right
    // away (the original never called ReleaseStringUTFChars and leaked it).
    const char *native_path = env->GetStringUTFChars(path, NULL);
    if (native_path == NULL) {
        return -1;  // JVM out of memory
    }
    snprintf(audio_str, sizeof(audio_str), "%s", native_path);
    env->ReleaseStringUTFChars(path, native_path);

    LOGE("open file %s \n", audio_str);
    av_register_all();
    avfilter_register_all();
    avformat_network_init();
    avdevice_register_all();
    pFormatCtx = avformat_alloc_context();
    // NOTE: the assignment below must be parenthesized. The original
    // `if (ret = f(...) < 0)` evaluated the comparison first, so `ret` only
    // ever held 0/1 and open errors were never detected.
    if ((ret = avformat_open_input(&pFormatCtx, audio_str, NULL, NULL)) < 0) {
        LOGE(" open input failed %s", av_err2str(ret));
        return -1;
    }
    // avformat_find_stream_info returns >= 0 on success; the original treated
    // ANY non-zero return (including success) as a failure.
    if ((ret = avformat_find_stream_info(pFormatCtx, NULL)) < 0) {
        LOGE(" avformat_find_stream_info %s", av_err2str(ret));
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; ++i) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            LOGE("  找到音频id %d", pFormatCtx->streams[i]->codecpar->codec_type);
            audio_stream_idx = i;
            break;
        }
    }
    if (audio_stream_idx == -1) {
        LOGE("not found audio stream ");
        avformat_close_input(&pFormatCtx);  // was leaked on this path
        return -1;
    }
    // Deprecated AVStream::codec kept to match this file's FFmpeg API level.
    AVCodecContext *pCodecCtx = pFormatCtx->streams[audio_stream_idx]->codec;

    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    LOGE("avcodec_find_decoder");
    if (!pCodec) {
        LOGE(" find decode failed ");
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    if ((ret = avcodec_open2(pCodecCtx, pCodec, NULL)) < 0) {
        LOGE("avcoec_open2 failed %s", av_err2str(ret));
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    // av_packet_alloc() zero-initializes; the original raw av_malloc left all
    // packet fields (including buf/side data pointers) holding garbage.
    packet = av_packet_alloc();
    frame = av_frame_alloc();
    SwrContext *swrContext = swr_alloc();

    // Resample target: interleaved stereo, signed 16-bit, same rate as input.
    // One output sample frame is 2 ch * 2 bytes = 4 bytes, so this buffer
    // holds up to 44100 output samples per channel.
    uint8_t *out_buffer = (uint8_t *) av_malloc(44100 * 2 * 2);
    uint64_t out_ch_layout = AV_CH_LAYOUT_STEREO;         // output channel layout (stereo)
    enum AVSampleFormat out_formart = AV_SAMPLE_FMT_S16;  // output format: 16-bit signed
    int out_sample_rate = pCodecCtx->sample_rate;         // keep the source sample rate

    // Configure the resampler: source layout/format/rate -> stereo S16.
    swr_alloc_set_opts(swrContext, out_ch_layout, out_formart, out_sample_rate,
                       pCodecCtx->channel_layout, pCodecCtx->sample_fmt, pCodecCtx->sample_rate, 0,
                       NULL);
    swr_init(swrContext);
    int out_channer_nb = av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);

    // Hook up the Java callbacks that own the AudioTrack.
    jclass pJclass = env->GetObjectClass(instance);
    jmethodID pID = env->GetMethodID(pJclass, "createAudioTrack", "(II)V");
    env->CallVoidMethod(instance, pID, 44100, out_channer_nb);

    jmethodID pID2 = env->GetMethodID(pJclass, "setFileSize", "(II)V");
    env->CallVoidMethod(instance, pID2, 44100, out_channer_nb);
    jmethodID audio_write = env->GetMethodID(pJclass, "playTrack", "([BI)V");
    int got_frame;
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == audio_stream_idx) {
            ret = avcodec_decode_audio4(pCodecCtx, frame, &got_frame, packet);
            if (ret >= 0 && got_frame) {
                // out_count is in SAMPLES PER CHANNEL (44100 matches the
                // buffer above). The original passed the byte count 44100*2,
                // letting swr_convert overrun the 44100*2-byte buffer.
                swr_convert(swrContext, &out_buffer, 44100,
                            (const uint8_t **) frame->data, frame->nb_samples);

                // Same input/output rate, so nb_samples output samples exist.
                int bufferSize = av_samples_get_buffer_size(NULL, out_channer_nb,
                                                            frame->nb_samples,
                                                            AV_SAMPLE_FMT_S16, 1);
                jbyteArray pArray = env->NewByteArray(bufferSize);
                env->SetByteArrayRegion(pArray, 0, bufferSize, (jbyte *) out_buffer);
                env->CallVoidMethod(instance, audio_write, pArray, bufferSize);
                env->DeleteLocalRef(pArray);  // avoid local-ref table overflow in the loop
            }
            av_frame_unref(frame);
        }
        av_packet_unref(packet);  // was missing: every compressed packet leaked
    }
    LOGE("play audio end");
    av_free(out_buffer);      // was leaked
    av_packet_free(&packet);  // was leaked
    av_frame_free(&frame);
    swr_free(&swrContext);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    return 1;
}
SLObjectItf engineObject=NULL;// OpenSL ES engine object (SLObjectItf)
SLEngineItf engineEngine = NULL;// engine interface obtained from engineObject


SLObjectItf outputMixObject = NULL;// output-mix object created from the engine
SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;// optional reverb interface of the output mix
SLEnvironmentalReverbSettings settings = SL_I3DL2_ENVIRONMENT_PRESET_DEFAULT;// default reverb preset


SLObjectItf audioplayer=NULL;// audio-player object
SLPlayItf  slPlayItf=NULL;// play interface (start/stop state)
SLAndroidSimpleBufferQueueItf  slBufferQueueItf=NULL;// buffer-queue interface fed with PCM


// Current PCM chunk handed over by getPcm() (defined in FFmpegMusic.cpp);
// ownership of `buffer` stays with the FFmpeg side -- TODO confirm against FFmpegMusic.cpp.
size_t buffersize =0;
void *buffer;
// Buffer-queue callback: pull the next chunk of decoded PCM from the FFmpeg
// side (getPcm) and enqueue it so OpenSL ES keeps playing. Called again by
// the system each time the previous buffer has been consumed.
void getQueueCallBack(SLAndroidSimpleBufferQueueItf bq, void *context) {
    buffersize = 0;
    getPcm(&buffer, &buffersize);

    // Nothing left to play (end of stream or decode failure) -- stop enqueueing.
    if (buffer == NULL || buffersize == 0) {
        return;
    }
    (*bq)->Enqueue(bq, buffer, buffersize);
}

//创建引擎
void createEngine(){
    slCreateEngine(&engineObject,0,NULL,0,NULL,NULL);//创建引擎
    (*engineObject)->Realize(engineObject,SL_BOOLEAN_FALSE);//实现engineObject接口对象
    (*engineObject)->GetInterface(engineObject,SL_IID_ENGINE,&engineEngine);//通过引擎调用接口初始化SLEngineItf
}

// Create the output mix (the sink the audio player feeds) and, when the device
// supports it, apply the default environmental-reverb preset. Requires
// createEngine() to have populated engineEngine first.
void createMixVolume(){
    (*engineEngine)->CreateOutputMix(engineEngine,&outputMixObject,0,0,0);// create the output-mix object from the engine
    (*outputMixObject)->Realize(outputMixObject,SL_BOOLEAN_FALSE);// realize it synchronously
    SLresult   sLresult = (*outputMixObject)->GetInterface(outputMixObject,SL_IID_ENVIRONMENTALREVERB,&outputMixEnvironmentalReverb);// reverb is optional -- result checked below
    // Apply the default reverb settings only if the interface is available.
    if (SL_RESULT_SUCCESS == sLresult) {
        (*outputMixEnvironmentalReverb)->
                SetEnvironmentalReverbProperties(outputMixEnvironmentalReverb, &settings);
    }
}

// Create the OpenSL ES audio player for |filename|:
//   1. ask the FFmpeg side (createFFmpeg) for the stream's rate and channel count,
//   2. describe the source as an Android simple buffer queue of stereo S16 PCM,
//   3. wire the player to the output mix, register the buffer-queue callback,
//   4. start playing and prime the queue with the first buffer.
// NOTE(review): none of the SLresult return values below are checked; a failed
// CreateAudioPlayer leaves audioplayer NULL and the following calls crash.
void createPlayer(char*filename){
    // Ask FFmpeg to open the file and report the PCM parameters.
    int rate;
    int channels;
    createFFmpeg(&rate,&channels,filename);
    LOGE("RATE %d",rate);
    LOGE("channels %d",channels);
    /*
     * typedef struct SLDataLocator_AndroidBufferQueue_ {
    SLuint32    locatorType; // buffer-queue locator type
    SLuint32    numBuffers;  // number of buffers in the queue
} */

    SLDataLocator_AndroidBufferQueue android_queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,2};
    /**
    typedef struct SLDataFormat_PCM_ {
        SLuint32 		formatType;     // PCM
        SLuint32 		numChannels;    // channel count
        SLuint32 		samplesPerSec;  // sample rate, in MILLIHERTZ (hence rate*1000 below)
        SLuint32 		bitsPerSample;  // bits per sample
        SLuint32 		containerSize;  // container bits
        SLuint32 		channelMask;    // speaker positions
        SLuint32		endianness;     // byte order
    } SLDataFormat_PCM;
     */
    // NOTE(review): channelMask is hard-coded to front-left|front-right even
    // when |channels| == 1 -- verify mono sources play correctly.
    SLDataFormat_PCM pcm = {SL_DATAFORMAT_PCM,static_cast<SLuint32>(channels), static_cast<SLuint32>( rate*1000)
            ,SL_PCMSAMPLEFORMAT_FIXED_16
            ,SL_PCMSAMPLEFORMAT_FIXED_16
            ,SL_SPEAKER_FRONT_LEFT|SL_SPEAKER_FRONT_RIGHT,SL_BYTEORDER_LITTLEENDIAN};

    /*
     * typedef struct SLDataSource_ {
	        void *pLocator; // the buffer queue
	        void *pFormat;  // data format / configuration
        } SLDataSource;
     * */
    SLDataSource dataSource = {&android_queue,&pcm};


    SLDataLocator_OutputMix slDataLocator_outputMix={SL_DATALOCATOR_OUTPUTMIX,outputMixObject};


    SLDataSink slDataSink = {&slDataLocator_outputMix,NULL};


    // Interfaces requested from the player; none is mandatory (all SL_BOOLEAN_FALSE).
    const SLInterfaceID ids[3]={SL_IID_BUFFERQUEUE,SL_IID_EFFECTSEND,SL_IID_VOLUME};
    const SLboolean req[3]={SL_BOOLEAN_FALSE,SL_BOOLEAN_FALSE,SL_BOOLEAN_FALSE};

    /*
     * SLresult (*CreateAudioPlayer) (
		SLEngineItf self,
		SLObjectItf * pPlayer,
		SLDataSource *pAudioSrc, // data source (buffer queue + PCM format)
		SLDataSink *pAudioSnk,   // sink (the output mix)
		SLuint32 numInterfaces,
		const SLInterfaceID * pInterfaceIds,
		const SLboolean * pInterfaceRequired
	);
     * */
    LOGE("执行到此处");
            (*engineEngine)->CreateAudioPlayer(engineEngine,&audioplayer,&dataSource,&slDataSink,3,ids,req);
    (*audioplayer)->Realize(audioplayer,SL_BOOLEAN_FALSE);
    LOGE("执行到此处2");
            (*audioplayer)->GetInterface(audioplayer,SL_IID_PLAY,&slPlayItf);// obtain the play interface
    // Obtain the buffer-queue interface used to feed PCM for playback.
    (*audioplayer)->GetInterface(audioplayer,SL_IID_BUFFERQUEUE,&slBufferQueueItf);
    // Register the callback fired whenever a buffer finishes playing.
    (*slBufferQueueItf)->RegisterCallback(slBufferQueueItf,getQueueCallBack,NULL);
    // Switch the player to the playing state.
    (*slPlayItf)->SetPlayState(slPlayItf,SL_PLAYSTATE_PLAYING);

    // Prime the queue manually once; subsequent refills come via the callback.
    getQueueCallBack(slBufferQueueItf,NULL);

}

// Tear everything down in reverse order of creation: player, output mix,
// engine, then the FFmpeg decoder state. Destroying an SLObjectItf invalidates
// every interface obtained from it, so the related interface pointers are
// cleared alongside each object to prevent later use of dangling interfaces.
void realseResource(){
    if (audioplayer) {
        (*audioplayer)->Destroy(audioplayer);
        audioplayer = NULL;
        slPlayItf = NULL;
        slBufferQueueItf = NULL;
    }

    if (outputMixObject) {
        (*outputMixObject)->Destroy(outputMixObject);
        outputMixObject = NULL;
        outputMixEnvironmentalReverb = NULL;
    }

    if (engineObject) {
        (*engineObject)->Destroy(engineObject);
        engineObject = NULL;
        engineEngine = NULL;
    }

    // Release the decoder/resampler owned by FFmpegMusic.cpp.
    realseFFmpeg();
}

/**
 * JNI entry point: start OpenSL ES playback of the media file at |pamPath_|.
 * Builds engine -> output mix -> player; playback is then driven by the
 * buffer-queue callback (getQueueCallBack).
 *
 * @return 1 on success, -1 if the Java string could not be pinned.
 */
JNIEXPORT jint JNICALL Java_com_zagj_videocomparess_utils_AudioPlayer_openes
       (JNIEnv *env, jobject instance, jstring pamPath_) {
    char filename[512] = {0};
    const char *pamPath = env->GetStringUTFChars(pamPath_, NULL);
    if (pamPath == NULL) {
        return -1;  // JVM out of memory; original would have crashed in sprintf
    }
    // Bounded copy: the original sprintf could overflow on paths >= 512 bytes.
    snprintf(filename, sizeof(filename), "%s", pamPath);
    // Release the pinned chars (this call was commented out and leaked).
    env->ReleaseStringUTFChars(pamPath_, pamPath);

    createEngine();
    createMixVolume();
    createPlayer(filename);
    return 1;
}
// JNI entry point: stop playback and free all OpenSL ES / FFmpeg resources.
// Always returns 0.
JNIEXPORT jint JNICALL Java_com_zagj_videocomparess_utils_AudioPlayer_stop (JNIEnv *env, jobject instance){
    realseResource();
    return 0;
}
