#include <android/log.h>
#include <string>
#include <jni.h>

#undef	LOG_TAG
#define LOG_TAG "AACDecoder"
#define LOGI(...) __android_log_print (ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)

extern "C" {
	#include "libavutil/opt.h"
	#include "libavcodec/avcodec.h"
	#include "libavformat/avformat.h"
	#include "libswscale/swscale.h"
	#include "libswresample/swresample.h"
};

#define FFMPEG_TAG "ffmpeg"

void logcat(void* ptr, int level, const char* fmt,va_list vl){
	__android_log_print (ANDROID_LOG_VERBOSE+level,FFMPEG_TAG,fmt, vl);
}

void decodeAudio(AVCodecID id,const char* srcPath,const char* dstPath,int bit_rate){
    av_log_set_callback(logcat);

	//获取解码器
    AVCodec* pCodec = avcodec_find_decoder(id);
	if (pCodec == nullptr)
	{
		LOGI("find %d decoder error", id);
		return;
	}

	//解码器上下文
	AVCodecContext* pCodecCtx = avcodec_alloc_context3(pCodec);
	pCodecCtx->channels = 1;//G726的通道数量固定为1
    pCodecCtx->channel_layout = AV_CH_LAYOUT_MONO;


	pCodecCtx->sample_rate = 8000;//G726的采样率固定为8k
	pCodecCtx->bit_rate = bit_rate;//比特率

	//码字,G726需要设置该值，相当于ffmpeg命令编码及播放g726音频时的参数：code_size,该值一般情况下为比特率除以8k,范围为2~5取整
	pCodecCtx->bits_per_coded_sample = bit_rate/8000;

	if(avcodec_open2(pCodecCtx, pCodec, nullptr) < 0)
	{
		LOGI("open codec error");
		return;
	}

	//重采样上下文
	SwrContext* au_convert_ctx = swr_alloc_set_opts(nullptr,
										pCodecCtx->channel_layout,
										AV_SAMPLE_FMT_S16,
										pCodecCtx->sample_rate,
										pCodecCtx->channel_layout,
										pCodecCtx->sample_fmt,
										pCodecCtx->sample_rate,
										0, nullptr);

	if (au_convert_ctx == nullptr){
		LOGI("open au_convert_ctx error");
		return;
	}

	swr_init(au_convert_ctx);

	uint8_t* readBuff = (uint8_t *) av_malloc(1024);;//文件读取的缓存
	FILE* rfile = fopen(srcPath,"r");

	FILE* wfile = fopen(dstPath,"w");//将解码的PCM数据保存到该文件

	if (rfile == nullptr){
		LOGI("open rfile fail");
		return;
	}

	if (wfile == nullptr){
		LOGI("open wfile fail");
		return;
	}

	uint8_t* buffer = nullptr;//PCM数据的缓存
    int buffer_size = 0;
	AVPacket* packet = nullptr;
	AVFrame* avFrame = nullptr;

	while (true){
		int count = fread(readBuff,1,1024, rfile);

		if (count <= 0){
			break;
		}

		packet = av_packet_alloc();
		packet->data = readBuff;
		packet->size = count;

		//将数据发送给解码器
		int ret = avcodec_send_packet(pCodecCtx,packet);
		if (ret != 0){
			break;
		}

		avFrame = av_frame_alloc();
        while (avcodec_receive_frame(pCodecCtx,avFrame) == 0){
            //从解码器中读取一帧，返回值为0时为读取成功
            //获取输出的一帧的PCM数据的大小
            int temp_size = av_samples_get_buffer_size(nullptr,pCodecCtx->channels, avFrame->nb_samples, AV_SAMPLE_FMT_S16, 1);

            //帧的大小是相对固定的，但不排除大小改变的可能
            if (buffer_size != temp_size){
                buffer_size = temp_size;

                if (buffer != nullptr){
                    av_free(buffer);
                }

                buffer = (uint8_t *) av_malloc(buffer_size);
            }

            //数据重采样，重采样后的PCM数据保存在第二个参数：buffer 中
            int len = swr_convert(
                    au_convert_ctx,
                    &buffer,
                    buffer_size,
                    (const uint8_t **) (avFrame->data) ,
                    avFrame->nb_samples);
            //第三个参数网上有人用avFrame->nb_samples，测试改后倒是也可以正常播放
            //但按理来说第三个参数应该是一帧的大小即buffer_size

            if (len > 0){
                LOGI("write size %d",buffer_size);
                fwrite(buffer,1,buffer_size,wfile);
                //将解码后的PCM保存进文件,在实际场景中，回调给Java层或直接播放也是可以的
            }
        }

		av_packet_free(&packet);
		av_free(packet);
		packet = nullptr;

		av_frame_free(&(avFrame));
		av_free(avFrame);
		avFrame = nullptr;
	}

	if (packet != nullptr){
		av_packet_free(&packet);
		av_free(packet);
		packet = nullptr;
	}

	if (avFrame){
		av_frame_free(&(avFrame));
		av_free(avFrame);
		avFrame = nullptr;
	}

	fclose(wfile);
	fclose(rfile);

	av_free(readBuff);

	if (buffer != nullptr){
		av_free(buffer);
	}

	swr_free(&au_convert_ctx);
	au_convert_ctx = nullptr;

	avcodec_close(pCodecCtx);
	avcodec_free_context(&pCodecCtx);
	pCodecCtx = nullptr;

	LOGI("decode finish");
}

extern "C"
JNIEXPORT void JNICALL
/**
 * JNI entry point: decode a little-endian G.726 file to PCM.
 * Mirrors MainActivity.decodeG726le(String, String, int).
 */
Java_com_hst_decoder_MainActivity_decodeG726le(JNIEnv *env, jobject thiz, jstring src_path,
											  jstring dst_path, jint bit_rate) {
	// GetStringUTFChars returns NULL on failure (with a pending
	// OutOfMemoryError); dereferencing it would crash the VM.
	const char* src = env->GetStringUTFChars(src_path, NULL);
	if (src == nullptr){
		return;
	}
	const char* dst = env->GetStringUTFChars(dst_path, NULL);
	if (dst == nullptr){
		env->ReleaseStringUTFChars(src_path, src);
		return;
	}

	decodeAudio(AV_CODEC_ID_ADPCM_G726LE, src, dst, bit_rate);

	env->ReleaseStringUTFChars(src_path, src);
	env->ReleaseStringUTFChars(dst_path, dst);
}
extern "C"
JNIEXPORT void JNICALL
/**
 * JNI entry point: decode a big-endian (standard) G.726 file to PCM.
 * Mirrors MainActivity.decodeG726(String, String, int).
 */
Java_com_hst_decoder_MainActivity_decodeG726(JNIEnv *env, jobject thiz, jstring src_path,
											jstring dst_path, jint bit_rate) {
	// GetStringUTFChars returns NULL on failure (with a pending
	// OutOfMemoryError); dereferencing it would crash the VM.
	const char* src = env->GetStringUTFChars(src_path, NULL);
	if (src == nullptr){
		return;
	}
	const char* dst = env->GetStringUTFChars(dst_path, NULL);
	if (dst == nullptr){
		env->ReleaseStringUTFChars(src_path, src);
		return;
	}

	decodeAudio(AV_CODEC_ID_ADPCM_G726, src, dst, bit_rate);

	env->ReleaseStringUTFChars(src_path, src);
	env->ReleaseStringUTFChars(dst_path, dst);
}