/*
 *	rtmp player for Android
 *	LIDE&CoolBlue
 *	2015-08-13
 */
#include <jni.h>        /* was "#include <jni>" — missing .h, would not compile */
#include <assert.h>
#include <stdlib.h>     /* malloc/free are used but this header was missing */
#include <string.h>

#include <pthread.h>
#include <sys/types.h>

#include <android/asset_manager.h>      /* was misspelled "assert_manager.h" */
#include <android/asset_manager_jni.h>  /* was misspelled "assert_manager_jni.h" */
#include <android/log.h>

#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>

#include <librtmp/rtmp.h>               /* duplicate include removed */
#include <librtmp/log.h>

#include <speex/speex.h>
#include <speex/speex_header.h>

#define LOG_TAG "Say.NDK"
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))

// engine interfaces
static SLObjectItf engineObject = NULL;
static SLEngineItf engineEngine;

// output mix object, created in Init and used as the player's sink.
// NOTE(review): this was referenced (Init, initNativePlayer) but never
// declared anywhere in the file — added here so the file links.
static SLObjectItf outputMixObject = NULL;

// buffer queue player interfaces
static SLObjectItf bqPlayerObject = NULL;
static SLPlayItf bqPlayerPlay;
static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
static SLEffectSendItf bqPlayerEffectSend;
static SLMuteSoloItf bqPlayerMuteSolo;
static SLVolumeItf bqPlayerVolume;

// Number of 16-bit samples staged before a buffer is enqueued for playback.
// NOTE(review): used throughout this file but never defined in it; 1600
// samples = 5 Speex wideband frames (320 samples each) = 100 ms @ 16 kHz —
// TODO confirm against the recorder side if one exists elsewhere.
#ifndef RECORDER_FRAMES
#define RECORDER_FRAMES 1600
#endif
static short playerBuffer[RECORDER_FRAMES]; // staging buffer fed to OpenSL ES
static unsigned playerSize = 0;

JavaVM *gJvm = NULL;          // cached VM pointer for callbacks from native threads
jobject gObj = NULL;          // global ref to the Java SayHi instance
jmethodID eventMid;           // SayHi.onEventCallback(int)
pthread_attr_t attr;          // detached-thread attribute, set up in Init

//player variable
char* playRtmpUrl;            // heap copy of the URL, handed to the player thread

pthread_t openPlayerPid;
pthread_mutex_t playMutex;
pthread_cond_t playCond;

int isOpenPlay;               // nonzero while the player thread is alive
int isStartPlay;              // cleared by ClosePlayer to stop the read loop

RTMP *playRtmp;               // rtmp session owned by the player thread
int dec_frame_size;           // samples per decoded speex frame
SpeexBits dbits;              // speex bit buffer (was declared twice; duplicate removed)
void *dec_state;              // speex decoder state

// Invoked by OpenSL ES each time a queued buffer finishes playing.
// Currently only logs the event; the queue is refilled from putAudioQueue.
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context)
{
	(void)bq;       // unused
	(void)context;  // unused
	LOGI("bqPlayerCallback\n");
}

/*
 * Creates, realizes and starts the OpenSL ES audio player: mono 16 kHz
 * 16-bit little-endian PCM, fed through a 2-slot Android simple buffer
 * queue into outputMixObject (created in Init).
 * Must run after Init has realized engineObject/engineEngine and the
 * output mix. Errors are handled with assert(), following the Google
 * native-audio sample style — NOTE(review): asserts are no-ops in
 * release builds, so failures would pass silently there.
 */
void initNativePlayer()
{
	SLresult result;

	// configure audio source: buffer queue of 2 buffers, mono 16 kHz 16-bit PCM
	SLDataLocator_AndroidSimpleBufferQueue loc_bufq = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2 };
	SLDataFormat_PCM format_pcm = { SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_16, SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16, SL_SPEAKER_FRONT_CENTER, SL_BYTEORDER_LITTLEENDIAN };
	SLDataSource audioSrc = { &loc_bufq, &format_pcm };

	// configure audio sink: the output mix created by Init
	SLDataLocator_OutputMix loc_outmix = { SL_DATALOCATOR_OUTPUTMIX, outputMixObject };
	SLDataSink audioSnk = { &loc_outmix, NULL };

	// create audio player, requiring buffer-queue, effect-send and volume
	// interfaces (mute/solo intentionally left out — see #if 0 below)
	const SLInterfaceID ids[3] = { SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND, /*SL_IID_MUTESOLO,*/SL_IID_VOLUME };
	const SLboolean req[3] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, /*SL_BOOLEAN_TRUE,*/SL_BOOLEAN_TRUE };
	result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk, 3, ids, req);
	assert(SL_RESULT_SUCCESS == result);

	// realize the player (synchronously — second arg FALSE means blocking)
	result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
	assert(SL_RESULT_SUCCESS == result);

	// get the play interface
	result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
	assert(SL_RESULT_SUCCESS == result);

	// get the buffer queue interface
	result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE, &bqPlayerBufferQueue);
	assert(SL_RESULT_SUCCESS == result);

	// register callback on the buffer queue (fires when a buffer finishes)
	result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
	assert(SL_RESULT_SUCCESS == result);

	// get the effect send interface
	result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_EFFECTSEND, &bqPlayerEffectSend);  
	assert(SL_RESULT_SUCCESS == result);

#if 0   // mute/solo is not supported for sources that are known to be mono, as this is
	// get the mute/solo interface
	result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_MUTESOLO, &bqPlayerMuteSolo);
	assert(SL_RESULT_SUCCESS == result);
#endif

	// get the volume interface
	result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_VOLUME, &bqPlayerVolume);
	assert(SL_RESULT_SUCCESS == result);

	// set the player's state to playing; audio starts once buffers are enqueued
	result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
	assert(SL_RESULT_SUCCESS == result);

}

int playerBufferIndex = 0;
void putAudioQueue(short* data, int dataSize)
{
	// LOGI("putAudioQueue");
	memcpy(playerBuffer+playerBufferIndex,data,dataSize*sizeof(short));
	playerBufferIndex+=dataSize;
	LOGI("playerBufferIndex %d   all:%d",playerBufferIndex,RECORDER_FRAMES);
	if(playerBufferIndex == RECORDER_FRAMES)
	{
		SLresult result;
		result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, playerBuffer, RECORDER_FRAMES * sizeof(short));
		playerBufferIndex = 0;
		LOGI("Enqueue player buffer");
	}
}

void* openPlayerThread(void* args)
{
	isOpenPlay = 1;
	short *output_buffer;
	do {
		playRtmp = RTMP_Alloc();
		RTMP_Init(playRtmp);
		LOGI("Play RTMP_Init %s\n", playRtmpUrl);
		if (!RTMP_SetupURL(playRtmp, playRtmpUrl)) {
			LOGI("Play RTMP_SetupURL error\n");
			break;
		}
		if (!RTMP_Connect(playRtmp, NULL) || !RTMP_ConnectStream(playRtmp, 0)) {
			LOGI("Play RTMP_Connect or RTMP_ConnectStream error\n");
			break;
		}
		LOGI("RTMP_Connected\n");

		// 初始化 opensl es
		initNativePlayer();
		// 初始化speex解码器
		speex_bits_init(&dbits);
		dec_state = speex_decoder_init(&speex_wb_mode);
		speex_decoder_ctl(dec_state, SPEEX_GET_FRAME_SIZE, &dec_frame_size);
		output_buffer = malloc(dec_frame_size * sizeof(short));

		RTMPPacket rtmp_pakt = { 0 };
		isStartPlay = 1;
		while (isStartPlay && RTMP_ReadPacket(playRtmp, &rtmp_pakt)) {
			if (RTMPPacket_IsReady(&rtmp_pakt)) {
				if (!rtmp_pakt.m_nBodySize)
					continue;
				if (rtmp_pakt.m_packetType == RTMP_PACKET_TYPE_AUDIO) {
					// 处理音频数据包
					// LOGI("AUDIO audio size:%d  head:%d  time:%d\n", rtmp_pakt.m_nBodySize, rtmp_pakt.m_body[0], rtmp_pakt.m_nTimeStamp);
					speex_bits_read_from(&dbits, rtmp_pakt.m_body + 1, rtmp_pakt.m_nBodySize - 1);
					speex_decode_int(dec_state, &dbits, output_buffer);
					putAudioQueue(output_buffer,dec_frame_size);               } else if (rtmp_pakt.m_packetType == RTMP_PACKET_TYPE_VIDEO) {
						// 处理视频数据包
					} else if (rtmp_pakt.m_packetType == RTMP_PACKET_TYPE_INFO) {
						// 处理信息包
					} else if (rtmp_pakt.m_packetType == RTMP_PACKET_TYPE_FLASH_VIDEO) {
						// 其他数据
						int index = 0;
						while (1) {
							int StreamType; //1-byte
							int MediaSize; //3-byte
							int TiMMER; //3-byte
							int Reserve; //4-byte
							char* MediaData; //MediaSize-byte
							int TagLen; //4-byte

							StreamType = rtmp_pakt.m_body[index];
							index += 1;
							MediaSize = AMF_DecodeInt24(rtmp_pakt.m_body + index);
							index += 3;
							TiMMER = AMF_DecodeInt24(rtmp_pakt.m_body + index);
							index += 3;
							Reserve = AMF_DecodeInt32(rtmp_pakt.m_body + index);
							index += 4;
							MediaData = rtmp_pakt.m_body + index;
							index += MediaSize;
							TagLen = AMF_DecodeInt32(rtmp_pakt.m_body + index);
							index += 4;
							//LOGI("bodySize:%d   index:%d",rtmp_pakt.m_nBodySize,index);
							//LOGI("StreamType:%d MediaSize:%d  TiMMER:%d TagLen:%d\n", StreamType, MediaSize, TiMMER, TagLen);
							if (StreamType == 0x08) {
								//音频包
								//int MediaSize = bigThreeByteToInt(rtmp_pakt.m_body+1);
								//  LOGI("FLASH audio size:%d  head:%d time:%d\n", MediaSize, MediaData[0], TiMMER);
								speex_bits_read_from(&dbits, MediaData + 1, MediaSize - 1);
								speex_decode_int(dec_state, &dbits, output_buffer);
								putAudioQueue(output_buffer,dec_frame_size);
							} else if (StreamType == 0x09) {          
								//视频包
								//  LOGI( "video size:%d  head:%d\n", MediaSize, MediaData[0]);
							}
							if (rtmp_pakt.m_nBodySize == index) {
								//     LOGI("one pakt over\n");
								break;
							}
						}
					}
				//  LOGI( "rtmp_pakt size:%d  type:%d\n", rtmp_pakt.m_nBodySize, rtmp_pakt.m_packetType);
				RTMPPacket_Free(&rtmp_pakt);
			}
		}
	} while (0);
	if (RTMP_IsConnected(playRtmp)) {
		RTMP_Close(playRtmp);
	}
	RTMP_Free(playRtmp);
	free(output_buffer);
	speex_bits_destroy(&dbits);
	speex_decoder_destroy(dec_state);
	(*bqPlayerObject)->Destroy(bqPlayerObject);
	isOpenPlay = 0;
}

/*
 * JNI: one-time native initialization, called once from Java before any
 * other native method. Creates and realizes the OpenSL ES engine and
 * output mix, prepares the detached-thread attribute used by OpenPlayer,
 * and caches the JavaVM, a global ref to the Java object, and the
 * onEventCallback(int) method id for later callbacks.
 * Errors are handled with assert(), following the Google native-audio
 * sample style — NOTE(review): asserts compile out in release builds.
 */
JNIEXPORT void JNICALL Java_cn_cloudstep_sayhi_SayHi_Init(JNIEnv *env, jobject jobj)
{
	SLresult result;

	// create engine
	result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
	assert(SL_RESULT_SUCCESS == result);
	// realize the engine
	result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
	assert(SL_RESULT_SUCCESS == result);
	// get the engine interface, which is needed in order to create other objects
	result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
	assert(SL_RESULT_SUCCESS == result);

	// create output mix, with environmental reverb specified as a non-required interface
	const SLInterfaceID ids[1] = { SL_IID_ENVIRONMENTALREVERB };
	const SLboolean req[1] = { SL_BOOLEAN_FALSE };
	result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
	assert(SL_RESULT_SUCCESS == result);

	// realize the output mix
	result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
	assert(SL_RESULT_SUCCESS == result);

	// player threads are created detached: nobody joins them
	pthread_attr_init(&attr);
	pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
	LOGI("Java_cn_cloudstep_sayhi_SayHi_Init");
	// cache the Java callback: void onEventCallback(int)
	eventMid = (*env)->GetMethodID(env, (*env)->GetObjectClass(env, jobj), "onEventCallback", "(I)V");
	// cache the VM and a global ref so native threads can attach and call back
	(*env)->GetJavaVM(env, &gJvm);
	gObj = (*env)->NewGlobalRef(env, jobj);
}

/*
 * JNI: starts playback of the given RTMP URL on a detached native thread.
 * No-op if a player thread is already running (isOpenPlay set).
 *
 * A heap copy of the URL is taken because the JNI string is released
 * before the thread may have read it; the player thread owns the copy.
 * Fixes over the original: GetStringUTFChars, malloc and pthread_create
 * results are now checked (the original leaked the URL copy and reported
 * nothing when thread creation failed).
 */
JNIEXPORT void JNICALL Java_cn_cloudstep_sayhi_SayHi_OpenPlayer(JNIEnv *env, jobject jobj, jstring jRtmpUrl)
{
	if (isOpenPlay) {
		return; // player thread already running
	}
	LOGI("Java_cn_cloudstep_sayhi_SayHi_OpenPlayer");
	const char* rtmpUrl = (*env)->GetStringUTFChars(env, jRtmpUrl, 0);
	if (rtmpUrl == NULL) {
		return; // OOM: JNI has already thrown OutOfMemoryError
	}
	playRtmpUrl = malloc(strlen(rtmpUrl) + 1);
	if (playRtmpUrl != NULL) {
		strcpy(playRtmpUrl, rtmpUrl);
	}
	(*env)->ReleaseStringUTFChars(env, jRtmpUrl, rtmpUrl);
	if (playRtmpUrl == NULL) {
		LOGI("OpenPlayer: out of memory copying URL");
		return;
	}
	if (pthread_create(&openPlayerPid, &attr, openPlayerThread, NULL) != 0) {
		LOGI("OpenPlayer: pthread_create failed");
		free(playRtmpUrl); // thread never started, so it will not free this
		playRtmpUrl = NULL;
	}
}

/*
 * JNI: requests the player thread to stop by clearing the flag its read
 * loop checks; the thread then tears down its own resources and clears
 * isOpenPlay when it exits.
 * NOTE(review): plain int flag shared between threads without any
 * synchronization or atomics — works in practice here but is formally a
 * data race; consider sig_atomic_t/_Atomic. Matches existing file style.
 */
JNIEXPORT void JNICALL Java_cn_cloudstep_sayhi_SayHi_ClosePlayer(JNIEnv *env, jobject jobj)
{
	isStartPlay = 0;
}

 JNIEXPORT void JNICALL Java_cn_cloudstep_sayhi_SayHi_Deinit(JNIEnv *env, jobject jobj)
{
	(*env)->DeleteGlobalRef(env, gObj);
}  
