/*
 * mediaplayer.cpp
 */

//#define LOG_NDEBUG 0
#define TAG "media"

#include <sys/types.h>
#include <sys/time.h>
#include <sys/stat.h>
#include <unistd.h>
#include <fcntl.h>
#include <math.h>
#include <string.h> /* strlen/memcpy: setDataSource keeps an owned copy of the URL */
#include <android/log.h>

extern "C" {

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libavutil/log.h"
#include "libavfilter/avfiltergraph.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
#include "libavutil/avutil.h"
#include "libavutil/imgutils.h"
#include "global.h"

} // end of extern C

#include "mediaplayer.h"
#define  LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)

#define FPS_DEBUGGING false

static MediaPlayer* sPlayer;
static int stopStatus;
//const char *filter_descr = "lutyuv='u=128:v=128'";//灰度处理
const char *filter_descr =
		"drawtext=fontfile=simfont.ttf:fontcolor=white:shadowcolor=black:text='测试视频':x=10:y=10"; //灰度处理
//const char *filter_descr = "boxblur";

MediaPlayer::MediaPlayer(JNIEnv* env) {
	env->GetJavaVM(&gs_jvm); //锟斤拷锟芥到全锟街憋拷锟斤拷锟斤拷JVM
	jniEnv = env;
	mVideoFrame = NULL;
	mVideoFilterFrame = NULL;
	mMovieFile = NULL;
	mURL = NULL;
	deltaTime = 0;
	mCookie = NULL;
	mDuration = -1;
	mCurrentPosition = -1;
	mSeekPosition = -1;
	mCurrentState = MEDIA_PLAYER_IDLE;
	mPrepareSync = false;
	mLoop = false;
	isFinish = false;
	isRestart = false;
	pthread_mutex_init(&mLock, NULL);
	mLeftVolume = mRightVolume = 1.0;
	mVideoWidth = mVideoHeight = 0;
	sPlayer = this;
	mVideoParamCount = 0;
//	mTreaterVideo = NULL;
	mVideoStreamIndex = -1;
	avcodec_register_all();
	av_register_all();
	avformat_network_init();

	//gmq add begin

	slapRGBDataArray = NULL;
	revokeSlapDataMethod = NULL;
	getFilterBmpDataMethod = NULL;

	frameYDataArray = NULL;
	frameUDataArray = NULL;
	frameVDataArray = NULL;

	yData = NULL;
	uData = NULL;
	vData = NULL;

	revokeFrameDataMethod = NULL;

	isIFrame = false;
	isFrameIncompleted = false;
	mSlapOn = false;
	stopStatus = 0;
	isConnect = 0;

	//filter
	buffersink_ctx = NULL;
	buffersrc_ctx = NULL;
	filter_graph = NULL;

	//gmq add end
}

MediaPlayer::~MediaPlayer() {

	if (mURL != NULL) {
		delete[] mURL;
		mURL = NULL;
	}
}

void MediaPlayer::setParamCount(int videoParam) {
	if (videoParam != mVideoParamCount) {
		mVideoParamCount = videoParam;
	}
}

bool MediaPlayer::prepareVideo() {
	// Find the first video stream
	__android_log_print(ANDROID_LOG_ERROR, TAG, "prepare");
	mVideoStreamIndex = -1;
	for (int i = 0; i < mMovieFile->nb_streams; i++) {
		if (mMovieFile->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
			mVideoStreamIndex = i;
			break;
		}
	}
	if (mVideoStreamIndex == -1) {
		return false;
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "prepare1");
	AVStream* stream = mMovieFile->streams[mVideoStreamIndex];
	// Get a pointer to the codec context for the video stream
	AVCodecContext* codec_ctx = stream->codec;
	codec_ctx->thread_count = 2;
	AVCodec* codec = avcodec_find_decoder(codec_ctx->codec_id);

	if (codec == NULL) {
		return false;
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "prepare2");
	codec_ctx->thread_count = 2;
	// Open codec
	if (avcodec_open2(codec_ctx, codec, NULL) < 0) {
		return false;
	}
	mVideoWidth = codec_ctx->width;
	mVideoHeight = codec_ctx->height;
	mDuration = mMovieFile->duration;
	videoDuration = mMovieFile->duration;
	if (stream->codec->pix_fmt == NULL) {
	}
	if (mVideoWidth == 0) {
		return false;
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "prepare3+w%d+h%d", mVideoWidth,
			mVideoHeight);
	mVideoSwsContext = sws_getContext(mVideoWidth, mVideoHeight,
			stream->codec->pix_fmt, mVideoWidth, mVideoHeight, PIX_FMT_RGB565, //
			SWS_BILINEAR, NULL, NULL, NULL);
	if (mVideoSwsContext == NULL) {
		return false;
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "prepare3.5");
	mRgbToYuvContext = sws_getContext(mVideoWidth, mVideoHeight, PIX_FMT_RGB565,
			mVideoWidth, mVideoHeight, stream->codec->pix_fmt, //
			SWS_BILINEAR, NULL, NULL, NULL);
	if (mRgbToYuvContext == NULL) {
		__android_log_print(ANDROID_LOG_ERROR, TAG,
				"get rbgtoYuv context failed");
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "prepare4");

	avStreamFPSTimeBase(stream, 0.04, &mVideoFPS, &mVideoTimeBase);
	deltaTime = 1000000 / mVideoFPS;
	int avpictureValid = avpicture_alloc(&mVideoPicture, PIX_FMT_RGB565, //PIX_FMT_RGB565,gmq switch 565 to 24
			mVideoWidth, mVideoHeight);

	int avpictureValid2 = avpicture_alloc(&mFilterPicture, PIX_FMT_RGB565, //PIX_FMT_RGB565,gmq switch 565 to 24
			mVideoWidth, mVideoHeight);

	if (avpictureValid < 0) {
		return false;
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "prepare5");
	return true;
}

void MediaPlayer::closeVideo() {

	if (mVideoSwsContext != NULL) {
		sws_freeContext(mVideoSwsContext);
		mVideoSwsContext = NULL;
	}
	avpicture_free(&mVideoPicture);
	avpicture_free(&mFilterPicture);

	if (mRgbToYuvContext != NULL) {
		sws_freeContext(mRgbToYuvContext);
		mRgbToYuvContext = NULL;
	}
	//av_free(mVideoFrame);
}

bool MediaPlayer::prepare() {
	status_t ret;
	mCurrentState = MEDIA_PLAYER_PREPARING;
	if (!(ret = prepareVideo())) {
		mCurrentState = MEDIA_PLAYER_STATE_ERROR;
		return false;
	}
//	prepareAudio();
//	if ((ret = prepareAudio()) != NO_ERROR) {
//		mCurrentState = MEDIA_PLAYER_STATE_ERROR;
//		return ret;
//	}
	mCurrentState = MEDIA_PLAYER_PREPARED;
	return true;
}

bool MediaPlayer::connect() {
	__android_log_print(ANDROID_LOG_ERROR, TAG, "begin connect url");
	if (mURL == NULL)
		return false;
	lastConnectTime = getCurrentTime();
	av_register_all();
	avfilter_register_all();
	AVFormatContext* formatCtx = avformat_alloc_context(); //avformat_free_context()
	__android_log_print(ANDROID_LOG_INFO, TAG, "setDataSource(%s)", mURL);
	AVIOInterruptCB icb = { check_interrupt, (void *) (this) };
	formatCtx->interrupt_callback = icb;
	int result = avformat_open_input(&formatCtx, mURL, NULL, NULL);
	__android_log_print(ANDROID_LOG_ERROR, TAG, "after open input");
	if (result != 0) {
		char buf[1024];
		av_strerror(result, buf, 1024);
		__android_log_print(ANDROID_LOG_INFO, TAG, "Couldn't open file %d(%s)",
				result, buf);
		if (formatCtx) {
			avformat_free_context(formatCtx);
		}

		return false;
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "connect 1");

//	formatCtx->probesize = 4 * 1024;
	// Retrieve stream information
	int r = avformat_find_stream_info(formatCtx, NULL);
	if (r < 0) {
		avformat_close_input(&formatCtx);
		return false;
	}
	if (stopStatus == 1) {
		return false;
	}

	__android_log_print(ANDROID_LOG_ERROR, TAG, "connect 2+ r=%d", r);
	mCurrentState = MEDIA_PLAYER_INITIALIZED;
	mMovieFile = formatCtx;
	__android_log_print(ANDROID_LOG_ERROR, TAG, "connect 3");
	return true;
}

static int check_interrupt(void *ctx) {

	if (isConnect == 1) //宸茶繛鎺ワ紝鍒ゆ柇涓庝笂娆℃暟鎹洿鏂版椂闂达紝濡�8s鏃犳暟鎹垯閲嶈繛
			{
		if (getCurrentTime() - lastDecodeTime > 8000) {
			jmethodID revokeMethod = DataRecvThreadJniEnv->GetMethodID(
					revokeClass, "OnRequestReconnect", "()V");
			DataRecvThreadJniEnv->CallVoidMethod(revokeInstance, revokeMethod);
			lastDecodeTime = getCurrentTime();
		}
	} else if (isConnect == 0) //鏈繛鎺ワ紝鍒ゆ柇杩炴帴鐨勬椂闂达紝濡傝繛鎺ヨ秴杩�15s,鍒欒秴鏃跺仠姝�
			{
		if (getCurrentTime() - lastConnectTime > 1000000) {
			jmethodID revokeMethod = DataRecvThreadJniEnv->GetMethodID(
					revokeClass, "OnRequestReconnect", "()V");
			DataRecvThreadJniEnv->CallVoidMethod(revokeInstance, revokeMethod);
			lastConnectTime = getCurrentTime();
//			stopStatus=1;
		}
	}

	if (stopStatus == 1) {
//		__android_log_print(ANDROID_LOG_ERROR, TAG, "stop =---------------------1");
		return 1;
	} else {
//		__android_log_print(ANDROID_LOG_ERROR, TAG, "stop =---------------------0");
		return 0;
	}

}

void MediaPlayer::setDataSource(const char *url) {
	if (mURL != NULL) {
		delete[] mURL;
		mURL = NULL;
	}
	mURL = url;
}

bool MediaPlayer::suspend() {
	mCurrentState = MEDIA_PLAYER_STOPPED;

	if (pthread_join(mPlayerThread, NULL) != 0) {
	}
	if (mMovieFile != NULL) {
		avformat_close_input(&mMovieFile);
		avformat_free_context(mMovieFile);
		mMovieFile = NULL;
	}
	return true;
}

bool MediaPlayer::resume() {
	mCurrentState = MEDIA_PLAYER_STARTED;
	return true;
}

long MediaPlayer::getCurrTime() {
	struct timeval tv;
	gettimeofday(&tv, NULL);
	return tv.tv_sec * 1000 + tv.tv_usec / 1000;
}

void MediaPlayer::decodeMovie(void* ptr) {
	gs_jvm->AttachCurrentThread(&DataRecvThreadJniEnv, NULL);
	jmethodID revokeMethod = DataRecvThreadJniEnv->GetMethodID(revokeClass,
			"OnConnectionStateChanged", "(III)V");
	mCurrentState = MEDIA_PLAYER_BEGIN_THREAD;
	__android_log_print(ANDROID_LOG_ERROR, TAG, "decode begin");

	isConnect = 0;
	if (connect()) {
		if (prepare()) {

			isConnect = 1;
			lastDecodeTime = getCurrentTime();
			DataRecvThreadJniEnv->CallVoidMethod(revokeInstance, revokeMethod,
					1, mVideoWidth, mVideoHeight);
			AVCodecContext* videoCodec;
			AVStream* stream_video;
			AVPacket* pPacket = NULL;

			stream_video = mMovieFile->streams[mVideoStreamIndex];
			videoCodec = stream_video->codec;

			mCurrentState = MEDIA_PLAYER_STARTED;
			unsigned char* mVideoParam = NULL;
			if (mVideoParamCount > 0) {
				mVideoParam = new unsigned char[mVideoParamCount];
			}

			bool firstIn = true;
			int completed = 0;
			Operation operation;
			int nowTime = getCurrentTime();
			int lastTime = getCurrentTime();

			while (!isFinish && mCurrentState != MEDIA_PLAYER_STOPPED) {
				if (mCurrentState == MEDIA_PLAYER_PAUSED) {
					usleep(200000);
					continue;
				}
				if (isRestart) {
					//重新播放
					av_seek_frame(mMovieFile, mVideoStreamIndex, 1*AV_TIME_BASE,
							AVSEEK_FLAG_BACKWARD);
					isRestart = false;
					usleep(2000);
					continue;
				}
				nowTime = getCurrentTime();
//				__android_log_print(ANDROID_LOG_ERROR, TAG, "decode begin %ld",nowTime-lastTime);
				if (nowTime - lastTime >= deltaTime / 1000
						|| decodeType == DECODE_PROCESS) {
					__android_log_print(ANDROID_LOG_ERROR, TAG, "enter decode");
					operation = FrameOperation_None;
					completed = 0;

					pPacket = new AVPacket();
					int readFrame = av_read_frame(mMovieFile, pPacket);
					if (readFrame < 0) {
						__android_log_print(ANDROID_LOG_ERROR, TAG,
								"read frame <0");
						av_free_packet(pPacket);
						delete pPacket;
						if (decodeType == DECODE_DISPLAY) {
							//循环播放
							av_seek_frame(mMovieFile, mVideoStreamIndex, 1*AV_TIME_BASE,
									AVSEEK_FLAG_BACKWARD);
							DataRecvThreadJniEnv->CallVoidMethod(revokeInstance,
									revokeMethod, 2, mVideoWidth, mVideoHeight); //重新循环
						} else if (decodeType == DECODE_PROCESS) {
							//如果只是解码用于处理，则到终点时停止
							isFinish = true;
						}
//						isFinish = true;
						usleep(20000);
						continue;
					}

					pts = pPacket->pts;
					dts = pPacket->dts;

					__android_log_print(ANDROID_LOG_ERROR, TAG,
							"pts---%d---dts--%d---", pts, dts);

					if (pPacket->stream_index == mVideoStreamIndex) {
						int pktSize = pPacket->size;
						__android_log_print(ANDROID_LOG_ERROR, TAG,
								"frame video");
						while (pktSize > 0) {
							__android_log_print(ANDROID_LOG_ERROR, TAG,
									"pkt size >0");
							mVideoFrame = av_frame_alloc();
							mVideoFilterFrame = av_frame_alloc();

//							//相当于前一篇博文例子中的width * height * 2
//							int bytes_num = avpicture_get_size(
//									AV_PIX_FMT_YUV420P, mVideoWidth,
//									mVideoHeight); //AV_PIX_FMT_YUV420P是FFmpeg定义的标明YUV420P图像格式的宏定义
//
//							//申请空间来存放图片数据。包含源数据和目标数据
//							uint8_t* buff = (uint8_t*) av_malloc(bytes_num);
//
//							//前面的av_frame_alloc函数，只是为这个AVFrame结构体分配了内存，
//							//而该类型的指针指向的内存还没分配。这里把av_malloc得到的内存和AVFrame关联起来。
//							//当然，其还会设置AVFrame的其他成员
//							avpicture_fill((AVPicture*) mVideoFilterFrame, buff,
//									AV_PIX_FMT_YUV420P, mVideoWidth,
//									mVideoWidth);

							int len = avcodec_decode_video2(videoCodec,
									mVideoFrame, &completed, pPacket);

							if (len <= 0) {
								__android_log_print(ANDROID_LOG_ERROR, TAG,
										"decode frame < 0");
								av_frame_free(&mVideoFrame);
								mVideoFrame = NULL;
								break;
							}
							if (mVideoFrame->key_frame == 1) {
								isIFrame = true;
								if (firstIn) {
									firstIn = false;
								}

							} else {
								isIFrame = false;
								if (firstIn) {
									av_frame_free(&mVideoFrame);
									mVideoFrame = NULL;
									break;
								}
							}

							if (completed && mVideoFrame->isValid == 1) {
								lastDecodeTime = getCurrentTime();
								__android_log_print(ANDROID_LOG_ERROR, TAG,
										"decode complete");
//							if (init_filters(filter_descr) < 0) {
//								__android_log_print(ANDROID_LOG_ERROR, TAG,
//										"filter init failed");
//							}
//							if (av_buffersrc_add_frame(buffersrc_ctx,
//									mVideoFrame) < 0) {
//								__android_log_print(ANDROID_LOG_ERROR, TAG,
//										"fail to add filter to frame");
//								break;
//							}
//
//							while (1) {
//
//								int ret = av_buffersink_get_frame(
//										buffersink_ctx, mVideoFilterFrame);
//								if (ret < 0) {
//									__android_log_print(ANDROID_LOG_ERROR, TAG,
//											"get frame ret=%d", ret);
//									break;
//								}
//
//								__android_log_print(ANDROID_LOG_ERROR, TAG,
//										"get frame format=%d----video format=%d",
//										mVideoFilterFrame->format,
//										mVideoFrame->format);
//
//								if (mVideoFilterFrame->format
//										== AV_PIX_FMT_YUV420P) {
//									decodeYuvFilter(stream_video);
//								}
//							}
								lastTime = nowTime;
								getRGBData(stream_video);
//								getRGBDataAndProcess(stream_video);
//							decodeYuv(stream_video);
							} else {
								__android_log_print(ANDROID_LOG_ERROR, TAG,
										"frame incomplete");
								isFrameIncompleted = true;
								av_frame_free(&mVideoFrame);
								mVideoFrame = NULL;
							}

							pktSize -= len;
						}

						if (!isFrameIncompleted) {
						} else {
							av_free_packet(pPacket); //free it if we don't need it anymore
							delete pPacket;
						}
					}
					__android_log_print(ANDROID_LOG_ERROR, TAG,
							"finish decode frame and sleep 2ms");
				}

				usleep(200);
			}

			mCurrentState = MEDIA_PLAYER_STOPPED;
			__android_log_print(ANDROID_LOG_ERROR, TAG, "decode end--finish");
			DataRecvThreadJniEnv->CallVoidMethod(revokeInstance, revokeMethod,
					-1, -1, -1);
			releaseResource();
		} else {
			__android_log_print(ANDROID_LOG_ERROR, TAG, "prepare decode end");
			DataRecvThreadJniEnv->CallVoidMethod(revokeInstance, revokeMethod,
					0, -1, -1);
			mCurrentState = MEDIA_PLAYER_STOPPED;
		}
	} else {
		__android_log_print(ANDROID_LOG_ERROR, TAG, "connect decode end");
		DataRecvThreadJniEnv->CallVoidMethod(revokeInstance, revokeMethod, 0,
				-1, -1);
		mCurrentState = MEDIA_PLAYER_STOPPED;
	}

	isConnect = 2;
	gs_jvm->DetachCurrentThread();
	DataRecvThreadJniEnv = NULL;
}

void MediaPlayer::releaseResource() {
	//gmq add begin
	if (frameYDataArray != NULL) {
		DataRecvThreadJniEnv->DeleteLocalRef(frameYDataArray);
		frameYDataArray = NULL;
		DataRecvThreadJniEnv->DeleteLocalRef(frameUDataArray);
		frameUDataArray = NULL;
		DataRecvThreadJniEnv->DeleteLocalRef(frameVDataArray);
		frameVDataArray = NULL;
	}

	if (yData != NULL) {
		delete[] yData;
		delete[] uData;
		delete[] vData;
	}

	if (slapRGBDataArray != NULL) {
		DataRecvThreadJniEnv->DeleteLocalRef(slapRGBDataArray);
		slapRGBDataArray = NULL;
	}

	if (mCurrentState == MEDIA_PLAYER_STATE_ERROR) {
	}
	mCurrentState = MEDIA_PLAYER_PLAYBACK_COMPLETE;
}

void MediaPlayer::decodeYuv(AVStream* stream) {
	int ylength = mVideoFrame->width * mVideoFrame->height;
	//gmq changed begin
	if (frameYDataArray == NULL) {
		frameYDataArray = DataRecvThreadJniEnv->NewByteArray(ylength);
		frameUDataArray = DataRecvThreadJniEnv->NewByteArray(ylength / 4);
		frameVDataArray = DataRecvThreadJniEnv->NewByteArray(ylength / 4);
	}

	if (revokeFrameDataMethod == NULL) {
		revokeFrameDataMethod = DataRecvThreadJniEnv->GetMethodID(revokeClass,
				"OnFrameDataArrived", "(II[B[B[B)Z");
	}
	if (isIFrame) {
		isFrameIncompleted = false; //set incomplete until next key frame come
	}
	if (isFrameIncompleted) {
		av_frame_free(&mVideoFrame);
		mVideoFrame = NULL;
	} else {
		//gmq add begin
		__android_log_print(ANDROID_LOG_ERROR, TAG,
				"decode yuv 1 w=%d linesize=%d", stream->codec->width,
				mVideoFrame->linesize[0]);
		if (stream->codec->width == mVideoFrame->linesize[0]) {
			DataRecvThreadJniEnv->SetByteArrayRegion(frameYDataArray, 0,
					ylength, (jbyte *) mVideoFrame->data[0]);
			DataRecvThreadJniEnv->SetByteArrayRegion(frameUDataArray, 0,
					ylength / 4, (jbyte *) mVideoFrame->data[1]);
			DataRecvThreadJniEnv->SetByteArrayRegion(frameVDataArray, 0,
					ylength / 4, (jbyte *) mVideoFrame->data[2]);
			__android_log_print(ANDROID_LOG_ERROR, TAG,
					"decode yuv 1 add data");
		} else {
			__android_log_print(ANDROID_LOG_ERROR, TAG,
					"decode yuv 1 add else data");
			if (yData == NULL) {
				yData = new uint8_t[ylength];
				uData = new uint8_t[ylength / 4];
				vData = new uint8_t[ylength / 4];
			}
			int offset = 0;
			int lineSize = 0;
			int count = stream->codec->width;
			for (int i = 0; i < stream->codec->height; i++) {
				memcpy(yData + offset, mVideoFrame->data[0] + lineSize, count);
				offset += count;
				lineSize += mVideoFrame->linesize[0];
			}
			DataRecvThreadJniEnv->SetByteArrayRegion(frameYDataArray, 0,
					ylength, (jbyte *) yData);
			offset = 0;
			lineSize = 0;
			count = stream->codec->width / 2;
			for (int i = 0; i < stream->codec->height / 2; i++) {
				memcpy(uData + offset, mVideoFrame->data[1] + lineSize, count);
				memcpy(vData + offset, mVideoFrame->data[2] + lineSize, count);
				offset += count;
				lineSize += mVideoFrame->linesize[1];
			}
			DataRecvThreadJniEnv->SetByteArrayRegion(frameUDataArray, 0,
					ylength / 4, (jbyte *) uData);
			DataRecvThreadJniEnv->SetByteArrayRegion(frameVDataArray, 0,
					ylength / 4, (jbyte *) vData);

		}
		DataRecvThreadJniEnv->CallBooleanMethod(revokeInstance,
				revokeFrameDataMethod, mVideoFrame->width, mVideoFrame->height,
				frameYDataArray, frameUDataArray, frameVDataArray);
		if (mVideoFrame != NULL) {
			av_frame_free(&mVideoFrame);
			mVideoFrame = NULL;
		}

	}
}

void MediaPlayer::decodeYuvFilter(AVStream* stream) {
	int ylength = mVideoFilterFrame->width * mVideoFilterFrame->height;
	//gmq changed begin
	if (frameYDataArray == NULL) {
		frameYDataArray = DataRecvThreadJniEnv->NewByteArray(ylength);
		frameUDataArray = DataRecvThreadJniEnv->NewByteArray(ylength / 4);
		frameVDataArray = DataRecvThreadJniEnv->NewByteArray(ylength / 4);
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "decode yuv w=%d---h=%d",
			mVideoFilterFrame->width, mVideoFilterFrame->height);

	if (revokeFrameDataMethod == NULL) {
		revokeFrameDataMethod = DataRecvThreadJniEnv->GetMethodID(revokeClass,
				"OnFrameDataArrived", "(II[B[B[B)Z");
	}
	if (isIFrame) {
		isFrameIncompleted = false; //set incomplete until next key frame come
	}
	if (isFrameIncompleted) {
		av_frame_free(&mVideoFilterFrame);
		mVideoFilterFrame = NULL;
	} else {
		//gmq add begin

		__android_log_print(ANDROID_LOG_ERROR, TAG,
				"decode yuv 1 w=%d linesize=%d", stream->codec->width,
				mVideoFilterFrame->linesize[0]);
		if (stream->codec->width == mVideoFilterFrame->linesize[0]) {
			DataRecvThreadJniEnv->SetByteArrayRegion(frameYDataArray, 0,
					ylength, (jbyte *) mVideoFilterFrame->data[0]);
			DataRecvThreadJniEnv->SetByteArrayRegion(frameUDataArray, 0,
					ylength / 4, (jbyte *) mVideoFilterFrame->data[1]);
			DataRecvThreadJniEnv->SetByteArrayRegion(frameVDataArray, 0,
					ylength / 4, (jbyte *) mVideoFilterFrame->data[2]);
			__android_log_print(ANDROID_LOG_ERROR, TAG,
					"decode yuv 1 add data");
		} else {
			if (yData == NULL) {
				yData = new uint8_t[ylength];
				uData = new uint8_t[ylength / 4];
				vData = new uint8_t[ylength / 4];
			}
			int offset = 0;
			int lineSize = 0;
			int count = stream->codec->width;
			for (int i = 0; i < stream->codec->height; i++) {
				memcpy(yData + offset, mVideoFilterFrame->data[0] + lineSize,
						count);
				offset += count;
				lineSize += mVideoFilterFrame->linesize[0];
			}
			DataRecvThreadJniEnv->SetByteArrayRegion(frameYDataArray, 0,
					ylength, (jbyte *) yData);
			offset = 0;
			lineSize = 0;
			count = stream->codec->width / 2;
			for (int i = 0; i < stream->codec->height / 2; i++) {
				memcpy(uData + offset, mVideoFilterFrame->data[1] + lineSize,
						count);
				memcpy(vData + offset, mVideoFilterFrame->data[2] + lineSize,
						count);
				offset += count;
				lineSize += mVideoFilterFrame->linesize[1];
			}
			DataRecvThreadJniEnv->SetByteArrayRegion(frameUDataArray, 0,
					ylength / 4, (jbyte *) uData);
			DataRecvThreadJniEnv->SetByteArrayRegion(frameVDataArray, 0,
					ylength / 4, (jbyte *) vData);

			__android_log_print(ANDROID_LOG_ERROR, TAG,
					"decode yuv 1 else add data");

		}

		__android_log_print(ANDROID_LOG_ERROR, TAG, "decode java method");
		DataRecvThreadJniEnv->CallBooleanMethod(revokeInstance,
				revokeFrameDataMethod, mVideoFilterFrame->width,
				mVideoFilterFrame->height, frameYDataArray, frameUDataArray,
				frameVDataArray);
		__android_log_print(ANDROID_LOG_ERROR, TAG, "decode java method after");
		if (mVideoFilterFrame != NULL) {
			av_frame_free(&mVideoFilterFrame);
			mVideoFilterFrame = NULL;
		}

	}
}

void MediaPlayer::avStreamFPSTimeBase(AVStream *st, float defaultTimeBase,
		float *pFPS, float *pTimeBase) {
	float fps, timebase;

	if (st->time_base.den && st->time_base.num)
		timebase = av_q2d(st->time_base);
	else if (st->codec->time_base.den && st->codec->time_base.num)
		timebase = av_q2d(st->codec->time_base);
	else
		timebase = defaultTimeBase;

	if (st->codec->ticks_per_frame != 1) {
//		__android_log_print(ANDROID_LOG_INFO, TAG,
//				"WARNING: st.codec.ticks_per_frame=%d",
//				st->codec->ticks_per_frame);
		//timebase *= st->codec->ticks_per_frame;
	}

	if (st->avg_frame_rate.den && st->avg_frame_rate.num)
		fps = av_q2d(st->avg_frame_rate);
	else if (st->r_frame_rate.den && st->r_frame_rate.num)
		fps = av_q2d(st->r_frame_rate);
	else
		fps = 1.0 / timebase;

	if (pFPS)
		*pFPS = fps;
	if (pTimeBase)
		*pTimeBase = timebase;
	__android_log_print(ANDROID_LOG_INFO, TAG, "timebase %f fps %f", timebase,
			fps);

	jmethodID timeBaseMethod = DataRecvThreadJniEnv->GetMethodID(revokeClass,
			"onGetFps", "(F)V");
	DataRecvThreadJniEnv->CallVoidMethod(revokeInstance, timeBaseMethod, fps);
}

void* MediaPlayer::startPlayer(void* ptr) {
	sPlayer->decodeMovie(ptr);
}

bool MediaPlayer::start(int type) {
	if (mCurrentState != MEDIA_PLAYER_IDLE
			&& mCurrentState != MEDIA_PLAYER_STOPPED) {
		return false;
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "open start...");
	decodeType = type;
	pthread_create(&mPlayerThread, NULL, startPlayer, NULL);

	return true;
}

void MediaPlayer::setStop(int isStop) {
	stopStatus = isStop;
}

bool MediaPlayer::stop() {
	//pthread_mutex_lock(&mLock);
	if (isPlaying()) {
		mCurrentState = MEDIA_PLAYER_STOPPED;
		if (pthread_join(mPlayerThread, NULL) != 0) {
		}
		closeVideo();
//		__android_log_print(ANDROID_LOG_ERROR, TAG, "after close video");
		// Close the video file
		if (mMovieFile != NULL) {
			avformat_close_input(&mMovieFile);
			avformat_free_context(mMovieFile);
			mMovieFile = NULL;
		}
		return true;
	} else {
		return false;
	}
}

bool MediaPlayer::pause() {
	mCurrentState = MEDIA_PLAYER_PAUSED;
	return true;
}

bool MediaPlayer::isPlaying() {
	return mCurrentState == MEDIA_PLAYER_STARTED
			|| mCurrentState == MEDIA_PLAYER_DECODED
			|| mCurrentState == MEDIA_PLAYER_PAUSED;
}

bool MediaPlayer::isBegin() {
	return mCurrentState == MEDIA_PLAYER_BEGIN_THREAD
			|| mCurrentState == MEDIA_PLAYER_INITIALIZED
			|| mCurrentState == MEDIA_PLAYER_PREPARING
			|| mCurrentState == MEDIA_PLAYER_PREPARED;
}

bool MediaPlayer::getVideoWidth(int *w) {
	if (mCurrentState < MEDIA_PLAYER_PREPARED) {
		return false;
	}
	*w = mVideoWidth;
	return true;
}

bool MediaPlayer::getVideoHeight(int *h) {
	if (mCurrentState < MEDIA_PLAYER_PREPARED) {
		return false;
	}
	*h = mVideoHeight;
	return true;
}

bool MediaPlayer::getDuration(int *msec) {
	if (mCurrentState < MEDIA_PLAYER_PREPARED) {
		return false;
	}
	*msec = mDuration / 1000;
	return true;
}

int MediaPlayer::init_filters(const char *filters_descr) {
	char args[512];
	int ret;
	AVFilter *buffersrc = avfilter_get_by_name("buffer");
	AVFilter *buffersink = avfilter_get_by_name("ffbuffersink");
	AVFilterInOut *outputs = avfilter_inout_alloc();
	AVFilterInOut *inputs = avfilter_inout_alloc();
	enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE };
	AVBufferSinkParams *buffersink_params;

	filter_graph = avfilter_graph_alloc();

	/* buffer video source: the decoded frames from the decoder will be inserted here. */
	snprintf(args, sizeof(args),
			"video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
			mVideoWidth, mVideoHeight,
			mMovieFile->streams[mVideoStreamIndex]->codec->pix_fmt,
			mMovieFile->streams[mVideoStreamIndex]->codec->time_base.num,
			mMovieFile->streams[mVideoStreamIndex]->codec->time_base.den,
			mMovieFile->streams[mVideoStreamIndex]->codec->sample_aspect_ratio.num,
			mMovieFile->streams[mVideoStreamIndex]->codec->sample_aspect_ratio.den);

	ret = avfilter_graph_create_filter(&buffersrc_ctx, buffersrc, "in", args,
			NULL, filter_graph);
	if (ret < 0) {
		__android_log_print(ANDROID_LOG_ERROR, TAG, "filter src buffer fail%d",
				ret);
		return ret;
	}

	__android_log_print(ANDROID_LOG_ERROR, TAG, "filter src buffer fail%s",
			args);

	/* buffer video sink: to terminate the filter chain. */
	buffersink_params = av_buffersink_params_alloc();
	buffersink_params->pixel_fmts = pix_fmts;
	ret = avfilter_graph_create_filter(&buffersink_ctx, buffersink, "out", NULL,
			buffersink_params, filter_graph);
	av_free(buffersink_params);
	if (ret < 0) {
		__android_log_print(ANDROID_LOG_ERROR, TAG, "filter sink buffer fail");
		return ret;
	}

	if (ret < 0) {
		__android_log_print(ANDROID_LOG_ERROR, TAG,
				"can not set output format");
		return ret;
	}

	/* Endpoints for the filter graph. */
	outputs->name = av_strdup("in");
	outputs->filter_ctx = buffersrc_ctx;
	outputs->pad_idx = 0;
	outputs->next = NULL;

	inputs->name = av_strdup("out");
	inputs->filter_ctx = buffersink_ctx;
	inputs->pad_idx = 0;
	inputs->next = NULL;

	if ((ret = avfilter_graph_parse_ptr(filter_graph, filters_descr, &inputs,
			&outputs, NULL)) < 0) {
		__android_log_print(ANDROID_LOG_ERROR, TAG, "filter parse ptr fail%d",
				ret);
		return ret;
	}

	if ((ret = avfilter_graph_config(filter_graph, NULL)) < 0) {
		__android_log_print(ANDROID_LOG_ERROR, TAG, "filter config fail%d",
				ret);
		return ret;
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "filter init success");
	return 0;
}
void MediaPlayer::getRGBData(AVStream* stream) {
	sws_scale(mVideoSwsContext, mVideoFrame->data, mVideoFrame->linesize, 0,
			stream->codec->height, mVideoPicture.data, mVideoPicture.linesize);
	int length = mVideoPicture.linesize[0] * mVideoHeight;
	if (slapRGBDataArray == NULL) {
		slapRGBDataArray = DataRecvThreadJniEnv->NewByteArray(length);
	}

	if (revokeSlapDataMethod == NULL) {
		revokeSlapDataMethod = DataRecvThreadJniEnv->GetMethodID(revokeClass,
				"OnSlapDataArrived", "(II[BIIZ)V");
	}
	DataRecvThreadJniEnv->SetByteArrayRegion(slapRGBDataArray, 0, length,
			(jbyte *) mVideoPicture.data[0]);
	DataRecvThreadJniEnv->CallVoidMethod(revokeInstance, revokeSlapDataMethod,
			mVideoWidth, mVideoHeight, slapRGBDataArray, decodeType, pts,
			isIFrame);
}

void MediaPlayer::getRGBDataAndProcess(AVStream* stream) {
	__android_log_print(ANDROID_LOG_ERROR, TAG, "get rgb and process begin");
	sws_scale(mVideoSwsContext, mVideoFrame->data, mVideoFrame->linesize, 0,
			stream->codec->height, mVideoPicture.data, mVideoPicture.linesize);
	int length = mVideoPicture.linesize[0] * mVideoHeight;
	if (slapRGBDataArray == NULL) {
		slapRGBDataArray = DataRecvThreadJniEnv->NewByteArray(length);
	}

	if (getFilterBmpDataMethod == NULL) {
		getFilterBmpDataMethod = DataRecvThreadJniEnv->GetMethodID(revokeClass,
				"addFilterToBmp", "(II[BZ)V");
	}
	DataRecvThreadJniEnv->SetByteArrayRegion(slapRGBDataArray, 0, length,
			(jbyte *) mVideoPicture.data[0]);

	DataRecvThreadJniEnv->CallVoidMethod(revokeInstance, getFilterBmpDataMethod,
			mVideoWidth, mVideoHeight, slapRGBDataArray, isIFrame);
	__android_log_print(ANDROID_LOG_ERROR, TAG, "get rgb and process end");
}

void MediaPlayer::setRestart() {
	isRestart = true;
}

void MediaPlayer::setDecodeType(int type) {
	decodeType = type;
}

//bool MediaPlayer::write_video_frame(AVStream *st, AVFrame *frame) {
//	int ret;
//	AVCodecContext *c = st->codec;
//
//	AVPacket pkt = { 0 };
//	av_init_packet(&pkt);
//	if (fmt->flags & AVFMT_RAWPICTURE) {
//		pkt.flags |= AV_PKT_FLAG_KEY;
//		pkt.stream_index = st->index;
//		pkt.data = mPicture.data[0];
//		pkt.size = sizeof(AVPicture);
//
//		ret = av_interleaved_write_frame(oc, &pkt);
//	} else {
//		int got_packet = 0;
//
//		/* encode the image */
//		if (mVideoSwsContext != NULL) {
//			sws_scale(mVideoSwsContext, frame->data, frame->linesize, 0,
//					picHeight, mPicture.data, mPicture.linesize);
//
//			//important, to control fps of the mp4
//			mFrame->pts = frame->pts;
//			ret = avcodec_encode_video2(c, &pkt, mFrame, &got_packet);
//		} else {
//			ret = avcodec_encode_video2(c, &pkt, frame, &got_packet);
//		}
//
//		if (ret < 0) {
//			fprintf(stderr, "Error encoding video frame: %s\n",
//					av_err2str(ret));
//			return false;
//		}
//		/* If size is zero, it means the image was buffered. */
//
//		if (!ret && got_packet && pkt.size > 0) {
//			pkt.stream_index = st->index;
//
//			/* Write the compressed frame to the media file. */
//			ret = av_interleaved_write_frame(oc, &pkt);
//		} else {
//			ret = 0;
//		}
//	}
//
//	if (ret != 0) {
//		fprintf(stderr, "Error while writing video frame: %s\n",av_err2str(ret));
//		return false;
//	}
//
//	frame_count++;
//	return true;
//}
