#include <android/log.h>
#include "writer.h"
#include "android/bitmap.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avio.h"
#include "libswscale/swscale.h"
#include "libavutil/mathematics.h"

#define TAG "writer"

/**
 * Construct an idle Writer: no recording active, worker flag set, queue
 * allocated. Every member is initialized because the worker thread
 * (handleRun) reads scopeOn/requireScopeStop before StartScopeEx() has
 * ever run.
 */
Writer::Writer() {
	_pInputCtx = NULL;
	_pOutputFmt = NULL;
	oc = NULL;
	video_st = NULL;
	video_codec = NULL;
	_pkgPtsIndex = 0;
	_pkgDtsIndex = 0;
	pts = 0;
	dts = 0;
	scopeOn = false;
	// BUG FIX: was uninitialized — handleRun() tests it before any
	// StartScopeEx() call, which is undefined behavior.
	requireScopeStop = false;
	frame_count = 0;
	videoPtsIndex = 0;
	swr_ctx = NULL;
	input_stream = NULL;
	output_stream = NULL;
	mRunning = true;
	mQueue = new ScopeQueue();
	mVideoSwsContext = NULL;
	mFrame = NULL;
	YUVFrame = NULL;
	picWidth = 0;
	picHeight = 0;
}

/**
 * Destructor: if the worker loop is still active, abort the queue and
 * join the thread before releasing shared state.
 */
Writer::~Writer() {
	if (mRunning) {
		mQueue->Abort();
		__android_log_print(ANDROID_LOG_INFO, TAG,
				"waiting on end of decoder thread");
		const int joinResult = wait();
		if (joinResult != 0) {
			__android_log_print(ANDROID_LOG_ERROR, TAG,
					"Couldn't cancel IDecoder: %i", joinResult);
			// Thread may still be running: leak mQueue rather than
			// free it out from under a live consumer.
			return;
		}
	}

	delete mQueue;
}

void Writer::handleRun(void* ptr) {
	bool aborted = false;
	int queueSize = 0;

	while (mRunning) {
		__android_log_print(ANDROID_LOG_ERROR, TAG, "writer run");
		aborted = mQueue->isAborted();
		if (scopeOn) {
			__android_log_print(ANDROID_LOG_ERROR, TAG, "writer run scop on");
			queueSize = mQueue->Size();
			if (queueSize > 0) {
				ScopeFrame* frame = mQueue->Out();
				__android_log_print(ANDROID_LOG_ERROR, TAG,
						"get queue and write");
				record(frame->frame);
			} else {
				usleep(2000);
			}

			if (requireScopeStop && queueSize == 0) {
				requireScopeStop = false;
				scopeOn = false;

				__android_log_print(ANDROID_LOG_ERROR, TAG, "stop scop begin");

				if (av_write_trailer(oc) != 0) {
					__android_log_print(ANDROID_LOG_ERROR, TAG,
							"stop scop write trailer wrong");
					return;
				}

				/* Close each codec. */
//				if (video_st)
//					close_video(oc, video_st);
				/* Close the output file. */

				if (!(_pOutputFmt->flags & AVFMT_NOFILE)) {
					if (avio_close(oc->pb) < 0) {
						return;
						__android_log_print(ANDROID_LOG_ERROR, TAG,
								"stop scop close output file wrong");
					}
				}

				/* free the stream */
				avformat_free_context(oc);
				__android_log_print(ANDROID_LOG_ERROR, TAG,
						"stop scop after free");
//				swr_free(&swr_ctx);
//				swr_ctx = NULL;

				__android_log_print(ANDROID_LOG_ERROR, TAG, "stop scope...");
			}
		} else {
			usleep(2000);
		}

		if (aborted && queueSize == 0) {
			mRunning = false;
		}
	}
}
bool Writer::StartScopeEx(const char* outputFile, int videoWidth,
		int videoHeight) {
	__android_log_print(ANDROID_LOG_ERROR, TAG, "record  start");
	if (scopeOn) {
		__android_log_print(ANDROID_LOG_ERROR, TAG, "record  start return");
		return false;
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "record  start1");
	av_register_all();
	avformat_alloc_output_context2(&oc, NULL, NULL, outputFile);
	if (!oc) {
		printf(
				"Could not deduce output format from file extension: using MPEG.\n");
		avformat_alloc_output_context2(&oc, NULL, "mpeg", outputFile);
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "record  start2");
	// 强制指定 264 编码
	//	oc->oformat->video_codec = CODEC_ID_H264;

	_pOutputFmt = oc->oformat;

	/* Add the audio and video streams using the default format codecs
	 * and initialize the codecs. */
	video_st = NULL;
	__android_log_print(ANDROID_LOG_ERROR, TAG, "record  start3");
	//fmt->video_codec = AV_CODEC_ID_H264;
	if (_pOutputFmt->video_codec != AV_CODEC_ID_NONE) {
		picWidth = videoWidth;
		picHeight = videoHeight;

		video_st = add_stream(oc, &video_codec, _pOutputFmt->video_codec,
		STREAM_PIX_FMT);
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "record  start4");
	mVideoSwsContext = NULL;
	mVideoSwsContext = sws_getContext(picWidth, picHeight, PIX_FMT_RGBA,
			picWidth, picHeight,
			STREAM_PIX_FMT, SWS_BICUBIC, NULL, NULL, NULL);

	/* Now that all the parameters are set, we can open the audio and
	 * video codecs and allocate the necessary encode buffers. */
	if (video_st) {
		if (!open_video(oc, video_codec, video_st)) {
			__android_log_print(ANDROID_LOG_ERROR, TAG,
					"record  start4.5 fail");
			return false;
		}
	}

	av_dump_format(oc, 0, outputFile, 1);
	int ret;

	__android_log_print(ANDROID_LOG_ERROR, TAG, "record  start5");
	/* open the output file, if needed */
	if (!(_pOutputFmt->flags & AVFMT_NOFILE)) {
		ret = avio_open(&oc->pb, outputFile, AVIO_FLAG_WRITE);
		if (ret < 0) {
			fprintf(stderr, "Could not open '%s': %s\n", outputFile,
					av_err2str(ret));
			return false;
		}
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "record  start6");

	/* Write the stream header, if any. */
	ret = avformat_write_header(oc, NULL);
	if (ret < 0) {
		fprintf(stderr, "Error occurred when opening output file: %s\n",
				av_err2str(ret));
		return false;
	}

	videoPtsIndex = 0;
	frame_count = 0;
	scopeOn = true;
	requireScopeStop = false;
	__android_log_print(ANDROID_LOG_ERROR, TAG, "record  end");

	return true;

}

/**
 * Request an asynchronous stop of the current scope recording.
 * The worker thread finalizes the file once its queue is drained.
 * @return false when no recording is in progress, true otherwise.
 */
bool Writer::StopScope() {
	if (!scopeOn) {
		return false;
	}

	requireScopeStop = true;
	return true;
}

bool Writer::slapPic(const char *outputFile, AVFrame *frame,
		AVPixelFormat pixelFormat) {
	AVOutputFormat *fmt_pic;
	AVFormatContext *oc_pic;
	AVStream *video_st_pic;
	AVCodec *pic_codec;

	/* allocate the output media context */
	avformat_alloc_output_context2(&oc_pic, NULL, NULL, outputFile);
	if (!oc_pic) {
		printf(
				"Could not deduce output format from file extension: using MPEG.\n");

		return false;
	}

	int ret;
	fmt_pic = oc_pic->oformat;

	/* Add the audio and video streams using the default format codecs
	 * and initialize the codecs. */
	video_st_pic = NULL;

	if (fmt_pic->video_codec != AV_CODEC_ID_NONE) {
		picWidth = frame->width;
		picHeight = frame->height;

		video_st_pic = add_stream(oc_pic, &pic_codec, fmt_pic->video_codec,
				pixelFormat);
	}

	/* Now that all the parameters are set, we can open the audio and
	 * video codecs and allocate the necessary encode buffers. */
	if (video_st_pic) {
		if (!open_video(oc_pic, pic_codec, video_st_pic)) {
//		if (avcodec_open2(pCodecCtx, pic_codec, NULL) < 0) {
			printf("��������ʧ�ܣ�");
			return false;
		}
	}

	av_dump_format(oc_pic, 0, outputFile, 1);

	/* open the output file, if needed */
//	ret = avio_open(&oc_pic->pb, outputFile, AVIO_FLAG_READ_WRITE);
//	if (ret < 0) {
//		fprintf(stderr, "Could not open '%s': %s\n", outputFile,
//				av_err2str(ret));
//		return false;
//	}
	/* Write the stream header, if any. */
	ret = avformat_write_header(oc_pic, NULL);
	if (ret < 0) {
		fprintf(stderr, "Error occurred when opening output file: %s\n",
				av_err2str(ret));
		return false;
	}

//	/* write interleaved audio and video frames */
	//write_video_frame(video_st_pic, frame);

	AVPacket pkt = { 0 };
	int got_packet = 0;
	av_init_packet(&pkt);

	ret = avcodec_encode_video2(video_st_pic->codec, &pkt, frame, &got_packet);
	if (ret < 0) {
		fprintf(stderr, "Error encoding video frame: %s\n", av_err2str(ret));
		return false;
	}
	/* If size is zero, it means the image was buffered. */

	if (!ret && got_packet && pkt.size) {
		pkt.stream_index = video_st_pic->index;

		/* Write the compressed frame to the media file. */
		ret = av_interleaved_write_frame(oc_pic, &pkt);
	} else {
		ret = 0;
	}

	if (ret != 0) {
		fprintf(stderr, "Error while writing video frame: %s\n",
				av_err2str(ret));
		return false;
	}

	av_write_trailer(oc_pic);

	if (video_st_pic)
		close_video(oc_pic, video_st_pic);

	/* Close the output file. */
	avio_close(oc_pic->pb);

	/* free the stream */
	avformat_free_context(oc_pic);

	return true;
}

/**
 * Hand a raw frame to the writer thread. A pooled ScopeFrame is reused
 * when the queue can supply one, otherwise a fresh one is allocated.
 * @return false when no recording is active, true otherwise (even if
 *         the queue rejected the entry — matching existing behavior).
 */
bool Writer::WriteVideoFrame1(AVFrame* frame, Operation operation) {
	if (!scopeOn) {
		return false;
	}

	ScopeFrame* entry = mQueue->getExistFrame();
	__android_log_print(ANDROID_LOG_ERROR, TAG, "WriteVideoFrame1");
	if (entry == NULL) {
		entry = new ScopeFrame();
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "WriteVideoFrame2");
	entry->setFrame(frame);
	entry->operation = operation;
	__android_log_print(ANDROID_LOG_ERROR, TAG, "WriteVideoFrame3");
	if (!mQueue->Enter(entry)) {
		__android_log_print(ANDROID_LOG_ERROR, TAG,
				"WriteVideoFrame--enter queue wrong");
		// Detach the frame again so the rejected entry does not hold it.
		entry->setFrame(NULL);
	}
	__android_log_print(ANDROID_LOG_ERROR, TAG, "WriteVideoFrame4");

	return true;
}

/**
 * Hand an already-encoded packet to the writer thread. A pooled
 * ScopeFrame is reused when available, otherwise a fresh one is made.
 * @return false when no recording is active, true otherwise (even if
 *         the queue rejected the entry — matching existing behavior).
 */
bool Writer::WriteVideoFrame(AVPacket* packet, Operation operation) {
	if (!scopeOn) {
		return false;
	}

	ScopeFrame* entry = mQueue->getExistFrame();
	if (entry == NULL) {
		entry = new ScopeFrame();
	}
	entry->setPacket(packet);
	entry->operation = operation;

	if (!mQueue->Enter(entry)) {
		__android_log_print(ANDROID_LOG_ERROR, TAG,
				"WriteVideoFrame--enter queue wrong");
		// Detach the packet so the rejected entry does not hold it.
		entry->setPacket(NULL);
	}

	return true;
}

/**
 * Open the encoder attached to stream 'st'. When a software scaler is
 * configured (mVideoSwsContext != NULL), also allocate the intermediate
 * frame/picture pair the scaler writes into (mFrame shares mPicture's
 * buffers). Returns false on any failure.
 */
bool Writer::open_video(AVFormatContext *oc, AVCodec *codec, AVStream *st) {
	int ret;
	AVCodecContext *c = st->codec;

	/* open the codec */
	ret = avcodec_open2(c, codec, NULL);
	if (ret < 0) {
		__android_log_print(ANDROID_LOG_ERROR, TAG, "open video wrong");
		fprintf(stderr, "Could not open video codec: %s\n", av_err2str(ret));
		return false;
	}

	if (mVideoSwsContext != NULL) {
		mFrame = av_frame_alloc();
		if (!mFrame) {
			return false;
		}

		ret = avpicture_alloc(&mPicture, STREAM_PIX_FMT, c->width, c->height);
		if (ret < 0) {
			// BUG FIX: release the frame allocated above instead of
			// leaking it on this error path.
			av_frame_free(&mFrame);
			mFrame = NULL;
			return false;
		}

		/* copy data and linesize picture pointers to frame */
		*((AVPicture *) mFrame) = mPicture;
	}

	return true;
}

/**
 * Encode one video frame and interleave-write the resulting packet to
 * the open output context. Synthetic, monotonically increasing pts/dts
 * (1000-tick steps) are stamped on each packet.
 */
void Writer::record(AVFrame* frame) {
	__android_log_print(ANDROID_LOG_ERROR, TAG, "record begin");
	int ret;
	AVCodecContext *c = video_st->codec;
	AVPacket pkt = { 0 };
	int got_packet = 0;
	av_init_packet(&pkt);
	// BUG FIX: removed a malloc'd yuv buffer that was immediately
	// overwritten with frame->data (leaking the allocation) and later
	// passed to free() — freeing a pointer that was never returned by
	// malloc corrupts the heap. The buffer and the temporary YUVFrame
	// built from it were never consumed by the encoder (which is fed
	// 'frame' directly), so both are dropped entirely.

	__android_log_print(ANDROID_LOG_ERROR, TAG, "record begin 1");
	ret = avcodec_encode_video2(c, &pkt, frame, &got_packet);
	__android_log_print(ANDROID_LOG_ERROR, TAG, "record begin 2");
	pkt.pts = _pkgPtsIndex;
	pkt.dts = pkt.pts;
	_pkgPtsIndex += 1000;

	if (!ret && got_packet && pkt.size > 0) {
		pkt.stream_index = video_st->index;
		/* Write the compressed frame to the media file. */
		ret = av_interleaved_write_frame(oc, &pkt);
		__android_log_print(ANDROID_LOG_ERROR, TAG, "record write frame");
	} else {
		ret = 0;
	}
	frame_count++;
	av_free_packet(&pkt);
	__android_log_print(ANDROID_LOG_ERROR, TAG, "record begin end");
}

/**
 * Write an already-encoded packet to the output context, restamping its
 * pts/dts with a synthetic monotonically increasing counter (1000-tick
 * steps) so timestamps always satisfy pts >= dts and never go backward.
 */
void Writer::writeFrame(AVPacket* pkt) {
	__android_log_print(ANDROID_LOG_ERROR, TAG, "writeFram1");

	// Overwrite incoming timestamps with our own monotonic clock.
	pkt->pts = _pkgPtsIndex;
	pkt->dts = _pkgPtsIndex;
	__android_log_print(ANDROID_LOG_ERROR, TAG, "writeFram2");
	_pkgPtsIndex += 1000;

	__android_log_print(ANDROID_LOG_ERROR, TAG, "writeFram3");
	if (pkt->size > 0) {
		pkt->stream_index = video_st->index;
		const int writeResult = av_write_frame(oc, pkt);
		__android_log_print(ANDROID_LOG_ERROR, TAG, "writeFram-i=%d",
				writeResult);

		if (writeResult < 0) {
			__android_log_print(ANDROID_LOG_ERROR, TAG,
					"writeFram--write wrong");
		}
	}

	frame_count++;
}
/**
 * Close the stream's codec and release the scaler-related buffers that
 * open_video() allocated.
 */
void Writer::close_video(AVFormatContext *oc, AVStream *st) {
	avcodec_close(st->codec);

	if (mVideoSwsContext != NULL) {
		avpicture_free(&mPicture);
		av_frame_free(&mFrame);
		mFrame = NULL;
		// BUG FIX: the SwsContext itself was leaked — the pointer was
		// only NULLed. Free it before clearing the handle.
		sws_freeContext(mVideoSwsContext);
		mVideoSwsContext = NULL;
	}
}
/**
 * Seed the audio test-tone generator: a 110 Hz base tone whose
 * frequency increases by 110 Hz for every second of audio produced.
 */
void Writer::initialSampleData(AVCodecContext* c) {
	t = 0;
	const double basePhaseStep = 2 * M_PI * 110.0 / c->sample_rate;
	tincr = basePhaseStep;
	tincr2 = basePhaseStep / c->sample_rate;
}

/**
 * Create a new stream in 'oc' for 'codec_id' and preset its codec
 * context: audio is 8 kHz mono S16 at 64 kbit/s; video is 1 Mbit/s at
 * STREAM_FRAME_RATE with picWidth x picHeight resolution.
 *
 * @param oc          output context that will own the stream
 * @param codec       [out] receives the located encoder
 * @param codec_id    encoder to look up
 * @param pixelFormat video pixel format; AV_PIX_FMT_NONE selects
 *                    STREAM_PIX_FMT
 * @return the new stream, or NULL when the encoder cannot be found or
 *         the stream cannot be allocated (callers NULL-check).
 */
AVStream* Writer::add_stream(AVFormatContext *oc, AVCodec **codec,
		AVCodecID codec_id, AVPixelFormat pixelFormat) {
	AVCodecContext *c;
	AVStream *st;

	/* find the encoder */
	*codec = avcodec_find_encoder(codec_id);
	if (!(*codec)) {
		fprintf(stderr, "Could not find encoder for '%s'\n",
				avcodec_get_name(codec_id));
		// BUG FIX: exit(1) killed the entire app process; report the
		// failure to the caller instead (all call sites NULL-check).
		return NULL;
	}

	st = avformat_new_stream(oc, *codec);
	if (!st) {
		fprintf(stderr, "Could not allocate stream\n");
		return NULL;
	}

	st->id = oc->nb_streams - 1;
	c = st->codec;

	switch ((*codec)->type) {
	case AVMEDIA_TYPE_AUDIO:
		//c->codec_id    = AV_CODEC_ID_PCM_MULAW;
		c->sample_fmt = AV_SAMPLE_FMT_S16; //AV_SAMPLE_FMT_FLTP; //
		c->bit_rate = 64000;
		c->sample_rate = 8000;   //44100
		c->channels = 1;   //2
		break;

	case AVMEDIA_TYPE_VIDEO:
		c->codec_id = codec_id;

		c->bit_rate = 1 * 1024 * 1024;
		/* Resolution must be a multiple of two. */
		c->width = picWidth;
		c->height = picHeight;
		/* timebase: This is the fundamental unit of time (in seconds) in terms
		 * of which frame timestamps are represented. For fixed-fps content,
		 * timebase should be 1/framerate and timestamp increments should be
		 * identical to 1. */
		c->time_base.den = STREAM_FRAME_RATE;
		c->time_base.num = 1;
		c->gop_size = 12; /* emit one intra frame every twelve frames at most */
		if (pixelFormat == AV_PIX_FMT_NONE) {
			c->pix_fmt = STREAM_PIX_FMT;
		} else {
			c->pix_fmt = pixelFormat;
		}

		break;

	default:
		break;
	}

	/* Some formats want stream headers to be separate. */
	if (oc->oformat->flags & AVFMT_GLOBALHEADER)
		c->flags |= CODEC_FLAG_GLOBAL_HEADER;

	return st;
}
