#include "DHVideo.h"

extern "C"{
	#include <libavcodec/avcodec.h>
	#include <libavformat/avformat.h>
	#include <libavdevice/avdevice.h>
	#include <libavfilter/avfilter.h>
	#include <libavfilter/buffersrc.h>
	#include <libavfilter/buffersink.h>
	#include <libavutil/avutil.h>
	#include <libavutil/pixfmt.h>
	#include <libswresample/swresample.h>
	#include <libswscale/swscale.h>
	#include <libavutil/imgutils.h>
	#include <libavutil/hwcontext.h>
}

#include <chrono>
#include <cstring>
#include <iostream>
#include <list>
#include <thread>
#include <vector>
#include <boost/thread.hpp>

// Per-thread stopwatch used by test_start()/test_check(); each thread owns
// its own start point, so timings from different threads never interfere.
boost::thread_specific_ptr<std::chrono::steady_clock::time_point> t1;

// Start (or restart) the calling thread's stopwatch.
void test_start()
{
	auto* mark = new std::chrono::steady_clock::time_point(std::chrono::steady_clock::now());
	t1.reset(mark);
}
// Return the elapsed time in milliseconds since the last test_start() or
// test_check() on the calling thread, then restart the stopwatch.
// Fix: the original dereferenced t1.get() unconditionally and crashed if
// test_start() had never been called on this thread; now it returns 0.0 and
// starts the stopwatch instead.
double test_check()
{
	const auto now = std::chrono::steady_clock::now();
	double elapsed_ms = 0.0;
	if (t1.get() != nullptr) {
		std::chrono::duration<double> time_span =
			std::chrono::duration_cast<std::chrono::duration<double>>(now - *t1.get());
		elapsed_ms = time_span.count() * 1000.0;
	}
	t1.reset(new std::chrono::steady_clock::time_point(now));
	return elapsed_ms;
}

// Per-handle state shared by the encoding and muxing paths.
// Allocated in BeginMuxing/BeginEncode, released in EndMuxing/EndEncode.
typedef struct tagStFormat
{
	int frame_encode_index = 0;                  // frames submitted to the encoder so far
    SwsContext* sws = nullptr;                   // pixel-format / scaling converter
    AVPacket *pkt = nullptr;                     // packet reused by the encode path
    AVCodecContext* ce=nullptr;                  // encoder context (encode path)
    AVCodec *codecEncode=nullptr;                // encoder codec (encode path)
    AVFrame *frame = nullptr, *swframe = nullptr;// source frame / format-converted frame
    // Callback invoked with each encoded chunk; 'obj' receives user_obj.
    void (*recv)(void* obj, unsigned char* data, int& len, void** handle)=nullptr;

    // Hardware acceleration
    AVFrame *hwframe = nullptr;                  // frame living in GPU memory
    AVBufferRef *hw_device_ctx = nullptr;        // hardware device context

	void* user_obj =nullptr;                     // opaque pointer handed back to recv

    // File stream (muxing)
	bool encode = false;                         // fix: was uninitialized; true = encode raw frames before muxing
    __int64 npts_index = 0;                      // synthesized, monotonically increasing pts/dts
	AVStream *st = nullptr;                      // output video stream
	AVPacket *spkt = nullptr;                    // packet filled by the bitstream parser
	AVCodec *video_codec = nullptr;              // muxing encoder codec
	AVCodecContext *enc = nullptr;               // muxing encoder context
    AVCodecParserContext *parser = nullptr;      // splits a raw bitstream into packets
	AVFormatContext *ofmt_ctx = nullptr;         // output container
}_StFormat;

// State for one file-splitting session (BeginSplitFile/SplitFile/EndSplitFile).
typedef struct tagStSplitFile{
	std::list<AVFormatContext*> ifmt_ctx_lst;  // input contexts, one per opened source file
	// index_context: which input the next SplitFile() call uses;
	// last_start_pts/dts: timestamp offset so consecutive segments stay monotonic.
	__int64 index_context = 0, last_start_pts=0,last_start_dts=0;
	AVFormatContext *ofmt_ctx = nullptr, *ifmt_ctx = nullptr;  // output container / (unused here) input
}_StSplitFile;

int n_hw_accels = 0;                             // hardware-acceleration backend selector (0 = none)
bool g_showlog = false;                          // when false, FFmpeg logging is silenced
int g_align = 32, g_gop = 6;                     // buffer alignment / encoder GOP size
AVPixelFormat g_format = AV_PIX_FMT_ARGB;        // pixel format of incoming bitmaps
AVHWDeviceType hwtype = AV_HWDEVICE_TYPE_NONE;   // resolved hardware device type

#pragma region 基本参数
void SetHWAccels(int type) {n_hw_accels = type;}
void SetBitmapFormat(int format){ g_format = (AVPixelFormat)format;}
void SetLogDisplay(bool bshow){g_showlog = bshow;}
#pragma endregion

#pragma region 文件分割
void BeginSplitFile(void** handle, const char* file, const char* out_file)
{
	_StSplitFile* StSplitFile = new _StSplitFile();
	*handle = (_StSplitFile*)StSplitFile;
	StSplitFile->index_context = 0;
	AVFormatContext* ifmt_ctx_item = nullptr;
	if ((avformat_open_input(&ifmt_ctx_item, file, 0, 0)) < 0)
		return;
	StSplitFile->ifmt_ctx_lst.push_back(ifmt_ctx_item);
	if ((avformat_find_stream_info(ifmt_ctx_item, 0)) < 0)
		return;
	if (StSplitFile->ofmt_ctx != nullptr) return;
	avformat_alloc_output_context2(&StSplitFile->ofmt_ctx, NULL, NULL, out_file);
	if (!StSplitFile->ofmt_ctx)	return;
	for (unsigned int i = 0; i < ifmt_ctx_item->nb_streams; i++) {
		AVStream *out_stream, *in_stream = ifmt_ctx_item->streams[i];
		AVCodecParameters *in_codecpar = in_stream->codecpar;
		out_stream = avformat_new_stream(StSplitFile->ofmt_ctx, NULL);
		if (!out_stream) return;
		if (avcodec_parameters_copy(out_stream->codecpar, in_codecpar) < 0)
			return;
		out_stream->codecpar->codec_tag = 0;
	}
	if (!(StSplitFile->ofmt_ctx->oformat->flags & AVFMT_NOFILE)){
		if (avio_open(&StSplitFile->ofmt_ctx->pb, out_file, AVIO_FLAG_WRITE) < 0)
			return;
	}
	if (avformat_write_header(StSplitFile->ofmt_ctx, NULL) < 0)
		return;
}
void SplitFile(void** handle, double from, double to)
{
	_StSplitFile* StSplitFile = (_StSplitFile*)*handle;
	std::list<AVFormatContext*>::iterator it = StSplitFile->ifmt_ctx_lst.begin();
	std::advance(it, StSplitFile->index_context);
	AVFormatContext* ifmt_ctx_item = *it;
	StSplitFile->index_context++;
	if (!ifmt_ctx_item || av_seek_frame(ifmt_ctx_item, -1, (__int64)from * AV_TIME_BASE, AVSEEK_FLAG_ANY) < 0)
		return;

	__int64 start_pts = -1,start_dts=-1;
	for (int i = 0;; i++){
		AVPacket pkt;
		AVStream *in_stream, *out_stream;
		if (!ifmt_ctx_item || av_read_frame(ifmt_ctx_item, &pkt) < 0)
			break;
		in_stream = ifmt_ctx_item->streams[pkt.stream_index];
		out_stream = StSplitFile->ofmt_ctx->streams[pkt.stream_index];

		double time_stamp = av_q2d(in_stream->time_base) * pkt.pts;
		if (time_stamp > to){
			StSplitFile->last_start_pts = pkt.pts - start_pts;
			StSplitFile->last_start_dts = pkt.dts - start_dts;
			break;
		}
		if (start_pts == -1) {
			start_pts = pkt.pts;
			start_dts = pkt.dts;
		}
		pkt.pts = av_rescale_q_rnd(pkt.pts - start_pts + StSplitFile->last_start_pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
		pkt.dts = av_rescale_q_rnd(pkt.dts - start_dts + StSplitFile->last_start_dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
		pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
		pkt.pos = -1;
		if (!StSplitFile->ofmt_ctx || av_interleaved_write_frame(StSplitFile->ofmt_ctx, &pkt) < 0)
			break;
		av_packet_unref(&pkt);
	}
	
}
void EndSplitFile(void** handle)
{
	_StSplitFile* StSplitFile = (_StSplitFile*)*handle;
	StSplitFile->index_context--;
	AVFormatContext* it = StSplitFile->ifmt_ctx_lst.front();
	if (it) avformat_close_input(&it);
	StSplitFile->ifmt_ctx_lst.pop_front();

	if (StSplitFile->ifmt_ctx_lst.size() == 0){
		if (StSplitFile->ofmt_ctx) av_write_trailer(StSplitFile->ofmt_ctx);
		if (StSplitFile->ofmt_ctx && !(StSplitFile->ofmt_ctx->oformat->flags & AVFMT_NOFILE))
			avio_closep(&StSplitFile->ofmt_ctx->pb);
		if (StSplitFile->ofmt_ctx) {
			avformat_free_context(StSplitFile->ofmt_ctx);
			StSplitFile->ofmt_ctx = nullptr;
		}
	}
	memset(&StSplitFile,0,sizeof(StSplitFile));
}

// Return the duration of a media file in whole seconds, or -1 on error or
// when the container does not report a duration.
// Fixes: avformat_open_input allocates the context itself, so the explicit
// avformat_alloc_context was redundant; avformat_close_input already frees
// the context and nulls the pointer, so the trailing avformat_free_context
// was a misleading no-op; AV_NOPTS_VALUE previously produced a garbage result.
__int64 GetFileTimeSpan(const char* file)
{
    AVFormatContext *ifmt = nullptr;       // allocated by avformat_open_input
    if (avformat_open_input(&ifmt, file, NULL, NULL) < 0)
        return -1;
    __int64 tduration = ifmt->duration;    // in AV_TIME_BASE units (microseconds)
    avformat_close_input(&ifmt);           // frees the context and nulls ifmt
    if (tduration == AV_NOPTS_VALUE)       // duration unknown to the demuxer
        return -1;
	return tduration / AV_TIME_BASE;
}
#pragma endregion

#pragma region 复用
// Feed raw encoded bytes through the bitstream parser; each complete packet
// gets a synthesized monotonic timestamp, is rescaled to the stream time
// base, and is written to the output container. 'len' is consumed in place.
// Fix: the guard now also checks 'st' — its time_base is dereferenced below,
// but the original only checked spkt/parser/enc/ofmt_ctx.
void Parse(void* obj, unsigned char* data, int& len)
{
	_StFormat* StFormat = (_StFormat*)obj;
	if(!StFormat->spkt || !StFormat->parser || !StFormat->enc || !StFormat->st || !StFormat->ofmt_ctx) return;
	while (len > 0)
	{
		int ret = av_parser_parse2(StFormat->parser, StFormat->enc, &StFormat->spkt->data,
			&StFormat->spkt->size, data, len, AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);
		if(ret<0)   break;
		data += ret;
		len -= ret;
		if(StFormat->spkt->size>0){
			// One synthetic tick per packet keeps pts/dts strictly increasing.
			StFormat->spkt->pts = StFormat->npts_index;
			StFormat->spkt->dts = StFormat->npts_index;
			StFormat->npts_index++;

			// Rescale from the encoder tick rate to the stream time base.
			int ticks_per_frame = StFormat->enc->ticks_per_frame==0?1:StFormat->enc->ticks_per_frame;
			av_packet_rescale_ts(StFormat->spkt, av_make_q(StFormat->enc->time_base.num,
				StFormat->enc->time_base.den/ticks_per_frame), StFormat->st->time_base);
			av_interleaved_write_frame(StFormat->ofmt_ctx,StFormat->spkt);
			avio_flush(StFormat->ofmt_ctx->pb);
		}
		av_packet_unref(StFormat->spkt); // spkt non-null: guaranteed by the guard above
	}
}
// Create the output container + video stream for fileName and prepare either
// a pass-through bitstream parser (encode=false) or a full encoder pipeline
// (encode=true, via BeginEncode). *handle receives a new _StFormat.
// 'code' is an AVCodecID value; bitrate in bits/s; framerate in frames/s.
// Fixes: (1) std::strchr found the FIRST dot, so "clip.1080p.mp4" passed
// "1080p.mp4" as the format name — now std::strrchr takes the real extension;
// (2) when the container reports AV_CODEC_ID_NONE the codec branch is skipped
// and the original then called avcodec_open2 on a null context (crash) — a
// guard now bails out instead.
void BeginMuxing(void** handle, const char* fileName,int width,int height,int framerate,int bitrate, int code, bool encode)
{
	if(!g_showlog) av_log_set_level(AV_LOG_QUIET);
	_StFormat* StFormat = new _StFormat();
	*handle = StFormat;
	StFormat->encode = encode;
	StFormat->npts_index = 0;

	// fix: use the LAST dot as the extension separator
	const char* fileExt = std::strrchr(fileName,'.');
	avformat_alloc_output_context2(&StFormat->ofmt_ctx, NULL, fileExt!=nullptr?fileExt+1:NULL, fileName);
	if (!StFormat->ofmt_ctx){
		av_log(nullptr, AV_LOG_ERROR, "failed to allocate format context\n");
		return;
	}
	AVOutputFormat *fmt = StFormat->ofmt_ctx->oformat;
	if (fmt->video_codec != AV_CODEC_ID_NONE) {
		fmt->video_codec = (AVCodecID)code;  // override the container's default codec
		StFormat->video_codec = avcodec_find_encoder(fmt->video_codec);
		if(!StFormat->video_codec){
			av_log(nullptr, AV_LOG_ERROR, "failed to find encoder\n");
			return;
		}
		StFormat->enc = avcodec_alloc_context3(StFormat->video_codec);
		if(!StFormat->enc){
			av_log(nullptr, AV_LOG_ERROR, "failed to allocate encoder context\n");
			return;
		}
		StFormat->parser = av_parser_init(StFormat->video_codec->id);
		if(!StFormat->parser){
			av_log(nullptr, AV_LOG_ERROR, "failed to initial parser\n");
			return;
		}
		StFormat->st = avformat_new_stream(StFormat->ofmt_ctx,NULL);
		if(!StFormat->st){
			av_log(nullptr, AV_LOG_ERROR, "failed to allocate format stream\n");
			return;
		}
		StFormat->st->id = StFormat->ofmt_ctx->nb_streams-1;
		switch (StFormat->video_codec->type)
		{
			case AVMEDIA_TYPE_AUDIO:break;
			case AVMEDIA_TYPE_VIDEO:
				StFormat->enc->codec_id = fmt->video_codec;
				StFormat->enc->bit_rate = bitrate;
				StFormat->enc->width = width;
				StFormat->enc->height = height;
				// Stream and encoder share a 1/framerate time base.
				StFormat->st->time_base = av_make_q(1,framerate);
				StFormat->enc->time_base = av_make_q(1,framerate);
				StFormat->enc->gop_size = g_gop;
				StFormat->enc->pix_fmt = AV_PIX_FMT_YUV420P;
				if(StFormat->enc->codec_id == AV_CODEC_ID_MPEG2VIDEO)
					StFormat->enc->max_b_frames = 0;
				if(StFormat->enc->codec_id == AV_CODEC_ID_MPEG1VIDEO)
					StFormat->enc->mb_decision = 2;  // rate-distortion macroblock decision
				break;
			default:break;
		}
		if(fmt->flags&AVFMT_GLOBALHEADER)
			StFormat->enc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
	}

	// fix: the original fell through and called avcodec_open2(nullptr, ...)
	// when the container exposed no video codec.
	if (!StFormat->enc || !StFormat->st){
		av_log(nullptr, AV_LOG_ERROR, "output format has no video codec\n");
		return;
	}

	// Some encoders reject YUV420P; retry once with NV12 before giving up.
	if (avcodec_open2(StFormat->enc, StFormat->video_codec, NULL)<0){
		StFormat->enc->pix_fmt = AV_PIX_FMT_NV12;
		if (avcodec_open2(StFormat->enc, StFormat->video_codec, NULL)<0){
			av_log(nullptr, AV_LOG_ERROR, "failed to open encoder\n");
			return;
		}
	}
	if (avcodec_parameters_from_context(StFormat->st->codecpar, StFormat->enc)<0){
		av_log(nullptr, AV_LOG_ERROR, "failed to copy stream parameters\n");
		return;
	}

	if (!(fmt->flags & AVFMT_NOFILE)) {
		if(avio_open(&StFormat->ofmt_ctx->pb, fileName, AVIO_FLAG_WRITE)<0){
			av_log(nullptr, AV_LOG_ERROR, "failed to open stream file\n");
			return;
		}
	}
	if(StFormat->ofmt_ctx)
		avformat_write_header(StFormat->ofmt_ctx, NULL);

	if(encode){
		// Encode path: raw frames go through the encoder, whose output is
		// parsed/muxed by the Parse callback.
		BeginEncode(handle,*handle,width,height,framerate,bitrate,code,Parse);
	}
	StFormat->spkt = av_packet_alloc();
	if(!StFormat->spkt) {
		av_log(nullptr, AV_LOG_ERROR, "failed to allocate format packet\n");
		return;
	}
}
void Muxing(void** handle, unsigned char* data, int len)
{
	_StFormat* StFormat = (_StFormat*)(*handle);
	if(StFormat->encode){
		ImageEncode(handle,data,len);
	}
	else{
		Parse(*handle,data,len);
	}
}
extern void EndEncode(void** handle, bool bDelHandle);
void EndMuxing(void** handle)
{
	_StFormat* StFormat = (_StFormat*)(*handle);
	if(StFormat->encode){
		EndEncode(handle,false);
	}
	if(StFormat->enc) avcodec_free_context(&StFormat->enc);
	if(StFormat->spkt) av_free_packet(StFormat->spkt);
	if(StFormat->ofmt_ctx) {
		av_write_trailer(StFormat->ofmt_ctx);
		if (!(StFormat->ofmt_ctx->oformat->flags & AVFMT_NOFILE))
			avio_closep(&StFormat->ofmt_ctx->pb);
	}
	if(StFormat->ofmt_ctx) {
		avformat_free_context(StFormat->ofmt_ctx);
		StFormat->ofmt_ctx = nullptr;
	}
	if(StFormat) {
		delete StFormat;
		StFormat = nullptr;
	}
}
#pragma endregion