﻿#include "YBaseMuxer.h"

extern "C"
{
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libavutil/avutil.h"
}
#include <QDebug>


static int TimeoutCallback(void* para)
{
	auto xf = (YBaseMuxer*)para;
	if (xf->IsTimeout())return 1;//超时退出Read
	return 0; //正常阻塞
}

/// Constructor: performs process-wide FFmpeg network initialization once.
YBaseMuxer::YBaseMuxer()
{
	// avformat_network_init() must run exactly once per process. A C++11
	// "magic static" gives thread-safe one-time initialization; the previous
	// hand-rolled `static bool isFirst` flag raced if two muxers were
	// constructed concurrently on different threads.
	static const bool network_initialized = [] {
		avformat_network_init();
		return true;
	}();
	(void)network_initialized; // silence unused-variable warnings
}

/// <summary>
/// Copy the input stream's codec parameters into the given output parameters.
/// </summary>
/// <param name="stream_index">index of the input stream (audio or video)</param>
/// <param name="dst">destination codec parameters (must not be NULL)</param>
/// <returns>true on success, false on invalid arguments or copy failure</returns>
bool YBaseMuxer::CopyInCodecParametersToOut(int stream_index, AVCodecParameters* dst)
{
	std::unique_lock<std::mutex> lock(mutex);
	if (!dst)
	{
		qDebug() << "CopyInCodecParametersToOut->dst is NULL";
		return false;
	}
	if (!c_context)
	{
		qDebug() << "CopyInCodecParametersToOut->AVFormatContext is NULL";
		return false;
	}
	// Valid indices are [0, nb_streams); the old check used '>' and let
	// stream_index == nb_streams slip through to an out-of-bounds access.
	if (stream_index < 0 || stream_index >= static_cast<int>(c_context->nb_streams))
		return false;
	// Copy the input codec parameters to the output.
	auto re = avcodec_parameters_copy(dst, c_context->streams[stream_index]->codecpar);
	if (re < 0)
	{
		PrintError(re, "avcodec_parameters_copy");
		return false;
	}
	return true;
}


bool YBaseMuxer::CopyParaParameters(int stream_index, AVCodecContext* dts)
{
	std::unique_lock<std::mutex> lock(mutex);
	if (!c_context)
	{
		qDebug() << "CopyParaParameters->AVFormatContext is  NULL";
		return false;
	}
	if (stream_index<0 || stream_index>c_context->nb_streams)
		return false;
	auto re = avcodec_parameters_to_context(dts, c_context->streams[stream_index]->codecpar);
	if (re < 0)
	{
		qDebug() << "avcodec_parameters_to_context->copy参数出现异常";
		return false;
	}
	return true;
}

/// <summary>
/// Install a new (de)muxing context, releasing any previously held one.
/// Also caches the audio/video stream indices, time bases and basic video
/// parameters, and wires up the timeout interrupt callback when enabled.
/// </summary>
/// <param name="c">new AVFormatContext; NULL just closes the old one</param>
void YBaseMuxer::setContext(AVFormatContext* c)
{
	std::unique_lock<std::mutex> lock(mutex);
	Close(); // release the previous context first
	c_context = c;
	if (!c_context)
	{
		is_connected_ = false;
		qDebug() << "AVFormatContext is NULL";
		return;
	}

	is_connected_ = true;

	// Stamp the clock so the timeout callback has a fresh reference point.
	last_time_ = NowMs();

	// Install the interrupt callback so blocking reads can abort on timeout.
	if (time_out_ms_ > 0)
	{
		c_context->interrupt_callback.callback = TimeoutCallback;
		c_context->interrupt_callback.opaque = this;
	}

	// Scan the streams and record audio/video indices and parameters.
	for (unsigned int i = 0; i < c->nb_streams; i++)
	{
		const auto* par = c->streams[i]->codecpar;
		const auto tb = c->streams[i]->time_base;
		if (par->codec_type == AVMEDIA_TYPE_AUDIO)
		{
			audio_index = static_cast<int>(i);
			audio_timebase.den = tb.den;
			audio_timebase.num = tb.num;
		}
		else if (par->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			video_index = static_cast<int>(i);
			video_timebase.den = tb.den;
			video_timebase.num = tb.num;
			video_codec_id = par->codec_id;
			videoWidth = par->width;
			videoHeight = par->height;
		}
	}
}

/// Convenience overload: bridges the project's YRational to FFmpeg's
/// AVRational, then delegates to the AVRational* implementation.
bool YBaseMuxer::RescaleTime(AVPacket* pkt, long long offset_pts, YRational time_base)
{
	AVRational tb{ time_base.num, time_base.den };
	return RescaleTime(pkt, offset_pts, &tb);
}

/// Rescale a packet's pts/dts/duration from the given input time base to the
/// time base of its destination stream, subtracting offset_pts first.
/// Resets pkt->pos since the rescaled packet's byte position is meaningless.
/// <returns>true on success, false on bad arguments or missing context</returns>
bool YBaseMuxer::RescaleTime(AVPacket* pkt, long long offset_pts, AVRational* time_base)
{
	if (!pkt || !time_base) return false;
	std::unique_lock<std::mutex> lock(mutex);
	if (!c_context) return false;
	// Guard against an out-of-range stream index before indexing streams[]
	// (the previous version dereferenced it unchecked).
	if (pkt->stream_index < 0
		|| pkt->stream_index >= static_cast<int>(c_context->nb_streams))
		return false;
	auto out_stream = c_context->streams[pkt->stream_index];
	// Round to nearest, clamping to min/max to avoid overflow artifacts.
	pkt->pts = av_rescale_q_rnd(pkt->pts - offset_pts, *time_base,
		out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)
	);
	pkt->dts = av_rescale_q_rnd(pkt->dts - offset_pts, *time_base,
		out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)
	);
	pkt->duration = av_rescale_q(pkt->duration, *time_base, out_stream->time_base);
	pkt->pos = -1; // byte position unknown after rescaling
	return true;
}

/// Convert a pts/dts/duration value from the given stream's time base to
/// milliseconds. Returns 0 when the context is missing or index is invalid.
long long YBaseMuxer::RescaleToMs(long long pts, int index)
{
	std::unique_lock<std::mutex> lock(mutex);
	// Valid indices are [0, nb_streams); the old check used '>' and let
	// index == nb_streams read one past the end of streams[].
	if (!c_context || index < 0 || index >= static_cast<int>(c_context->nb_streams)) return 0;
	auto in_timebase = c_context->streams[index]->time_base;

	AVRational out_timebase = { 1, 1000 }; // target time base: milliseconds
	return av_rescale_q(pts, in_timebase, out_timebase);
}

/// Release the held AVFormatContext (output, input, or bare) and reset the
/// cached stream indices. Safe to call when no context is held.
/// NOTE: callers must hold the mutex or be single-threaded here; locking
/// inside would deadlock setContext(), which calls Close() under the lock.
void YBaseMuxer::Close()
{
	if (c_context) // release the previous context, if any
	{
		if (c_context->oformat) // output (muxing) context
		{
			if (c_context->pb)
				avio_closep(&c_context->pb);
			avformat_free_context(c_context);
			// avformat_free_context does not null the pointer; clear it
			// ourselves so c_context never dangles after Close().
			c_context = nullptr;
		}
		else if (c_context->iformat) // input (demuxing) context
		{
			avformat_close_input(&c_context); // nulls c_context itself
		}
		else // context never opened in either direction
		{
			avformat_free_context(c_context);
			c_context = nullptr; // avoid a dangling pointer (see above)
		}
	}
	audio_index = -1;
	video_index = -1;
}

//返回智能指针 复制视频参数
std::shared_ptr<YAVParameters> YBaseMuxer::CopyVideoParameters()
{
	int index = GetVideoIndex();
	std::shared_ptr<YAVParameters> re;
	std::unique_lock<std::mutex> lock(mutex);
	if (index < 0 || !c_context)return re;

	re.reset(YAVParameters::Create());
	*re->time_base = c_context->streams[index]->time_base;
	avcodec_parameters_copy(re->para, c_context->streams[index]->codecpar);
	//转换成毫秒
	re->total_ms = av_rescale_q(c_context->streams[index]->duration,
		c_context->streams[index]->time_base, { 1,1000 });
	return re;
}
//返回智能指针 音频参数
std::shared_ptr<YAVParameters> YBaseMuxer::CopyAudioParameters()
{
	int index = GetAudioIndex();
	std::shared_ptr<YAVParameters> re;
	std::unique_lock<std::mutex> lock(mutex);
	if (index < 0 || !c_context)return re;

	re.reset(YAVParameters::Create());
	*re->time_base = c_context->streams[index]->time_base;
	avcodec_parameters_copy(re->para, c_context->streams[index]->codecpar);
	//转换成毫秒
	re->total_ms = av_rescale_q(c_context->streams[index]->duration,
		c_context->streams[index]->time_base, { 1,1000 });
	return re;
}

//设定超时时间
void YBaseMuxer::set_time_out_ms(int ms)
{
	std::unique_lock<std::mutex> lock(mutex);
	this->time_out_ms_ = ms;
	//设置回调函数，处理超时退出
	if (c_context)
	{
		AVIOInterruptCB cb = { TimeoutCallback ,this };
		c_context->interrupt_callback = cb;
	}
}



void YBaseMuxer::PrintError(int err, QString msg)
{
	if (err < 0)
	{
		char buf[1024] = { 0 };
		av_strerror(err, buf, sizeof(buf) - 1);
		qDebug() << msg << "->" << "code:" << err << "-" << buf;
	}

}
// Destructor: intentionally empty. NOTE(review): the context held via
// setContext() is not released here — callers appear expected to invoke
// Close()/setContext(NULL) themselves; confirm ownership with the callers.
YBaseMuxer::~YBaseMuxer()
{
}