#include "FFmpegVideoCapture.h"
#include "../common/thread_pool.h"
#include "GlobalManager.h"
#include "YUVTS.h"
#include <iostream>
#include <fstream>
#include <mutex>
#include <filesystem>

#ifndef WIN32
#include <malloc.h>
#endif

#ifdef USE_CUDA
#include "../nvenc_encoder/cudaEncodeDLL.h"
#include "../nvenc_encoder/cudaCodecDLL.h"

extern ABL_cudaEncode_Init cudaEncode_Init;
extern ABL_cudaEncode_GetDeviceGetCount cudaEncode_GetDeviceGetCount;
extern ABL_cudaEncode_GetDeviceName cudaEncode_GetDeviceName;
extern ABL_cudaEncode_CreateVideoEncode cudaEncode_CreateVideoEncode;
extern ABL_cudaEncode_DeleteVideoEncode cudaEncode_DeleteVideoEncode;
extern ABL_cudaEncode_CudaVideoEncode cudaEncode_CudaVideoEncode;
extern ABL_cudaEncode_UnInit cudaEncode_UnInit;

extern ABL_cudaCodec_Init cudaCodec_Init;
extern ABL_cudaCodec_GetDeviceGetCount  cudaCodec_GetDeviceGetCount;
extern ABL_cudaCodec_GetDeviceName cudaCodec_GetDeviceName;
extern ABL_cudaCodec_GetDeviceUse cudaCodec_GetDeviceUse;
extern ABL_cudaCodec_CreateVideoDecode cudaCodec_CreateVideoDecode;
extern ABL_cudaCodec_CudaVideoDecode cudaCodec_CudaVideoDecode;
extern ABL_cudaCodec_DeleteVideoDecode cudaCodec_DeleteVideoDecode;
extern ABL_cudaCodec_GetCudaDecodeCount cudaCodec_GetCudaDecodeCount;
extern ABL_cudaCodec_UnInit cudaCodec_UnInit;

#endif // USE_CUDA

FFmpegVideoCapture::FFmpegVideoCapture(const std::string& uri, const std::map<std::string, std::string>& opts)
	:m_videourl(uri)
{
	// The capture starts in the "stopped" state: Start() flips m_bStop to
	// false, and the first key frame produced by the encoder flips m_bKey.
	m_bStop.store(true);
	m_bKey.store(false);
}

FFmpegVideoCapture::~FFmpegVideoCapture()
{
	// Bracket the teardown with log lines so a hang inside Destroy() is
	// easy to locate in the log output.
	SPDLOG_LOGGER_ERROR(spdlogptr, "FFmpegVideoCapture::stop start ");
	Destroy();
	SPDLOG_LOGGER_ERROR(spdlogptr, "FFmpegVideoCapture::stop end ");
}

bool FFmpegVideoCapture::Start()
{
	SPDLOG_LOGGER_INFO(spdlogptr, "LiveVideoSource::Start");
	if (m_bStop.load() == true)
	{
		m_bStop.store(false);
		m_pSource = MediaSourceAPI::CreateMediaSource(m_videourl, this, true, false);
		m_pSource->Run(false);
		startEncoderThread();
		return true;
	}
	else
	{
		return false;
	}
}

void FFmpegVideoCapture::Destroy()
{
	// Tear the whole capture pipeline down. The stop flag is raised first so
	// the encoder thread and the decode callbacks bail out before resources
	// are released. Invoked from the destructor.
	m_bStop.store(true);
	
	MediaSourceAPI::deleteMediaSource(m_pSource);
	{
		// Drop any frames still waiting for the encoder thread.
		std::lock_guard<std::mutex> lock(queueMutex);
		while (!imageQueue.empty()) {
			imageQueue.pop();
		}
	}
	// Wake the encoder thread so it can observe m_bStop and return.
	imageReady.notify_all();
	m_YuvCallbackList.clear();

	m_h264Callback = nullptr;
	m_callbackEvent = nullptr;
	if (m_FrameSWS != nullptr)
	{
		delete m_FrameSWS;
		m_FrameSWS = nullptr;
	}

	if (m_pDecoder != nullptr)
	{
		delete m_pDecoder;
		m_pDecoder = nullptr;

	}
	if (m_ffmpegEncoder != nullptr)
	{
		delete m_ffmpegEncoder;
		m_ffmpegEncoder = nullptr;

	}
	// Matches the (currently commented-out) `new char[...]` in EncodecYUV.
	if (m_pOutEncodeBuffer != nullptr)
	{
		delete[] m_pOutEncodeBuffer;
		m_pOutEncodeBuffer = nullptr;
	}
#ifdef USE_RKNN
	// Rockchip path: release the MPP codec instances and the cached RGA
	// destination handle created in mpp_decoder_frame_callback.
	if (mpp_encoder!=nullptr)
	{
		delete mpp_encoder;
		mpp_encoder = nullptr;
	}
	if (mpp_decoder != nullptr)
	{
		delete mpp_decoder;
		mpp_decoder = nullptr;
	}
	if (rga_handle_dst > 0) {
		releasebuffer_handle(rga_handle_dst);
	}
#endif //USE_RKNN
	
	// NOTE(review): this cleanup is guarded by USE_JESTON while the code
	// that creates these members (EncodecYUV / Initdecoder) uses
	// `#ifdef JESTON` — confirm both macros are always defined together,
	// otherwise the Jetson encoder/decoder and buffer are never freed here.
#ifdef USE_JESTON
	if (m_jestonEnc != nullptr)
	{
		delete m_jestonEnc;
		m_jestonEnc = nullptr;
	}
	if (m_jestonDec != nullptr)
	{
		delete m_jestonDec;
		m_jestonDec = nullptr;

	}
	if (m_jetson_addr) {
		free(m_jetson_addr);
		m_jetson_addr = NULL;
	}
#endif

#ifdef USE_YOLO
	// Release the detector (smart pointer).
	m_objectDectect.reset();
#endif // USE_YOLO


	// Stop pushing to the RTSP endpoint, if a pusher was created.
	if (m_pusher)
	{
		delete m_pusher;
		m_pusher = nullptr;
	}

}

void FFmpegVideoCapture::Stop(VideoYuvCallBack yuvCallback)
{
	// Remove a previously registered YUV callback from the dispatch list.
	std::lock_guard<std::mutex> lock(m_mutex);
	using CallbackType = VideoYuvCallBack;
	// Raw function-pointer type matching the callback invocation in
	// onData(): (y, strideY, u, strideU, v, strideV, width, height, ts).
	using RawFn = void(*)(uint8_t*, int, uint8_t*, int, uint8_t*, int, int, int, int64_t);

	// FIX: the original dereferenced cb.target<void(*)()>() unconditionally.
	// std::function::target<T>() returns nullptr whenever the stored callable
	// is not exactly T — and void(*)() never matches the YUV signature — so
	// the dereference was undefined behaviour on every call. Compare only
	// when both sides actually hold a raw function pointer.
	// NOTE(review): callbacks that wrap lambdas or bound members cannot be
	// matched this way — confirm callers only register plain functions.
	auto it = std::find_if(m_YuvCallbackList.begin(), m_YuvCallbackList.end(), [&yuvCallback](const CallbackType& cb) {
		const RawFn* lhs = cb.target<RawFn>();
		const RawFn* rhs = yuvCallback.target<RawFn>();
		return lhs != nullptr && rhs != nullptr && *lhs == *rhs;
		});
	if (it != m_YuvCallbackList.end())
	{
		m_YuvCallbackList.erase(it);
	}
}

void FFmpegVideoCapture::Init(const char* devicename, int nWidth, int nHeight, int nFrameRate)
{
	// Cache the requested capture geometry and frame rate.
	// devicename is accepted for interface compatibility but not stored.
	m_nFrameRate = nFrameRate;
	m_nHeight = nHeight;
	m_nWidth = nWidth;
}

void FFmpegVideoCapture::Initdectect(const char* strJson)
{
#ifdef USE_YOLO
	// Cache the task id carried in the JSON config, when present.
	ABL::NSJsonObject object = ABL::NSJson::ParseStr(strJson);
	const std::string taskid = object.GetString("taskId");
	if (!taskid.empty())
	{
		m_nTaskId = taskid;
	}

	// Create and initialise the detector exactly once (lazy init).
	if (!m_objectDectect)
	{
		m_objectDectect = yoloDectectApi::CreateDectect();
		m_objectDectect->Init(strJson);
	}
#endif//USE_YOLO
}

void FFmpegVideoCapture::Init(std::map<std::string, std::string> opts)
{
	// Apply optional capture settings ("width", "height", "fps"). Missing
	// keys leave the corresponding member untouched.
	// FIX: each key was looked up twice (find + at); use the iterator from
	// find() once instead.
	// NOTE(review): std::stoi throws on non-numeric input — values are
	// assumed pre-validated by the caller (behaviour unchanged).
	auto it = opts.find("width");
	if (it != opts.end())
	{
		m_nWidth = std::stoi(it->second);
	}
	if ((it = opts.find("height")) != opts.end())
	{
		m_nHeight = std::stoi(it->second);
	}
	if ((it = opts.find("fps")) != opts.end())
	{
		m_nFrameRate = std::stoi(it->second);
	}
}

void FFmpegVideoCapture::RegisterCallback(VideoYuvCallBack yuvCallback)
{
	// Register a YUV callback, skipping exact duplicates.
	std::lock_guard<std::mutex> _lock(m_mutex);
	// Raw function-pointer type matching the callback invocation in onData().
	using RawFn = void(*)(uint8_t*, int, uint8_t*, int, uint8_t*, int, int, int, int64_t);

	// FIX: the original compared it->target<void*>() pointers. target<void*>()
	// returns nullptr for any stored callable that is not a void*, so once one
	// callback was registered every later registration compared
	// nullptr == nullptr and returned early — only the first callback could
	// ever be added. Treat entries as duplicates only when BOTH wrap the same
	// raw function pointer.
	const RawFn* rhs = yuvCallback.target<RawFn>();
	for (const auto& cb : m_YuvCallbackList)
	{
		const RawFn* lhs = cb.target<RawFn>();
		if (lhs != nullptr && rhs != nullptr && *lhs == *rhs)
		{
			return; // already registered
		}
	}
	m_YuvCallbackList.push_back(yuvCallback);
}

bool FFmpegVideoCapture::onData(const char* id, unsigned char* buffer, int size, int64_t ts)
{
	// Forward an already-encoded frame straight to the registered H264
	// callback. Returns true when a consumer took the data.
	if (m_h264Callback == nullptr)
	{
		return false;
	}
	m_h264Callback((char*)buffer, size, 0, m_nWidth, m_nHeight, m_nFrameRate, ts);
	return true;
}

bool FFmpegVideoCapture::onData(uint8_t* y, int strideY, uint8_t* u, int strideU, uint8_t* v, int strideV, int nWidth, int nHeight, int64_t nTimeStamp)
{
	for (const auto& callback : m_YuvCallbackList)
	{
		callback(y, strideY, u, strideU, v, strideV, nWidth, nHeight, nTimeStamp);
	}
	return false;
}

bool FFmpegVideoCapture::decodercallback(uint8_t* y, int strideY, uint8_t* u, int strideU, uint8_t* v, int strideV, int nWidth, int nHeight, int64_t nTimeStamp)
{
	// Convert a decoded I420 frame to BGR, downscale to 960x540, run the
	// optional detector, and queue the image for the encoder thread.
	// Returns false when stopped, true after the frame was queued.
	// NOTE(review): the memcpy calls assume tightly packed planes
	// (strideY == nWidth, strideU/V == nWidth/2) — confirm with the decoder.
	std::lock_guard<std::mutex> _lock(m_mutex);
	if (m_bStop.load()) {
		return false;
	}

	int nNewWidth = 960, nNewHeight = 540;
	// Assemble the three planes into one contiguous I420 buffer, then
	// convert to BGR.
	cv::Mat yuvImage(nHeight + nHeight / 2, nWidth, CV_8UC1);
	memcpy(yuvImage.data, y, nHeight * nWidth);
	memcpy(yuvImage.data + nHeight * nWidth, u, nHeight * nWidth / 4);
	memcpy(yuvImage.data + nHeight * nWidth + nHeight * nWidth / 4, v, nHeight * nWidth / 4);
	cv::Mat bgrImage;
	cv::cvtColor(yuvImage, bgrImage, cv::COLOR_YUV2BGR_I420);

	// Scale the BGR image to the fixed encode size.
	cv::Mat resizedImage;
	cv::resize(bgrImage, resizedImage, cv::Size(nNewWidth, nNewHeight));

	// Wrap the scaled frame in an ImageInfo and push it to the encoder queue.
	ImageInfo imageInfo = { resizedImage, nNewWidth, nNewHeight };
	{
		std::lock_guard<std::mutex> lock(queueMutex);
#ifdef USE_YOLO
		if (m_objectDectect.get()!=nullptr)
		{
			m_objectDectect->detector(resizedImage, nNewWidth, nNewHeight);
		}
		
#endif
		imageQueue.push(imageInfo);

		imageReady.notify_one();
	}
	// FIX: the function previously fell off the end without returning a
	// value — undefined behaviour for a non-void function.
	return true;
}

int FFmpegVideoCapture::calculateBitrate(int nWidth, int nHeight)
{
	// Pick a target bitrate from a resolution-tier table; the first tier
	// whose bounds contain the frame wins (same ordering as the original
	// if/else chain).
	struct Tier { int maxWidth; int maxHeight; int bitrate; };
	static const Tier kTiers[] = {
		{  640,  480,          500 * 1024 },
		{  960,  540,     1 * 1024 * 1024 },
		{ 1280,  720,     3 * 1024 * 1024 },
		{ 1920, 1080,     5 * 1024 * 1024 },
	};
	for (const Tier& tier : kTiers)
	{
		if (nWidth <= tier.maxWidth && nHeight <= tier.maxHeight)
		{
			return tier.bitrate;
		}
	}
	// Anything larger than 1080p.
	return 8 * 1024 * 1024;
}


void FFmpegVideoCapture::EncodecYUV(uint8_t* yuv, int nYUVLength, int nWidth, int nHeight)
{
	// Encode one I420 frame through whichever encoder this build/platform
	// provides (CUDA NVENC, Jetson HW encoder, or FFmpeg software encoder),
	// lazily creating the encoder — and, when push is enabled, the RTSP
	// pusher — on first use.
	if (m_bStop.load())
	{
		return;
	}
	int yuv_size = 0;
	if (m_pOutEncodeBuffer == nullptr)
	{
		// NOTE(review): the allocation is commented out, so
		// m_pOutEncodeBuffer stays null — the CUDA branch below would then
		// encode into a null output buffer. Confirm before enabling USE_CUDA.
		//m_pOutEncodeBuffer = new char[nYUVLength];
	}

#ifdef USE_CUDA
	if (m_bUseCuda)
	{
		if (cudaEncode_CudaVideoEncode)
		{
			// NOTE(review): the encoded length is computed but never
			// forwarded anywhere from this branch.
			int	nOneFrameLength = cudaEncode_CudaVideoEncode(m_nCudaEncode, yuv,
				nYUVLength, m_pOutEncodeBuffer);
		}
	}
	else
#endif
	{
		int bitrate = calculateBitrate(nWidth, nHeight);


#ifdef JESTON			
		// Jetson hardware-encoder path (lazy one-time setup).
		if (m_jestonEnc == nullptr)
		{
			SPDLOG_LOGGER_ERROR(spdlogptr, "CreateJetsonEnc");
			m_jestonEnc = JetsonEncAPI::CreateJetsonEnc(nWidth, nHeight, 25);
			if (CGlobalManager::getInstance().getUsePush())
			{
				// Instantiate the stream pusher.
				std::map<std::string, std::string> pushopt;
				pushopt["format"] = "rtsp";
				m_pusher = FFmpegPusher::CreatePusher(pushopt);

				std::string strPush = CGlobalManager::getInstance().getPushPath();

				SPDLOG_LOGGER_ERROR(spdlogptr, "push  addr ={}", strPush);
				auto res = m_pusher->Start(strPush.c_str(), true, false);
				if (!res)
				{
					SPDLOG_LOGGER_INFO(spdlogptr, "m_pusher  Start error");
					delete m_pusher;
					m_pusher = nullptr;
					return;
				}
				m_pusher->addVideoStream(FFmpeg_AVCodecID::YST_CODEC_H264, nWidth, nHeight, 25);
			}
			
		
		}
		m_jestonEnc->SetEncCallBack(static_cast<JetsonEncListener*>(this));
		yuv_size = nWidth * nHeight * 3 / 2;
		// NOTE(review): buffer ownership presumably transfers to AddFrame —
		// confirm the encoder frees it, otherwise one copy leaks per frame.
		unsigned char* buffer = new unsigned char[yuv_size];
		memcpy(buffer, yuv, yuv_size);
		m_jestonEnc->AddFrame(buffer, yuv_size);
		int size = m_jestonEnc->GetQueueSize();
		if (size >= 10) {
			// Back off briefly when the encoder queue builds up.
			std::this_thread::sleep_for(std::chrono::milliseconds(10));
			SPDLOG_LOGGER_ERROR(spdlogptr, "JetsonEnc GetQueueSize:{}", size);
		}

		return;
#endif // JESTON


		// FFmpeg software-encoder path (lazy one-time setup).
		if (m_ffmpegEncoder == nullptr)
		{
			m_ffmpegEncoder = FFmpegVideoEncoderAPI::CreateEncoder();
			std::string encoder_name = CGlobalManager::getInstance().getEncoderName();
			m_ffmpegEncoder->Init(encoder_name.c_str(), CAREYE_FMT_YUV420P, nWidth, nHeight, m_nFrameRate, bitrate);
			// Encoded-packet callback.
			// NOTE(review): the first lambda parameter shadows the member
			// m_pOutEncodeBuffer of the same name.
			m_ffmpegEncoder->RegisterCallBack([this](char* m_pOutEncodeBuffer, int nOneFrameLength, bool bKey, int nWidth, int nHeight, int64_t pts)
				{
					

					// Gate output on the first key frame so consumers never
					// start mid-GOP.
					if (bKey == true)
					{
						m_bKey.store(true);
					}
					if (nOneFrameLength > 0 && m_bKey.load())
					{
						if (m_useNullCodec.load() == false)
						{
							if (m_callbackEvent)
							{
								m_callbackEvent->OnSourceVideoPacket("H264", (uint8_t*)m_pOutEncodeBuffer, nOneFrameLength, pts, bKey);
							}
						}
						if (m_pusher != nullptr)
						{
							int ret = 0;
							m_pusher->AppendVideo((uint8_t*)m_pOutEncodeBuffer, nOneFrameLength, bKey, ret, 0, 0);
							// Drop the pusher on fatal errno-style results
							// (-22/-23/-32) — presumably a dead connection;
							// TODO confirm the error codes.
							if (ret == -23 || ret == -32 || ret == -22)
							{
								delete m_pusher;
								m_pusher = 0;
							}
						}

					}
				}
			);
			m_ffmpegEncoder->Start();

			if (CGlobalManager::getInstance().getUsePush())
			{
				// Instantiate the stream pusher.
				std::map<std::string, std::string> pushopt;
				pushopt["format"] = "rtsp";
				m_pusher = FFmpegPusher::CreatePusher(pushopt);

				std::string strPush = CGlobalManager::getInstance().getPushPath();

				SPDLOG_LOGGER_ERROR(spdlogptr, "push  addr ={}", strPush);
				auto res = m_pusher->Start(strPush.c_str(), true, false);
				if (!res)
				{
					SPDLOG_LOGGER_INFO(spdlogptr, "m_pusher  Start error");
					delete m_pusher;
					m_pusher = nullptr;
					return;
				}
				m_pusher->addVideoStream(FFmpeg_AVCodecID::YST_CODEC_H264, nWidth, nHeight, 25);
			}

		}

		// Copy the frame into an I420Frame and post it to the encoder.
		auto i420_frame = std::make_unique<I420Frame>();
		yuv_size = nWidth * nHeight * 3 / 2;
		if (i420_frame->buffer == nullptr)
		{
			i420_frame->buffer = new uint8_t[yuv_size];
		}
		i420_frame->key_frame = true;
		memcpy(i420_frame->buffer, yuv, yuv_size);
		m_ffmpegEncoder->postFormat(std::move(i420_frame));
		return;
	}
}

#ifdef USE_CUDA
void FFmpegVideoCapture::InitCudaEncoder()
{
	// Try to create a 960x540 H264 NVENC session. When the DLL entry point
	// is missing or the session cannot be created, fall back to the
	// software (webrtc) encoder.
	if (!cudaEncode_CreateVideoEncode)
	{
		SPDLOG_LOGGER_ERROR(spdlogptr, "use webrtcEncode");
		return;
	}
	m_bUseCuda = cudaEncode_CreateVideoEncode(cudaEncodeVideo_H264, cudaEncodeVideo_YUV420, 960,
		540, m_nCudaEncode);
	if (m_bUseCuda)
	{
		SPDLOG_LOGGER_ERROR(spdlogptr, "cudaEncode_CreateVideoEncode");
	}
	else
	{
		SPDLOG_LOGGER_ERROR(spdlogptr, "use webrtcEncode");
	}
}
#endif // USE_CUDA

#ifdef USE_RKNN
void FFmpegVideoCapture::mpp_encoder_thread(image_buffer_t src_image)
{
	// Encode one frame with the Rockchip MPP encoder, overlaying the latest
	// detection results on the input image first. Encoded bitstream is
	// delivered to m_callbackEvent as an H264 packet.
	int enc_buf_size = mpp_encoder->GetFrameSize();
	char* enc_data = (char*)malloc(enc_buf_size);
	if (!enc_data)
	{
		printf("Failed to allocate memory for encoded data!\n");
		return;
	}

	// Input frame buffer the encoder consumes.
	void* mpp_frame = mpp_encoder->GetInputFrameBuffer();

	// Fetch the most recent detection results, if a detector is active.
	std::vector<DetectionResult> results;
	if (m_objectDectect.get() != nullptr)
	{

		m_objectDectect->getresult(results);
	}
	// Visualize each detection: bounding box plus class/confidence label.
	for (const auto& result : results)
	{
		int x1 = result.left;
		int y1 = result.top;
		int x2 = result.right;
		int y2 = result.bottom;

		draw_rectangle(&src_image, x1, y1, x2 - x1 + 1, y2 - y1 + 1, COLOR_BLACK, 3);

		// FIX: bounded snprintf instead of sprintf.
		char text[256];
		snprintf(text, sizeof(text), "%s %.1f%%", coco_cls_to_name(result.class_id), result.confidence);
		draw_text(&src_image, text, x1, y1 - 35, COLOR_WHITE, 15);
	}

	// FIX: the original fetched the SPS/PPS header and then immediately
	// memset() the buffer, discarding it before it was ever delivered.
	// Emit the header as its own packet on the first frame instead.
	if (frame_index == 1)
	{
		memset(enc_data, 0, enc_buf_size);
		int header_size = mpp_encoder->GetHeader(enc_data, enc_buf_size);
		if (header_size > 0 && m_callbackEvent)
		{
			m_callbackEvent->OnSourceVideoPacket("H264", (uint8_t*)enc_data, header_size, 0, true);
		}
	}
	memset(enc_data, 0, enc_buf_size);

	int enc_data_size = mpp_encoder->Encode(mpp_frame, enc_data, enc_buf_size);

	if (m_callbackEvent)
	{
		m_callbackEvent->OnSourceVideoPacket("H264", (uint8_t*)enc_data, enc_data_size, 0, true);
	}
	free(enc_data);

}
void FFmpegVideoCapture::mpp_decoder_frame_callback(void* userdata, int width_stride, int height_stride, int width, int height, int format, int fd, void* data)
{
	// Per-frame callback from the Rockchip MPP decoder: runs inference on
	// the decoded NV12 frame, copies it into the encoder's input buffer via
	// RGA, draws the detection overlays, and re-encodes the frame as H264
	// for m_callbackEvent.
	std::lock_guard<std::mutex> _lock(m_mutex);
	// Timestamp at entry (start of processing).
	// NOTE(review): start_time is never read afterwards — leftover from a
	// removed timing log, presumably.
	auto start_time = std::chrono::high_resolution_clock::now();
	int ret = 0;
	frame_index++;
	void* mpp_frame = NULL;
	int mpp_frame_fd = 0;
	void* mpp_frame_addr = NULL;
	int enc_data_size;

	// Lazily create the MPP encoder with the decoded frame's geometry.
	if (mpp_encoder == nullptr)
	{
		mpp_encoder = new MppEncoder();
		MppEncoderParams enc_params;
		memset(&enc_params, 0, sizeof(MppEncoderParams));
		enc_params.width = width;
		enc_params.height = height;
		enc_params.hor_stride = width_stride;
		enc_params.ver_stride = height_stride;
		enc_params.fmt = MPP_FMT_YUV420SP;
		// enc_params.type = MPP_VIDEO_CodingHEVC;
		// Note: the rk3562 only supports H.264 video streams.
		enc_params.type = MPP_VIDEO_CodingAVC;
		mpp_encoder->Init(enc_params, NULL);
	}

	// NOTE(review): unlike mpp_encoder_thread, the malloc result is not
	// checked here — a failed allocation would crash below.
	int enc_buf_size = mpp_encoder->GetFrameSize();
	char* enc_data = (char*)malloc(enc_buf_size);
	// Describe the decoded frame for the detector.
	image_frame_t img;
	img.width = width;
	img.height = height;
	img.width_stride = width_stride;
	img.height_stride = height_stride;
	img.fd = fd;
	img.virt_addr = (char*)data;
	img.format = RK_FORMAT_YCbCr_420_SP;	
	//SPDLOG_LOGGER_ERROR(spdlogptr, "mpp_frame_fd ={} {:p}   fd ={}  {:p}", mpp_frame_fd, static_cast<void*>(mpp_frame_addr), fd, static_cast<void*>(data));
	
	// Wrap the decoder's dma-buf fd as the RGA copy source.
	im_handle_param_t in_param;
	in_param.width = width;
	in_param.height = height_stride;
	in_param.format = RK_FORMAT_YCbCr_420_SP;
	rga_buffer_handle_t rga_handle_src = importbuffer_fd(fd, &in_param);
	rga_buffer_t rga_buf_src = wrapbuffer_handle(rga_handle_src, width, height, RK_FORMAT_YCbCr_420_SP, width_stride, height_stride);
	
	mpp_frame = mpp_encoder->GetInputFrameBuffer();
	mpp_frame_fd = mpp_encoder->GetInputFrameBufferFd(mpp_frame);
	mpp_frame_addr = mpp_encoder->GetInputFrameBufferAddr(mpp_frame);
	// The destination handle is created once and reused for every frame
	// (released in Destroy()).
	// NOTE(review): this assumes GetInputFrameBuffer() returns the same
	// buffer/fd on every call — TODO confirm with the MppEncoder API.
	if (rga_handle_dst == 0)
	{		
	
		im_handle_param_t dst_param;
		dst_param.width = width;
		dst_param.height = height_stride;
		dst_param.format = RK_FORMAT_YCbCr_420_SP;	
		rga_handle_dst = importbuffer_fd(mpp_frame_fd, &dst_param);		
		rga_buf_dst = wrapbuffer_handle(rga_handle_dst, width, height, RK_FORMAT_YCbCr_420_SP, width_stride, height_stride);
	}

	// Run the detector on the source frame and collect its results.
	std::vector<DetectionResult> results;
	if (m_objectDectect.get() != nullptr)
	{
		m_objectDectect->inference_model(&img, rga_buf_src);
		m_objectDectect->getresult(results);
	}

	// Copy the decoded frame into the encoder input buffer.
	ret = imcopy(rga_buf_src, rga_buf_dst);
	if (IM_STATUS_SUCCESS != ret) {
		printf("imcopy error! %s \r\n",  imStrError((IM_STATUS)ret));
		if (enc_data != nullptr)
		{
			free(enc_data);
		}
		return ;
	}


	// Draw the detections directly into the encoder's NV12 input buffer.
	image_buffer_t src_image;
	src_image.virt_addr = (unsigned char *)mpp_frame_addr;
	src_image.format = IMAGE_FORMAT_YUV420SP_NV12;
	src_image.width = width_stride;
	src_image.height = height_stride;
	char text[256];
	// Draw objects
	for (const auto& result : results)
	{
		int x1 = result.left;
		int y1 = result.top;
		int x2 = result.right;
		int y2 = result.bottom;

	//	draw_rectangle_yuv420sp((unsigned char*)mpp_frame_addr, width_stride, height_stride, x1, y1, x2 - x1 + 1, y2 - y1 + 1, 0x00FF0000, 4);
		draw_rectangle(&src_image, x1, y1, x2 - x1+1, y2 - y1+1, COLOR_BLACK, 3);

		sprintf(text, "%s %.1f%%", coco_cls_to_name(result.class_id), result.confidence );
		draw_text(&src_image, text, x1, y1 - 35, COLOR_WHITE, 15);
	}
	// Encode to file
	// Write header on first frame
	// NOTE(review): the header fetched here is immediately wiped by the
	// memset below, so the SPS/PPS is never delivered — confirm whether the
	// header should be emitted as its own packet before the first frame.
	if (frame_index == 1)
	{
		enc_data_size = mpp_encoder->GetHeader(enc_data, enc_buf_size);	
	}
	memset(enc_data, 0, enc_buf_size);
	
	enc_data_size = mpp_encoder->Encode(mpp_frame, enc_data, enc_buf_size);

	if (m_callbackEvent)
	{
		m_callbackEvent->OnSourceVideoPacket("H264", (uint8_t*)enc_data, enc_data_size, 0, true);
	}
	if (enc_data != nullptr)
	{
		free(enc_data);
	}
	// The per-frame source handle is released; the destination handle is
	// kept for reuse (see above).
	if (rga_handle_src > 0) {
		releasebuffer_handle(rga_handle_src);
	}

	//if (rga_handle_dst > 0) {
	//	releasebuffer_handle(rga_handle_dst);
	//}

}

#endif //USE_RKNN
bool  FFmpegVideoCapture::Initdecoder(const std::map<std::string, std::string>& opts)
{
	if (opts.count("videocodecname"))
	{
		std::string videoCodecName = opts.at("videocodecname");
		m_nWidth = std::stoi(opts.at("width"));
		m_nHeight = std::stoi(opts.at("height"));
		m_nFrameRate = 30;

		if (m_nFrameRate < 10) m_nFrameRate = 25;
#ifdef USE_YOLO	
		if (m_objectDectect.get() != nullptr)
		{
			m_objectDectect->InitDetector(m_nFrameRate);
		}
#endif	


#ifdef USE_RKNN
		if (mpp_decoder == nullptr)
		{
			mpp_decoder = new MppDecoder();
			int video_type = 264;
			if (videoCodecName == "H265" || videoCodecName == "hevc")
			{
				video_type = 265;
			}
			mpp_decoder->Init(video_type, 30, this);
			// 使用 lambda 设置回调
			mpp_decoder->SetCallback([=](void* userdata, int width_stride, int height_stride, int width, int height, int format, int fd, void* data) {
				mpp_decoder_frame_callback(userdata, width_stride, height_stride, width, height, format, fd, data);
				});
		}
		return true;

#endif //USE_RKNN


#ifdef JESTON

		if (m_jestonDec == nullptr)
		{
			m_jetson_addr = (unsigned char*)malloc(m_nWidth * m_nHeight * 4);
			if (videoCodecName == "H265" || videoCodecName == "hevc")
			{
				m_jestonDec = JetsonDecAPI::CreateJetsonDec("h265", m_nWidth, m_nHeight, m_jetson_addr);

			}
			else
			{
				m_jestonDec = JetsonDecAPI::CreateJetsonDec("h264", m_nWidth, m_nHeight, m_jetson_addr);

			}
			m_jestonDec->SetDecCallBack(static_cast<JetsonDecListener*>(this));

		}

		return true;
#endif

		/*	FrameInfo _info = {};
			_info.FramesPerSecond = m_nFrameRate;
			_info.DecType = DecodeType::kDecodeSoft;
			_info.Width = m_nWidth;
			_info.Height = m_nHeight;
			if (CGlobalManager::getInstance().getOutWidth() != 0 && CGlobalManager::getInstance().getOutHeight()!=0)
			{
				_info.out_width = CGlobalManager::getInstance().getOutWidth();
				_info.out_height = CGlobalManager::getInstance().getOutHeight();
			}

			_info.VCodec = (videoCodecName == "H265" || videoCodecName == "hevc") ? FFmpeg_AVCodecID::YST_CODEC_H265 : FFmpeg_AVCodecID::YST_CODEC_H264;*/

		m_pDecoder = FFmpegDecoderAPI::CreateDecoder();
		std::string decoder_name = CGlobalManager::getInstance().getDecoderName();
		if (m_pDecoder && m_pDecoder->createDecoder((char *)decoder_name.c_str(), m_nWidth, m_nHeight))
		{
			m_pDecoder->Start();
			/*	m_pDecoder->RegisterDecodeCallback([=](uint8_t* y, int strideY, uint8_t* u, int strideU, uint8_t* v, int strideV, int nWidth, int nHeight, int64_t nTimeStamp) {
					decodercallback(y, strideY, u, strideU, v, strideV, nWidth, nHeight, nTimeStamp);
					});*/
			m_pDecoder->RegisterDecodeCallback([=](uint8_t* yuv, int nWidth, int nHeight, int strideY, int strideU, int strideV, bool bKey, int64_t nTimeStamp)
				{

					if (m_bStop.load()) {
						return false;
					}

					int nNewWidth = nWidth, nNewHeight = nHeight;
					cv::Mat yuvImage(nHeight + nHeight / 2, nWidth, CV_8UC1);
					memcpy(yuvImage.data, yuv, nHeight * nWidth * 3 / 2);
					cv::Mat bgrImage;
					cv::cvtColor(yuvImage, bgrImage, cv::COLOR_YUV2BGR_I420);

					cv::Mat finalImage;
					if (nNewWidth != nWidth || nNewHeight != nHeight) {
						// 缩放图像到目标大小
						cv::resize(bgrImage, finalImage, cv::Size(nNewWidth, nNewHeight));
					}
					else {
						// 不进行缩放，直接使用转换后的图像
						finalImage = bgrImage.clone();
					}

					{
						std::lock_guard<std::mutex> lock(queueMutex);
#ifdef USE_YOLO
						if (m_objectDectect.get()!=nullptr)
						{
							m_objectDectect->detector(finalImage, finalImage.cols, finalImage.rows);
						}
#endif //USE_YOLO
						ImageInfo imageInfo = { finalImage, finalImage.cols, finalImage.rows };

						imageQueue.push(imageInfo);
						imageReady.notify_all();
					}
				});

			return true;
		}
		else
		{
			m_pDecoder = nullptr;
			SPDLOG_LOGGER_ERROR(spdlogptr, "InitDecoder: fail");
			return false;
		}

#ifdef USE_CUDA
		if (m_bUseCuda) initCudaEncoder();
#endif
		return true;
	}


}

const Json::Value FFmpegVideoCapture::detectorinfo(const Json::Value& jmessage)
{
#ifdef USE_YOLO
	// Delegate the query to the detector when one has been created.
	if (m_objectDectect)
	{
		return m_objectDectect->detectorinfo(jmessage);
	}
#endif
	// No detector available (or YOLO support compiled out): empty reply.
	return Json::Value();
}

void FFmpegVideoCapture::startEncoderThread()
{
	// Launch the encode loop on the shared thread pool: it pops BGR frames
	// queued by the decode callbacks, optionally draws detection overlays,
	// converts back to I420 and hands the frame to EncodecYUV(), pacing
	// itself to roughly m_nFrameRate frames per second.
	netlib::ThreadPool::getInstance().append([this]()
		{
			if (m_nFrameRate < 1)
			{
				m_nFrameRate = 25;
			}
			int frameTime = static_cast<int>(1000.0 / m_nFrameRate) - 5; // per-frame interval in milliseconds (minus 5 ms of slack)
			auto lastTime = std::chrono::steady_clock::now(); // initialise to "now"

			while (!m_bStop.load())
			{
				// Time elapsed since the previous iteration.
				auto currentTime = std::chrono::steady_clock::now();
				std::chrono::duration<double, std::milli> elapsedTime = currentTime - lastTime;

				if (elapsedTime.count() < frameTime) {
					// Finished early: sleep off the remainder of the frame slot.
					std::this_thread::sleep_for(std::chrono::milliseconds(frameTime - static_cast<int>(elapsedTime.count())));
				}

				lastTime = std::chrono::steady_clock::now(); // mark the start of this frame slot

				ImageInfo imageInfo;
				// Pop the next frame (blocks until a frame arrives or stop is requested).
				{
					std::unique_lock<std::mutex> lock(queueMutex);
					imageReady.wait(lock, [this] { return !imageQueue.empty() || m_bStop.load(); });
					if (m_bStop.load()) {
						SPDLOG_LOGGER_INFO(spdlogptr, "EncoderThread  m_bStop");
						return;
					}
					if (!imageQueue.empty()) {
						imageInfo = std::move(imageQueue.front());
						imageQueue.pop();
					}
				}
				cv::Mat matRGB24 = imageInfo.image;
				if (!imageInfo.image.empty())
				{
#ifdef USE_YOLO

					// Overlay the latest detection boxes on the frame.
					if (m_objectDectect.get() != nullptr)
					{
						m_objectDectect->drawRectangle(matRGB24);
					}					

#endif
					// Convert the OpenCV Mat back to planar I420.
					cv::Mat yuvImage;
					cv::cvtColor(matRGB24, yuvImage, cv::COLOR_BGR2YUV_I420);				

					int nYuvLength = yuvImage.total() * yuvImage.elemSize();
					// Hand the frame to the platform encoder.
					EncodecYUV(yuvImage.data, nYuvLength, imageInfo.width, imageInfo.height);
				}
			}
		});

}

void FFmpegVideoCapture::OnSourceConnected(void* arg, const std::map<std::string, std::string>& opts)
{
	// The source is up: build the decoder synchronously on the source
	// thread. (An earlier revision dispatched this through the thread pool;
	// that path is disabled.)
	Initdecoder(opts);
}

void FFmpegVideoCapture::OnSourceDisConnected(int err)
{
	// Only record the failure; reconnection policy is handled elsewhere.
	SPDLOG_LOGGER_ERROR(spdlogptr, "OnSourceDisConnected, error ={}", err);
}

void FFmpegVideoCapture::OnSourceVideoPacket(const char* id, uint8_t* aBytes, int aSize, int64_t ts, bool bKey)
{
	// Incoming compressed video from the media source. Routes the packet to
	// the platform decoder (Jetson / Rockchip / FFmpeg) based on the codec
	// name carried in `id`.
	if (m_bStop.load())
	{
		return;
	}
	std::string strid = id;

	if (strid.find("H264") != std::string::npos)
	{
		// A decodable stream is present: stop the pass-through mode.
		m_useNullCodec.store(false);

#ifdef JESTON
		struct timeval time_now;
		gettimeofday(&time_now, NULL);
		uint64_t timestamp = 1000 * (time_now.tv_sec) + (time_now.tv_usec) / 1000;

		if (m_jestonDec != nullptr)
		{
			// Backpressure: drop the packet outright past 30 queued frames,
			// and slow the producer once more than 5 are waiting.
			int size = m_jestonDec->GetQueueSize();
			if (size > 30)
			{
				return;
			}

			if (size > 5)
			{
				std::this_thread::sleep_for(std::chrono::milliseconds(size));
				SPDLOG_LOGGER_ERROR(spdlogptr, "GetQueueSize:{}", size);
			}
			m_jestonDec->AddEsData((unsigned char*)aBytes, aSize, timestamp);
			//SPDLOG_LOGGER_ERROR(spdlogptr, "GetQueueSize:{}", size);
		}

#endif

#ifdef USE_RKNN
		if (mpp_decoder!=nullptr)
		{
			mpp_decoder->Decode((uint8_t*)aBytes, aSize, 0);
		}
		return;
#endif //USE_RKNN
	
		// FFmpeg decoder path. NOTE(review): on JESTON builds this is also
		// reached after AddEsData (there is no #else); m_pDecoder is
		// presumably null there — confirm.
		if (m_pDecoder)
		{
			m_pDecoder->PostFrame(aBytes, aSize, bKey, ts);
		}
		// NOTE(review): m_useNullCodec was just cleared above, so this
		// forward only fires if another thread re-set it — confirm intent.
		if (m_callbackEvent && m_useNullCodec.load())
		{
			m_callbackEvent->OnSourceVideoPacket(id, aBytes, aSize, ts, bKey);
		}
	}
	if (strid.find("H265") != std::string::npos)
	{
		m_useNullCodec.store(false);
#ifdef JESTON
		struct timeval time_now;
		gettimeofday(&time_now, NULL);
		uint64_t timestamp = 1000 * (time_now.tv_sec) + (time_now.tv_usec) / 1000;
		if (m_jestonDec != nullptr)
		{
			// NOTE(review): unlike the H264 path, the hard drop (>50) is
			// checked after the sleep (>5) — confirm the ordering is intended.
			int size = m_jestonDec->GetQueueSize();
			if (size > 5)
			{
				std::this_thread::sleep_for(std::chrono::milliseconds(size));
			}
			if (size > 50)
			{
				return;
			}
			m_jestonDec->AddEsData((unsigned char*)aBytes, aSize, timestamp);
		}
#else
		if (m_pDecoder)
		{
			// NOTE(review): bKey/ts are replaced with true/0 here, unlike
			// the H264 path — confirm this is intentional.
			m_pDecoder->PostFrame(aBytes, aSize, true, 0);
		}
#endif
		
	}
	return;


}

void FFmpegVideoCapture::OnJetsonDecData(unsigned char* yuv, int data_len, uint64_t timestamp)
{
	// Decoded-frame callback from the Jetson decoder. `yuv` is assumed to
	// be an NV12 frame of m_nWidth x m_nHeight — TODO confirm (data_len is
	// not validated against that size). Converts to BGR, optionally rescales
	// to the configured output size, runs detection, and queues the frame
	// for the encoder thread.
	std::lock_guard<std::mutex> _lock(m_mutex);
	if (m_bStop.load()) {
		return;
	}

	// Target size: the configured output size, or the native frame size.
	int nNewWidth = m_nWidth, nNewHeight = m_nHeight;
	if (CGlobalManager::getInstance().getOutWidth() != 0 && CGlobalManager::getInstance().getOutHeight()!=0)
	{
		nNewWidth = CGlobalManager::getInstance().getOutWidth();
		nNewHeight = CGlobalManager::getInstance().getOutHeight();
	}

	// Wrap the NV12 buffer without copying, then convert to BGR.
	cv::Mat yuvImage(m_nHeight * 3 / 2, m_nWidth, CV_8UC1, yuv);
	cv::Mat bgrImage;
	cv::cvtColor(yuvImage, bgrImage, cv::COLOR_YUV2BGR_NV12);

	cv::Mat finalImage;
	if (nNewWidth != m_nWidth || nNewHeight != m_nHeight) {
		cv::resize(bgrImage, finalImage, cv::Size(nNewWidth, nNewHeight));
	}
	else {
		finalImage = bgrImage;
	}
	{
		std::lock_guard<std::mutex> lock(queueMutex);
	
#ifdef USE_YOLO
		if (m_objectDectect.get() != nullptr) {
			m_objectDectect->detector(finalImage, finalImage.cols, finalImage.rows);
		}
#endif
		ImageInfo imageInfo = { finalImage, finalImage.cols, finalImage.rows };
		imageQueue.push(imageInfo);
		imageReady.notify_one();
	}
}