#include "FFmpegVideoCapture.h"

#include <iostream>
#include <fstream>
#include <mutex>
#include <filesystem>

#ifndef WIN32
#include <malloc.h>
#endif
#include "thread_pool.h"


FFmpegVideoCapture::FFmpegVideoCapture(const std::string& uri, const std::map<std::string, std::string>& opts)
	:m_videourl(uri)
{
	// A freshly constructed capture is idle: Start() clears this flag.
	m_bStop = true;
	// No key frame has been seen from the encoder yet.
	m_bKey = false;
}

// Destructor: runs the full Destroy() teardown (stop flag, source, queue,
// callbacks, codecs).
FFmpegVideoCapture::~FFmpegVideoCapture()
{

	// NOTE(review): these lifecycle traces are logged at ERROR level --
	// presumably to guarantee visibility in production logs; confirm whether
	// INFO was intended.
	SPDLOG_LOGGER_ERROR(spdlogptr, "FFmpegVideoCapture::stop start ");
	Destroy();
	SPDLOG_LOGGER_ERROR(spdlogptr, "FFmpegVideoCapture::stop end ");
}

bool FFmpegVideoCapture::Start()
{
	SPDLOG_LOGGER_INFO(spdlogptr, "LiveVideoSource::Start");
	if (m_bStop.load() == true)
	{
		m_bStop.store(false);
		m_pSource = MediaSourceAPI::CreateMediaSource(m_videourl, this, true, false);
		m_pSource->Run(false);
		startEncoderThread();
		return true;
	}
	else
	{
		return false;
	}
}

// Full pipeline teardown: raise the stop flag first so worker loops exit,
// then release the media source, drain the frame queue, drop all callbacks,
// and finally delete the codec objects and the output buffer.
void FFmpegVideoCapture::Destroy()
{
	// Signal every loop (encoder thread, decode callbacks) to bail out.
	m_bStop.store(true);
	m_pSource = MediaSourceAPI::deleteMediaSource(m_pSource);
	
	// Drop any frames still waiting for the encoder thread.
	{
		std::lock_guard<std::mutex> lock(queueMutex);
		while (!imageQueue.empty()) {
			imageQueue.pop();
		}
	}
	// Wake the encoder thread so it can observe m_bStop and return.
	imageReady.notify_all();
	// NOTE(review): m_YuvCallbackList is cleared here without holding m_mutex,
	// while other methods lock m_mutex to touch it -- confirm Destroy() is only
	// ever called after all producers have stopped.
	m_YuvCallbackList.clear();

	m_h264Callback = nullptr;
	m_callbackEvent = nullptr;
	if (m_FrameSWS != nullptr)
	{
		delete m_FrameSWS;
		m_FrameSWS = nullptr;
	}

	// Delete the decoder before the encoder: the decode callback feeds
	// EncodecYUV, which uses m_ffmpegEncoder.
	if (m_pDecoder != nullptr)
	{
		delete m_pDecoder;
		m_pDecoder = nullptr;

	}
	if (m_ffmpegEncoder != nullptr)
	{
		delete m_ffmpegEncoder;
		m_ffmpegEncoder = nullptr;

	}
	if (m_pOutEncodeBuffer != nullptr)
	{
		delete[] m_pOutEncodeBuffer;
		m_pOutEncodeBuffer = nullptr;
	}
}

// Unregisters a previously registered YUV callback.
// BUG FIX: std::function::target<T>() returns nullptr whenever the stored
// callable is not exactly of type T. The original dereferenced it
// unconditionally (`*cb.target<void(*)()>()`), which is undefined behavior
// for any lambda/bind-based callback. Guard both sides before dereferencing;
// only plain function-pointer callbacks can be matched this way.
void FFmpegVideoCapture::Stop(VideoYuvCallBack yuvCallback)
{
	std::lock_guard<std::mutex> lock(m_mutex);
	using CallbackType = VideoYuvCallBack;

	auto it = std::find_if(m_YuvCallbackList.begin(), m_YuvCallbackList.end(), [&yuvCallback](const CallbackType& cb) {
		auto lhs = cb.target<void(*)()>();
		auto rhs = yuvCallback.target<void(*)()>();
		return lhs != nullptr && rhs != nullptr && *lhs == *rhs;
		});
	if (it != m_YuvCallbackList.end())
	{
		m_YuvCallbackList.erase(it);
	}
}

// Records the desired frame geometry and rate. The device name is accepted
// for interface compatibility but not used by this implementation.
void FFmpegVideoCapture::Init(const char* devicename, int nWidth, int nHeight, int nFrameRate)
{
	(void)devicename;
	m_nFrameRate = nFrameRate;
	m_nHeight = nHeight;
	m_nWidth = nWidth;
}

// Detection initialisation is intentionally a no-op in this build.
void FFmpegVideoCapture::Initdectect(const char* strJson)
{
	(void)strJson; // parameter kept for interface compatibility
}

// Applies numeric options from a key/value map; keys that are absent leave
// the corresponding member untouched. Recognised keys: "width", "height",
// "fps". (std::stoi throws on malformed values, same as before.)
void FFmpegVideoCapture::Init(std::map<std::string, std::string> opts)
{
	// Assign the parsed integer to `dst` only when `key` is present.
	auto assignIfPresent = [&opts](const char* key, int& dst) {
		auto found = opts.find(key);
		if (found != opts.end())
		{
			dst = std::stoi(found->second);
		}
	};
	assignIfPresent("width", m_nWidth);
	assignIfPresent("height", m_nHeight);
	assignIfPresent("fps", m_nFrameRate);
}

// Registers a YUV frame consumer.
// BUG FIX: the original compared `it->target<void*>() ==
// yuvCallback.target<void*>()`. target<void*>() returns nullptr for any
// callable that is not literally a stored void*, so the comparison was
// `nullptr == nullptr` -- true for EVERY existing entry, meaning only the
// first callback could ever be registered. Treat entries as duplicates only
// when both targets are non-null and point at the same callable.
void FFmpegVideoCapture::RegisterCallback(VideoYuvCallBack yuvCallback)
{
	std::lock_guard<std::mutex> _lock(m_mutex);
	for (const auto& cb : m_YuvCallbackList)
	{
		auto existing = cb.target<void*>();
		auto incoming = yuvCallback.target<void*>();
		if (existing != nullptr && incoming != nullptr && *existing == *incoming)
		{
			return; // already registered
		}
	}
	m_YuvCallbackList.push_back(yuvCallback);
}

// Forwards an encoded H264 packet to the registered sink, if any.
// Returns true when a callback consumed the packet, false otherwise.
bool FFmpegVideoCapture::onData(const char* id, unsigned char* buffer, int size, int64_t ts)
{
	if (!m_h264Callback)
	{
		return false;
	}
	m_h264Callback((char*)buffer, size, 0, m_nWidth, m_nHeight, m_nFrameRate, ts);
	return true;
}

bool FFmpegVideoCapture::onData(uint8_t* y, int strideY, uint8_t* u, int strideU, uint8_t* v, int strideV, int nWidth, int nHeight, int64_t nTimeStamp)
{
	for (const auto& callback : m_YuvCallbackList)
	{
		callback(y, strideY, u, strideU, v, strideV, nWidth, nHeight, nTimeStamp);
	}
	return false;
}

// Converts one decoded I420 frame to BGR, scales it to the fixed 960x540
// encoder input size, and queues it for the encoder thread.
// Returns false when stopped, true after the frame has been queued.
bool FFmpegVideoCapture::decodercallback(uint8_t* y, int strideY, uint8_t* u, int strideU, uint8_t* v, int strideV, int nWidth, int nHeight, int64_t nTimeStamp)
{
	std::lock_guard<std::mutex> _lock(m_mutex);
	if (m_bStop.load()) {
		return false;
	}

	int nNewWidth = 960, nNewHeight = 540;
	// Pack the three planes into one contiguous I420 buffer, then convert.
	// NOTE(review): this assumes tightly packed planes (strideY == nWidth,
	// strideU == strideV == nWidth/2); the stride parameters are ignored --
	// confirm the decoder never pads its rows.
	cv::Mat yuvImage(nHeight + nHeight / 2, nWidth, CV_8UC1);
	memcpy(yuvImage.data, y, nHeight * nWidth);
	memcpy(yuvImage.data + nHeight * nWidth, u, nHeight * nWidth / 4);
	memcpy(yuvImage.data + nHeight * nWidth + nHeight * nWidth / 4, v, nHeight * nWidth / 4);
	cv::Mat bgrImage;
	cv::cvtColor(yuvImage, bgrImage, cv::COLOR_YUV2BGR_I420);

	// Scale the BGR image to the target encoder resolution.
	cv::Mat resizedImage;
	cv::resize(bgrImage, resizedImage, cv::Size(nNewWidth, nNewHeight));

	// Queue the frame for the encoder thread and wake it.
	ImageInfo imageInfo = { resizedImage, nNewWidth, nNewHeight };
	{
		std::lock_guard<std::mutex> lock(queueMutex);
		imageQueue.push(imageInfo);
		imageReady.notify_one();
	}
	// BUG FIX: the original fell off the end of this non-void function
	// (undefined behavior); report success explicitly.
	return true;
}

// Picks a target encoder bitrate from a resolution ladder: the first rung
// whose bounds contain the frame wins; anything above 1080p gets the cap.
int FFmpegVideoCapture::calculateBitrate(int nWidth, int nHeight)
{
	struct Rung { int width; int height; int bitrate; };
	static const Rung ladder[] = {
		{  640,  480,      500 * 1024 },   // <= VGA
		{  960,  540, 1 * 1024 * 1024 },   // <= qHD
		{ 1280,  720, 3 * 1024 * 1024 },   // <= 720p
		{ 1920, 1080, 5 * 1024 * 1024 },   // <= 1080p
	};
	for (const Rung& rung : ladder)
	{
		if (nWidth <= rung.width && nHeight <= rung.height)
		{
			return rung.bitrate;
		}
	}
	return 8 * 1024 * 1024; // above 1080p
}

// Feeds one I420 frame to the (lazily created) H264 hardware encoder.
// nYUVLength is accepted for interface compatibility; the copy size is
// recomputed from nWidth/nHeight.
// Cleanup vs original: removed a dead empty `if (m_pOutEncodeBuffer ==
// nullptr)` block (its body was commented out), renamed the lambda parameter
// that shadowed the member `m_pOutEncodeBuffer`, and normalised indentation.
void FFmpegVideoCapture::EncodecYUV(uint8_t* yuv, int nYUVLength, int nWidth, int nHeight)
{
	if (m_bStop.load())
	{
		return;
	}

	// Lazily create the encoder on the first frame, sized to the incoming
	// resolution with a bitrate chosen from the resolution ladder.
	if (m_ffmpegEncoder == nullptr)
	{
		int bitrate = calculateBitrate(nWidth, nHeight);
		m_ffmpegEncoder = FFmpegVideoEncoderAPI::CreateEncoder();
		m_ffmpegEncoder->Init("h264_bm", CAREYE_FMT_YUV420P, nWidth, nHeight, m_nFrameRate, bitrate);

		// Encoded-packet sink: suppress output until the first key frame,
		// then forward packets upstream while transcoding is active
		// (m_useNullCodec == false).
		m_ffmpegEncoder->RegisterCallBack([this](char* pEncodedData, int nOneFrameLength, bool bKey, int nWidth, int nHeight, int64_t pts)
			{
				if (bKey == true)
				{
					m_bKey.store(true);
				}
				if (nOneFrameLength > 0 && m_bKey.load())
				{
					if (m_useNullCodec.load() == false)
					{
						if (m_callbackEvent)
						{
							m_callbackEvent->OnSourceVideoPacket("H264", (uint8_t*)pEncodedData, nOneFrameLength, pts, bKey);
						}
					}
				}
			}
		);
		m_ffmpegEncoder->Start();
	}

	// Copy the caller's frame into a fresh I420Frame and hand it to the
	// encoder; postFormat takes ownership.
	auto i420_frame = std::make_unique<I420Frame>();
	int yuv_size = nWidth * nHeight * 3 / 2;
	if (i420_frame->buffer == nullptr)
	{
		i420_frame->buffer = new uint8_t[yuv_size];
	}
	i420_frame->key_frame = true;
	memcpy(i420_frame->buffer, yuv, yuv_size);
	m_ffmpegEncoder->postFormat(std::move(i420_frame));
}



bool  FFmpegVideoCapture::Initdecoder(const std::map<std::string, std::string>& opts)
{
	if (opts.count("videocodecname"))
	{
	

		std::string videoCodecName = opts.at("videocodecname");
		m_nWidth = std::stoi(opts.at("width"));
		m_nHeight = std::stoi(opts.at("height"));
		m_nFrameRate = 30;
		if (m_nFrameRate < 25) m_nFrameRate = 25;


	
		FrameInfo _info = {};
		_info.FramesPerSecond = m_nFrameRate;
		_info.DecType = DecodeType::kDecodeSoft;
		_info.Width = m_nWidth;
		_info.Height = m_nHeight;	
		_info.VCodec = (videoCodecName == "H265" || videoCodecName == "hevc") ? FFmpeg_AVCodecID::YST_CODEC_H265 : FFmpeg_AVCodecID::YST_CODEC_H264;

		m_pDecoder = FFmpegDecoderAPI::CreateDecoder();
		if (m_pDecoder && m_pDecoder->createDecoder("h264_bm", m_nWidth, m_nHeight))
		{
			m_pDecoder->Start();
			/*	m_pDecoder->RegisterDecodeCallback([=](uint8_t* y, int strideY, uint8_t* u, int strideU, uint8_t* v, int strideV, int nWidth, int nHeight, int64_t nTimeStamp) {
					decodercallback(y, strideY, u, strideU, v, strideV, nWidth, nHeight, nTimeStamp);
					});*/
			m_pDecoder->RegisterDecodeCallback([=](uint8_t* yuv, int nWidth, int nHeight, int strideY, int strideU, int strideV, bool bKey, int64_t nTimeStamp)
				{
			
					if (m_bStop.load()) {
						return false;
					}

					EncodecYUV(yuv, nHeight * nWidth * 3 / 2, nWidth,nHeight);
					return true;
					int nNewWidth = nWidth, nNewHeight = nHeight;
					cv::Mat yuvImage(nHeight + nHeight / 2, nWidth, CV_8UC1);
					memcpy(yuvImage.data, yuv, nHeight * nWidth * 3 / 2);
					cv::Mat bgrImage;
					cv::cvtColor(yuvImage, bgrImage, cv::COLOR_YUV2BGR_I420);

					cv::Mat finalImage;
					if (nNewWidth != nWidth || nNewHeight != nHeight) {
						// 缩放图像到目标大小
						cv::resize(bgrImage, finalImage, cv::Size(nNewWidth, nNewHeight));
					}
					else {
						// 不进行缩放，直接使用转换后的图像
						finalImage = bgrImage.clone();
					}
				
					{
						std::lock_guard<std::mutex> lock(queueMutex);
					


						ImageInfo imageInfo = { finalImage, finalImage.cols, finalImage.rows };				
			
						imageQueue.push(imageInfo);
						imageReady.notify_all();
					}
				});

			return true;
		}
		else
		{
			m_pDecoder = nullptr;
			SPDLOG_LOGGER_ERROR(spdlogptr, "InitDecoder: fail");
			return false;
		}

#ifdef USE_CUDA
		if (m_bUseCuda) initCudaEncoder();
#endif
		return true;
	}
	
	
}

// Answers a detector-info query. With YOLO compiled in, the request is
// delegated to the object detector; otherwise an empty JSON value is returned.
const Json::Value FFmpegVideoCapture::detectorinfo(const Json::Value& jmessage)
{
#ifdef USE_YOLO
	return m_objectDectect->detectorinfo(jmessage);
#endif
	Json::Value answer;
	return answer;
}

// Launches the encoder worker on the shared thread pool. The worker drains
// BGR frames from imageQueue, converts each back to I420, and feeds it to
// EncodecYUV until m_bStop is raised (Destroy() notifies imageReady so the
// wait wakes up on shutdown).
// FIX: dropped the unused local `cv::Mat matRGB24` copy of the frame.
void FFmpegVideoCapture::startEncoderThread()
{
	netlib::ThreadPool::getInstance().append([this]()
		{
			while (!m_bStop.load())
			{
				ImageInfo imageInfo;
				// Block until a frame arrives or shutdown is signalled.
				{
					std::unique_lock<std::mutex> lock(queueMutex);
					imageReady.wait(lock, [this] { return !imageQueue.empty() || m_bStop.load(); });
					if (m_bStop.load()) {
						SPDLOG_LOGGER_INFO(spdlogptr, "EncoderThread  m_bStop");
						return;
					}
					if (!imageQueue.empty()) {
						imageInfo = std::move(imageQueue.front());
						imageQueue.pop();
					}
				}
				if (!imageInfo.image.empty())
				{
					// BGR -> I420, then hand the packed planes to the encoder.
					cv::Mat yuvImage;
					cv::cvtColor(imageInfo.image, yuvImage, cv::COLOR_BGR2YUV_I420);
					int nYuvLength = yuvImage.total() * yuvImage.elemSize();
					EncodecYUV(yuvImage.data, nYuvLength, imageInfo.width, imageInfo.height);
				}
			}
		});

}

void FFmpegVideoCapture::OnSourceConnected(void* arg, const std::map<std::string, std::string>& opts)
{
	netlib::ThreadPool::getInstance().append([=]()
		{
			if (m_bStop.load())
			{
				return;
			}
			Initdecoder(opts);
#ifdef USE_YOLO
			std::this_thread::sleep_for(std::chrono::milliseconds(1000*5));
			if (m_nFrameRate < 10) m_nFrameRate = 25;
			//m_objectDectect->InitDetector(m_nFrameRate);
#endif	
		});


}

// Source-disconnected notification: only logs the error code; no recovery is
// attempted here.
void FFmpegVideoCapture::OnSourceDisConnected(int err)
{
	SPDLOG_LOGGER_ERROR(spdlogptr, "OnSourceDisConnected, error ={}", err);
}

// Receives one encoded packet from the media source and routes it:
//  - "H264": optional Jetson hardware decode path, then the soft decoder;
//    pass-through upstream only while m_useNullCodec is set (it is cleared
//    here, so pass-through effectively stops after the first packet).
//  - "H265": same decode paths, no upstream pass-through.
void FFmpegVideoCapture::OnSourceVideoPacket(const char* id, uint8_t* aBytes, int aSize, int64_t ts,bool bKey)
{
	if (m_bStop.load())
	{
		return;
	}
	std::string strid = id;

	if (strid.find("H264") != std::string::npos)
	{
		// Entering the transcode path: stop forwarding source packets as-is.
		m_useNullCodec.store(false);

#ifdef JESTON
		struct timeval time_now;
		gettimeofday(&time_now, NULL);
		// Wall-clock timestamp in milliseconds for the Jetson decoder queue.
		uint64_t timestamp = 1000 * (time_now.tv_sec) + (time_now.tv_usec) / 1000;

		if (m_jestonDec != nullptr)
		{
			int size = m_jestonDec->GetQueueSize();
			// Hard drop when the decoder queue is badly backed up.
			if (size > 30)
			{
				return;
			}

			// Mild backpressure: sleep proportionally to the backlog.
			if (size > 5)
			{
				std::this_thread::sleep_for(std::chrono::milliseconds(size));
				SPDLOG_LOGGER_ERROR(spdlogptr, "GetQueueSize:{}", size);
			}
			m_jestonDec->AddEsData((unsigned char*)aBytes, aSize, timestamp);
			//SPDLOG_LOGGER_ERROR(spdlogptr, "GetQueueSize:{}", size);
		}

#endif
		
		// Feed the software/bitmain decoder (flagged as key frame, pts 0).
		if (m_pDecoder)
		{
			m_pDecoder->PostFrame(aBytes, aSize, true, 0);
		}
		// Pass-through branch: only reachable if m_useNullCodec was re-set
		// elsewhere, since it is cleared above.
		if (m_callbackEvent && m_useNullCodec.load())
		{
			m_callbackEvent->OnSourceVideoPacket(id, aBytes, aSize, ts, bKey);
		}
	}
	if (strid.find("H265") != std::string::npos)
	{
		m_useNullCodec.store(false);
#ifdef JESTON
		struct timeval time_now;
		gettimeofday(&time_now, NULL);
		uint64_t timestamp = 1000 * (time_now.tv_sec) + (time_now.tv_usec) / 1000;
		if (m_jestonDec != nullptr)
		{
			int size = m_jestonDec->GetQueueSize();
			// NOTE(review): threshold ordering differs from the H264 branch
			// (sleep before the >50 drop check, vs drop >30 first above) --
			// confirm this asymmetry is intentional.
			if (size > 5)
			{
				std::this_thread::sleep_for(std::chrono::milliseconds(size));
			}
			if (size > 50)
			{
				return;
			}

			m_jestonDec->AddEsData((unsigned char*)aBytes, aSize, timestamp);

		}
#endif
		
		// H265 is only decoded locally; nothing is forwarded upstream here.

			if (m_pDecoder)
			{
				m_pDecoder->PostFrame(aBytes, aSize, true, 0);
			}
	}
	return;


}
