#include "sophonVideoCapture.h"

#include <iostream>
#include <mutex>
#include <filesystem>
#include <fstream>
#include <string.h>
#include <dirent.h>
#include <unistd.h>
#include <sys/stat.h>
#define  USE_FFMPEG
#include "ffmpeg_headers.h"
#include "thread_pool.h"


/// Construct a capture pipeline bound to `uri`.
/// Loads the YOLOv8 detection network, requests a Sophon device handle and
/// opens the hardware decoder. `opts` is currently unused.
sophonVideoCapture::sophonVideoCapture(const std::string& uri, const std::map<std::string, std::string>& opts)
	:m_videourl(uri)
{
	// Capture starts in the stopped state; Start() flips this flag.
	m_bStop.store(true);

	// TODO(review): model/label paths are hard-coded to a USB mount point;
	// consider reading them from `opts` so deployments are not tied to it.
	std::string bmodel_file = R"(/media/usb-sda1/sophon-ffmpeg/models/yolov8s_int8_1b.bmodel)";
	std::string coco_names = R"(/media/usb-sda1/sophon-ffmpeg/models/coco.names)";

	// Acquire a device handle on dev_id; log (but do not throw) on failure,
	// matching the best-effort error style used below for the decoder.
	if (bm_dev_request(&handle, dev_id) != 0) {
		SPDLOG_LOGGER_ERROR(spdlogptr, "bm_dev_request failed.");
	}

	// Initialize the detection network (thresholds 0.25 / 0.7 — see YoloV8_det ctor).
	yolov8 = new YoloV8_det(bmodel_file, coco_names, dev_id, 0.25, 0.7);
	yolov8->m_ts = &yolov8_ts;

	// Batch size is dictated by the loaded bmodel.
	batch_size = yolov8->batch_size;

	// Open the hardware decoder on the capture URI.
	m_decoder = new VideoDecFFM;
	int ret = m_decoder->openDec(&handle, uri.c_str());
	if (ret != 0) {
		SPDLOG_LOGGER_ERROR(spdlogptr, "Open decoder failed.");
	}
}

/// Destructor: signals the capture loop to stop via Destroy().
/// NOTE: routine lifecycle tracing now logs at INFO (it was ERROR, and the
/// old text said "stop" although this is the destructor).
sophonVideoCapture::~sophonVideoCapture()
{
	SPDLOG_LOGGER_INFO(spdlogptr, "sophonVideoCapture::~sophonVideoCapture begin");
	Destroy();
	SPDLOG_LOGGER_INFO(spdlogptr, "sophonVideoCapture::~sophonVideoCapture end");
}

bool sophonVideoCapture::Start()
{
	if (m_bStop.load() == false)
	{
		return false;
	}
	m_bStop.store(false);
	SPDLOG_LOGGER_INFO(spdlogptr, "sophonVideoCapture::Start");
	netlib::ThreadPool::getInstance().append([&]()
		{
			int ret = 0;
			int got_frame = 0;
			bool end_flag = false;
			std::vector<bm_image> batch_imgs;
			std::vector<YoloV8BoxVec> boxes;
			int id = 0;
			while (true)
			{			
				bm_image* img = m_decoder->grab();
				if (img == nullptr)
				{
					SPDLOG_LOGGER_INFO(spdlogptr, "grab error\n");
		
					delete decoder;
					m_decoder = new VideoDecFFM;
					ret = m_decoder->openDec(&handle, m_videourl.c_str());				
					continue;
				}
				else
				{
					batch_imgs.push_back(*img);
					delete img;
					img = nullptr;
				}		
				if ((batch_imgs.size() == batch_size) && !batch_imgs.empty())
				{		
					CV_Assert(0 == yolov8->Detect(batch_imgs, boxes));
					for (int i = 0; i < batch_size; i++) {				
						id++;				
						SPDLOG_LOGGER_INFO(spdlogptr, "id = {}   det_nums {}\n", id,boxes[i].size());
						yolov8->draw_result_bmcv(batch_imgs[i], boxes[i],true);					
					
						AVFrame* frame_yuv420p = av_frame_alloc(); //for encoder
						bm_image* bm_image_yuv420p = (bm_image*)malloc(sizeof(bm_image)); // for bmBufferDeviceMemFree
						memcpy(bm_image_yuv420p, &batch_imgs[i], sizeof(bm_image));
						ret = bm_image_to_avframe(handle, bm_image_yuv420p, frame_yuv420p);

						assert(0 == ret); //on 1684x, this bmimg must on vpu heap.
						
						int nwidth = frame_yuv420p->width;
						int nheight = frame_yuv420p->height;
						int pix_fmt_ = frame_yuv420p->format;
						if (m_encoder == nullptr)
						{
							m_encoder = new VideoEnc_FFMPEG;
							m_encoder->openEnc("h264_bm",0,25, nwidth, nheight, pix_fmt_, 25 * frame_yuv420p->width * frame_yuv420p->height / 8);//h264_bm
							/*ret = m_encoder->openEnc(R"(rtsp://192.168.100.131:1554/live/2)",
								"h264_bm",
								0,
								25,
								frame_yuv420p->width,
								frame_yuv420p->height,
								frame_yuv420p->format,
								25 * frame_yuv420p->width * frame_yuv420p->height / 8);		*/				
						}

						if (m_szEncodeBuffer == nullptr)
						{
							m_szEncodeBuffer = new unsigned char[nwidth * nheight * 3 / 2];
						}
						int            nOneFrameLength = 0;
						int64_t pts = 0, dts = 0;
						bool  bKeyFrame = false;

						AVPacket *enc_pkt   = av_packet_alloc();					
					
						if (0 != m_encoder->writeFrame(frame_yuv420p, *enc_pkt)) {
							std::cout << "encode failed once." << std::endl;
						}
						memcpy(m_szEncodeBuffer, enc_pkt->data, enc_pkt->size);
						nOneFrameLength = enc_pkt->size;
						bKeyFrame = (enc_pkt->flags & AV_PKT_FLAG_KEY);

						if (m_h264Callback != nullptr && nOneFrameLength > 0)
						{
							m_h264Callback((char*)m_szEncodeBuffer, nOneFrameLength, bKeyFrame, nwidth, nheight, 30, pts);
						}					
						SPDLOG_LOGGER_INFO(spdlogptr, "nOneFrameLength = {} ", nOneFrameLength);
						av_packet_free(&enc_pkt);
					
						av_frame_unref(frame_yuv420p);
						av_frame_free(&frame_yuv420p);					
					}
					batch_imgs.clear();
					boxes.clear();			
				}		
			}

		});
	return true;
}

/// Raise the stop flag. Start() refuses to run while the flag is clear,
/// so this also re-arms the object for a subsequent Start() call.
void sophonVideoCapture::Destroy()
{
	m_bStop = true; // atomic seq_cst store, identical to m_bStop.store(true)
}

/// Unregister a previously registered YUV callback.
/// std::function has no operator==, so matching is only possible when both
/// sides wrap a plain function pointer of the YUV-callback signature.
/// BUGFIX: the old code dereferenced cb.target<void(*)()>(), which returns
/// nullptr for any callable of the real signature — guaranteed UB.
void sophonVideoCapture::Stop(VideoYuvCallBack yuvCallback)
{
	std::lock_guard<std::mutex> lock(m_mutex);
	// NOTE(review): signature assumed from onData's invocation of these
	// callbacks — confirm against the VideoYuvCallBack typedef.
	using FnPtr = void (*)(uint8_t*, int, uint8_t*, int, uint8_t*, int, int, int, int64_t);
	const FnPtr* rhs = yuvCallback.target<FnPtr>();
	auto it = std::find_if(m_YuvCallbackList.begin(), m_YuvCallbackList.end(),
		[rhs](const VideoYuvCallBack& cb) {
			const FnPtr* lhs = cb.target<FnPtr>();
			// Only match when both wrap the same plain function pointer.
			return lhs != nullptr && rhs != nullptr && *lhs == *rhs;
		});
	if (it != m_YuvCallbackList.end())
	{
		m_YuvCallbackList.erase(it);
	}
}

/// Cache the requested capture geometry and frame rate.
/// `devicename` is accepted for interface compatibility but not used here.
void sophonVideoCapture::Init(const char* devicename, int nWidth, int nHeight, int nFrameRate)
{
	m_nWidth     = nWidth;
	m_nHeight    = nHeight;
	m_nFrameRate = nFrameRate;
}

// Intentionally empty stub: JSON-driven detector configuration is not yet
// implemented — detection parameters are currently hard-coded in the ctor.
void sophonVideoCapture::Initdectect(const char* strJson)
{

}

/// Register a YUV callback, skipping duplicates.
/// BUGFIX: the old duplicate check compared it->target<void*>() with
/// yuvCallback.target<void*>() — both are nullptr for any stored callable,
/// so the comparison was always true and the FIRST registration silently
/// blocked every later one. Duplicate detection is now only attempted for
/// callbacks wrapping a plain function pointer (std::function has no ==).
void sophonVideoCapture::RegisterCallback(VideoYuvCallBack yuvCallback)
{
	std::lock_guard<std::mutex> _lock(m_mutex);
	// NOTE(review): signature assumed from onData's invocation of these
	// callbacks — confirm against the VideoYuvCallBack typedef.
	using FnPtr = void (*)(uint8_t*, int, uint8_t*, int, uint8_t*, int, int, int, int64_t);
	const FnPtr* rhs = yuvCallback.target<FnPtr>();
	for (const auto& cb : m_YuvCallbackList)
	{
		const FnPtr* lhs = cb.target<FnPtr>();
		if (lhs != nullptr && rhs != nullptr && *lhs == *rhs)
		{
			return; // same function pointer already registered
		}
	}
	m_YuvCallbackList.push_back(yuvCallback);
}

/// Forward one encoded H.264 buffer to the registered sink.
/// @return true when a sink consumed the data, false when none is installed.
bool sophonVideoCapture::onData(const char* id, unsigned char* buffer, int size, int64_t ts)
{
	if (!m_h264Callback)
	{
		return false; // no sink installed — frame dropped
	}
	m_h264Callback((char*)buffer, size, 0, m_nWidth, m_nHeight, m_nFrameRate, ts);
	return true;
}

bool sophonVideoCapture::onData(uint8_t* y, int strideY, uint8_t* u, int strideU, uint8_t* v, int strideV, int nWidth, int nHeight, int64_t nTimeStamp)
{
	for (const auto& callback : m_YuvCallbackList)
	{
		callback(y, strideY, u, strideU, v, strideV, nWidth, nHeight, nTimeStamp);
	}
	return false;
}

// Unused stub: decoded-frame callback hook. Always reports "not handled".
// NOTE(review): no caller is visible in this file — confirm whether this is
// dead code or wired up elsewhere before removing.
bool sophonVideoCapture::decodercallback(uint8_t* y, int strideY, uint8_t* u, int strideU, uint8_t* v, int strideV, int nWidth, int nHeight, int64_t nTimeStamp)
{
	return false;
}


/// Answer a detector-info query with a fixed "OK" JSON envelope.
/// `jmessage` is currently ignored; data is always empty.
const Json::Value sophonVideoCapture::detectorinfo(const Json::Value& jmessage)
{
	Json::Value answer;
	answer["code"] = 200;
	answer["data"] = "";
	answer["msg"] = "success"; // BUGFIX: was misspelled "sucess"
	return answer;
}
int sophonVideoCapture::openCodecContext(int* stream_idx,
	AVCodecContext** dec_ctx,
	AVFormatContext* fmt_ctx,
	enum AVMediaType type,
	int sophon_idx) {
	int ret, stream_index;
	AVStream* st;
	AVCodec* dec = NULL;
	AVDictionary* opts = NULL;

	ret = av_find_best_stream(fmt_ctx, type, -1, -1, NULL, 0);
	if (ret < 0) {
		av_log(NULL, AV_LOG_ERROR, "Could not find %s stream \n", av_get_media_type_string(type));
		return ret;
	}

	stream_index = ret;
	st = fmt_ctx->streams[stream_index];

	if (st->codecpar->codec_id != AV_CODEC_ID_H264 && st->codecpar->codec_id != AV_CODEC_ID_HEVC) {
		this->data_on_device_mem = false;
	}

	/* find decoder for the stream */
	decoder = const_cast<AVCodec*>(avcodec_find_decoder(st->codecpar->codec_id));

	if (!decoder) {
		av_log(NULL, AV_LOG_FATAL, "Failed to find %s codec\n", av_get_media_type_string(type));
		return AVERROR(EINVAL);
	}

	/* Allocate a codec context for the decoder */
	*dec_ctx = avcodec_alloc_context3(decoder);
	if (!*dec_ctx) {
		av_log(NULL, AV_LOG_FATAL, "Failed to allocate the %s codec context\n", av_get_media_type_string(type));
		return AVERROR(ENOMEM);
	}

	/* Copy codec parameters from input stream to output codec context */
	ret = avcodec_parameters_to_context(*dec_ctx, st->codecpar);
	if (ret < 0) {
		av_log(NULL, AV_LOG_FATAL, "Failed to copy %s codec parameters to decoder context\n",
			av_get_media_type_string(type));
		return ret;
	}

	video_dec_par = st->codecpar;
	/* Init the decoders, with or without reference counting */
	av_dict_set(&opts, "refcounted_frames", refcount ? "1" : "0", 0);
	av_dict_set_int(&opts, "sophon_idx", sophon_idx, 0);
	av_dict_set_int(&opts, "extra_frame_buffer_num", EXTRA_FRAME_BUFFER_NUM, 0);  // if we use dma_buffer mode
	av_dict_set_int(&opts, "output_format", output_format, 18);
	ret = avcodec_open2(*dec_ctx, dec, &opts);
	if (ret < 0) {
		av_log(NULL, AV_LOG_FATAL, "Failed to open %s codec\n", av_get_media_type_string(type));
		return ret;
	}
	*stream_idx = stream_index;

	av_dict_free(&opts);

	return 0;
}
