#include "camera_manager.h"

#include <cstdlib>
#include <fstream>
#include <vector>

#pragma warning (disable:4996)

static int g_w = 2448;
static int g_h = 2048;

static FILE* fin = NULL;
static FILE* fin2 = NULL;


// Integer BT.601 RGB -> YCbCr conversion macros.
// Each parameter is parenthesized so that expression arguments
// (e.g. RGB2Y(a + b, ...)) expand with the intended precedence.
#define RGB2Y(r,g,b) \
	((unsigned char)((66 * (r) + 129 * (g) + 25 * (b) + 128) >> 8) + 16)

#define RGB2U(r,g,b) \
	((unsigned char)((-38 * (r) - 74 * (g) + 112 * (b) + 128) >> 8) + 128)

#define RGB2V(r,g,b) \
	((unsigned char)((112 * (r) - 94 * (g) - 18 * (b) + 128) >> 8) + 128)


// Converts a packed 24-bit image to planar YUV 4:2:0 (I420).
//
// rgb_buf : input pixels, 3 bytes per pixel in B,G,R order (despite the
//           name -- callers feed frames converted to StPFNC_BGR8).
// yuv_buf : output, must hold width * heigh * 3 / 2 bytes, laid out as
//           full-size Y plane, then quarter-size U plane, then V plane.
// width   : image width in pixels  (should be even for proper 4:2:0)
// heigh   : image height in pixels (should be even for proper 4:2:0)
//
// Chroma is point-sampled rather than averaged: U is taken from pixels
// on even rows / even columns, V from pixels on odd rows / even columns.
void rgb2yuv420(unsigned char* rgb_buf, unsigned char* yuv_buf, int width, int heigh) {

	// Reject null buffers and non-positive dimensions up front
	// (the previous check only caught a dimension of exactly zero).
	if (!rgb_buf || !yuv_buf || width <= 0 || heigh <= 0) {
		printf("invalid param\n");
		return;
	}

	// Integer BT.601 helpers -- same arithmetic as the RGB2Y/U/V macros
	// above, kept local so this function is self-contained.
	const auto toY = [](int r, int g, int b) {
		return (unsigned char)((unsigned char)((66 * r + 129 * g + 25 * b + 128) >> 8) + 16);
	};
	const auto toU = [](int r, int g, int b) {
		return (unsigned char)((unsigned char)((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128);
	};
	const auto toV = [](int r, int g, int b) {
		return (unsigned char)((unsigned char)((112 * r - 94 * g - 18 * b + 128) >> 8) + 128);
	};

	unsigned char* y = yuv_buf;
	unsigned char* u = yuv_buf + width * heigh;      // U plane follows Y
	unsigned char* v = u + width * heigh / 4;        // V plane follows U

	for (int row = 0; row < heigh; row++) {
		for (int col = 0; col < width; col++) {
			const unsigned char* px = rgb_buf + (row * width + col) * 3;
			const int b = px[0], g = px[1], r = px[2];   // buffer is B,G,R
			*y++ = toY(r, g, b);
			if ((row % 2 == 0) && (col % 2 == 0))        // even row, even col -> U
				*u++ = toU(r, g, b);
			else if (col % 2 == 0)                       // odd row, even col -> V
				*v++ = toV(r, g, b);
		}
	}

}


// Construct an idle recorder: no worker thread, not running,
// no image saved yet, and no image callback installed.
Recorder::Recorder()
	: m_pRecordingThread(nullptr)
	, m_bRunning(false)
	, m_bImageSaved(false)
	, m_funcImageCB(nullptr)
{
}

// Destructor: signals the recording loop to finish, then joins and
// frees the worker thread so the object is never destroyed while its
// thread is still using it.
Recorder::~Recorder()
{
	if (m_pRecordingThread)
	{
		// Clear the running flag first; otherwise join() can block
		// indefinitely while handleRecording() keeps grabbing frames.
		stop();
		m_pRecordingThread->join();
		delete m_pRecordingThread;
		m_pRecordingThread = nullptr;
	}
}

// Launches the background recording thread.  A second call while a
// thread object already exists is a no-op.
void Recorder::start()
{
	if (m_pRecordingThread != nullptr)
		return;

	m_pRecordingThread = new thread(startRecording, this);
}

void Recorder::stop()
{
	m_bRunMutex.lock();
	m_bRunning = false;
	m_bRunMutex.unlock();
}

// Installs the image callback invoked for acquired frames.
// NOTE(review): the callback is stored but not invoked anywhere in
// this file -- presumably consumed elsewhere; confirm against callers.
void Recorder::setCallback(const FuncIStImageCb& func)
{
	m_funcImageCB = func;
}

void Recorder::pushFileData(const char* inputPath)
{
	int cnt = 0;
	if (!inputPath) return;
	ifstream ifs;
	ifs.open(inputPath);
	if (!ifs) return;
	ifs.seekg(0, ios_base::end);
	int len = ifs.tellg();
	printf("file length is %d MB\n", len / 1024 / 1024);
	ifs.seekg(0, ios_base::beg);
	auto rgb = new char[len];
	ifs.read(rgb,len);

	auto yuv = new char[len / 2];
	rgb2yuv420((unsigned char*)rgb, (unsigned char*)yuv, g_w, g_h);
	delete[] rgb;
	rgb = nullptr;

	delete[] yuv;
	yuv = nullptr;

	//while (1)
	//{

	//	m_objFF.yuv2h264((unsigned char*)yuv, g_w, g_h);
	//	Sleep(40);
	//	//cnt++;
	//	//if (cnt > 100) break;
	//}

	//delete[]  buffer;
	ifs.close();
	
}

// Streams a raw planar I420 file through the H.264 encoder test path.
//
// inputPath : path to a YUV 4:2:0 file (w * h * 3 / 2 bytes per frame)
// w, h      : luma frame dimensions in pixels
//
// Reads one frame per iteration, rewinds to the start of the file on
// EOF, and paces output at ~25 fps (40 ms sleep).  Under normal
// operation this loops forever; it returns only if the file cannot be
// opened or yields no data even after a rewind.
void Recorder::pushYUV(const char* inputPath, int w, int h)
{
	if (!inputPath || w <= 0 || h <= 0) {
		printf("pushYUV: invalid param\n");
		return;
	}

	const int oneFrameLen = w * h * 3 / 2;
	FILE* fin = fopen(inputPath, "rb");
	if (fin == nullptr)
	{
		// Bail out instead of calling fread() on a null stream.
		printf("fin open wrong\n");
		return;
	}

	char* buf = new char[oneFrameLen];
	while (1)
	{
		int n = (int)fread(buf, 1, oneFrameLen, fin);
		if (n <= 0)
		{
			// EOF (or read error): rewind and replay from the top.
			fseek(fin, 0, SEEK_SET);
			n = (int)fread(buf, 1, oneFrameLen, fin);
			if (n <= 0) break;   // still nothing -- avoid a busy spin
		}
		// Feed the raw frame to the encoder test entry point; the
		// trailing w/2, h/2 are the chroma plane dimensions.
		m_objFF.testH264((unsigned char*)buf, w, h, AV_PIX_FMT_YUV420P, w / 2, h / 2);
		Sleep(40);
	}

	delete[] buf;
	buf = nullptr;
	fclose(fin);
}

// Prints the current wall-clock time to stdout in ctime() format
// (the ctime string already ends with a newline).
void Recorder::printCurrentTime()
{
	// Capture the system clock and convert it to time_t for ctime().
	const std::time_t t =
		std::chrono::system_clock::to_time_t(std::chrono::system_clock::now());

	std::cout << "当前时间为：" << std::ctime(&t) << std::endl;
}

// Prints the current local date/time with millisecond precision,
// e.g. "2024-05-01 12:34:56.789".
void Recorder::printCurrentTime_plus()
{
	auto now = std::chrono::system_clock::now();

	// BUGFIX: the previous code subtracted std::chrono::hours(24) here,
	// so the printed timestamp was one full day behind the actual time.
	std::time_t now_c = std::chrono::system_clock::to_time_t(now);

	// Break the timestamp down into local calendar time.
	// NOTE(review): std::localtime returns a shared static buffer and is
	// not thread-safe; fine for this debug helper, revisit if called
	// from multiple threads.
	std::tm* now_tm = std::localtime(&now_c);

	// Millisecond fraction of the current second.
	auto millis = std::chrono::duration_cast<std::chrono::milliseconds>(
		now.time_since_epoch()) % 1000;

	std::cout << std::put_time(now_tm, "%Y-%m-%d %X") << '.'
		<< std::setfill('0') << std::setw(3) << millis.count() << std::endl;
}


// Worker-thread body: connects to the first detected camera, grabs
// frames until stop() clears m_bRunning (or the stream ends), converts
// each frame to BGR8 and feeds it to the H.264 encoder.  If camera
// setup throws, falls back to streaming a local test YUV file.
void Recorder::handleRecording()
{
	//while (1)
	//{
	//	pushYUV("ds_480x272.yuv", 480, 272);
	//	Sleep(3000);
	//}



	try
	{
		// Initialize StApi before using.
		CStApiAutoInit objStApiAutoInit;

		// Create a system object for device scan and connection.
		// Here we use CIStSystemPtr instead of IStSystemReleasable for automatically managing the IStSystemReleasable class with auto initial/deinitial.
		CIStSystemPtr pIStSystem(CreateIStSystem());

		// Create a camera device object and connect to first detected device by using the function of system object.
		// We use CIStDevicePtr instead of IStDeviceReleasable for automatically managing the IStDeviceReleasable class with auto initial/deinitial.
		CIStDevicePtr pIStDevice(pIStSystem->CreateFirstIStDevice());

		// Displays the DisplayName of the device.
		cout << "Device=" << pIStDevice->GetIStDeviceInfo()->GetDisplayName() << endl;

#ifdef ENABLED_ST_GUI

		// If using GUI for display, create a display window here.
		CIStImageDisplayWndPtr pIStImageDisplayWnd(CreateIStWnd(StWindowType_ImageDisplay));
#endif

		// Create a DataStream object for handling image stream data.
		// We use CIStDataStreamPtr instead of IStDataStreamReleasable for automatically managing the IStDataStreamReleasable class with auto initial/deinitial.
		CIStDataStreamPtr pIStDataStream(pIStDevice->CreateIStDataStream(0));

		// Start the image acquisition of the host (local machine) side.
		pIStDataStream->StartAcquisition();

		// Start the image acquisition of the camera side.
		pIStDevice->AcquisitionStart();


		// Mark the recorder as running; stop() clears this flag from
		// another thread to end the acquisition loop below.
		m_bRunMutex.lock();
		m_bRunning = true;
		cout << "xptest: camera starts running now\n";
		//printfCurrentTime();
		m_bRunMutex.unlock();

		bool fFirstFrame = true;
		uint64_t nFirstFrameTimestamp = 0;
		while (pIStDataStream->IsGrabbing())
		{
			// NOTE(review): m_bRunning is read here without holding
			// m_bRunMutex; consider an atomic flag or locking the read.
			if (false == m_bRunning)
			{
				cout << "xptest: camera shut down now\n";
				//printfCurrentTime();
				break;
			}
			//printCurrentTime_plus();
			// Retrieve the buffer pointer of image data with a timeout of 5000ms.
			// Use CIStStreamBufferPtr for automatically managing the buffer re-queue action when it's no longer needed.
			CIStStreamBufferPtr pIStStreamBuffer(pIStDataStream->RetrieveBuffer(5000));
			// Check if the acquired data contains image data.
			if (pIStStreamBuffer->GetIStStreamBufferInfo()->IsImagePresent())
			{
				// If yes, we create a IStImage object for further image handling.
				IStImage* pIStImage = pIStStreamBuffer->GetIStImage();

#ifdef ENABLED_ST_GUI
				// Acquire detail information of received image and display it onto the status bar of the display window.
				stringstream ss;
				ss << pIStDevice->GetIStDeviceInfo()->GetDisplayName();
				ss << "  ";
				ss << pIStImage->GetImageWidth() << " x " << pIStImage->GetImageHeight();
				ss << "  ";
				ss << fixed << std::setprecision(2) << pIStDataStream->GetCurrentFPS();
				ss << "[fps]";
				GenICam::gcstring strText(ss.str().c_str());
				pIStImageDisplayWnd->SetUserStatusBarText(strText);

				// Check if display window is visible.
				if (!pIStImageDisplayWnd->IsVisible())
				{
					// Set the position and size of the window.
					pIStImageDisplayWnd->SetPosition(0, 0, pIStImage->GetImageWidth(), pIStImage->GetImageHeight());

					// Create a new thread to display the window.
					pIStImageDisplayWnd->Show(NULL, StWindowMode_ModalessOnNewThread);
				}

				// Register the image to be displayed.  This makes a copy of
				// the image data, so the original buffer can be released if
				// necessary.
				pIStImageDisplayWnd->RegisterIStImage(pIStImage);
#else
				// Gather frame info; publish the live frame size to the
				// file-scope globals g_w / g_h used by pushFileData().
				void* buffer = pIStImage->GetImageBuffer();
				int frameId = pIStStreamBuffer->GetIStStreamBufferInfo()->GetFrameID();
				int width = pIStImage->GetImageWidth(); g_w = width;
				int height = pIStImage->GetImageHeight(); g_h = height;
				int fps = pIStDataStream->GetCurrentFPS();
				//cout << "BlockId=" << frameId << " W:" << width << " H: " << height
				//	<< " First byte =" << (uint32_t) * (uint8_t*)pIStImage->GetImageBuffer() << endl;
				//cout << "current fps : " << fps << endl;


				

				// Create a data converter object for pixel format conversion.
				CIStPixelFormatConverterPtr pIStPixelFormatConverter = CreateIStConverter(StConverterType_PixelFormat);

				// Convert the image data to StPFNC_BGR8 format
				// Converting a pixelformat from  BayerRG8 to YUV422_8 is not supported.
				pIStPixelFormatConverter->SetDestinationPixelFormat(StPFNC_BGR8);
				CIStImageBufferPtr pIStImageBuffer = CreateIStImageBuffer();
				pIStPixelFormatConverter->Convert(pIStImage, pIStImageBuffer);
				pIStImage = pIStImageBuffer->GetIStImage();
				void* src_data = pIStImage->GetImageBuffer();
				// NOTE(review): the converter was set to StPFNC_BGR8, yet
				// the data is tagged AV_PIX_FMT_RGB24 here -- the red and
				// blue channels may be swapped in the encoded output.
				// Confirm against the encoder's expectations.
				AVPixelFormat src_fmt = AV_PIX_FMT_RGB24;
				if (src_data)
				{
					// Encode, downscaling the output to 1/8 of the source
					// dimensions (width/8 x height/8).
					m_objFF.yuv2h264((unsigned char*)src_data, width, height, src_fmt, 
																width / 8, height / 8, AV_PIX_FMT_YUV420P);
				}

				//auto yuv = new unsigned char[width * height * 3 / 2];
				//rgb2yuv420((unsigned char*)src_data, yuv, width, height);
				//AVPixelFormat src_fmt = AV_PIX_FMT_YUV420P;
				//if (yuv)
				//{
				//	m_objFF.yuv2h264((unsigned char*)yuv, width, height, src_fmt,
				//		width, height, AV_PIX_FMT_YUV420P);
				//}

#endif
			}
			else
			{
				// If the acquired data contains no image data...
				cout << "Image data does not exist" << endl;
				
			}
		}

		// Stop the image acquisition of the camera side.
		pIStDevice->AcquisitionStop();

		// Stop the image acquisition of the host side.
		pIStDataStream->StopAcquisition();
	}
	catch (const GenICam::GenericException& e)
	{
		// If any exception occurred, display the description of the error here.
		cerr << endl << "An exception occurred." << endl << e.GetDescription() << endl;

		// Fallback: no camera available -- stream a local test file so
		// the encoder path can still be exercised.
		pushYUV("ds_480x272.yuv", 480, 272);
		//pushYUV("123.yuv", 2448, 2048);
		//pushFileData("xxx.rgb");
		//while (1)
		//{
		//	int w = 176, h = 144;
		//	int size = w * h * 3;
		//	char* buf = new char[size];
		//	if (!fin)
		//	{
		//		
		//		fin = fopen("akiyo_qcif.yuv", "rb");
		//		fread(buf, 1, size, fin);

		//		
		//	}

		//	if (fin)
		//	{
		//		m_objFF.yuv2h264((unsigned char*)buf, w, h);
		//		Sleep(40);
		//	}
		//}
	}
	

}

void Recorder::startRecording(void* args)
{
	Recorder* r = (Recorder*)args;
	r->handleRecording();
}


