#include "pch.h"
#include "VideoProcesser.h"
#include "comLib\VideoEncoder.h"
#include "common\MsgDef.h"
#include "common\def.h"
#include "comLib\InputDevices.h"
#include "VideoDXGICaptor.h"
#include "DataProcess.h"
#include <comLib\VideoDecoder.h>
#include <comLib\DesktopDecoder.h>
#include <comLib\net.h>
#include <cassert>
#include <cstdint>
#include <iostream>
#include <memory>
#include <Windows.h>

// stb single-header libraries: the *_IMPLEMENTATION defines must appear in
// exactly one translation unit before the headers are included.
#define STB_IMAGE_IMPLEMENTATION
#define STB_IMAGE_WRITE_IMPLEMENTATION
#include "stb_image.h"
#include "stb_image_write.h"

// Constructs the video processing worker: wires up the encode/decode pipeline,
// the input-device source and the DXGI desktop captor, then initializes the
// captor. Runs on the creating (GUI) thread; run() executes on the QThread.
CVideoProcesser::CVideoProcesser(QObject *parent):
QThread(parent)
{
	m_pVideoEncoder.reset(new  CVideoEncoder());
	m_pVideoDecoder.reset(new CVideoDecoder);
	m_pInputDevices.reset(new CInputDevices);
	// NOTE(review): raw owning `new`, unlike the smart-pointer members above —
	// presumably deleted in the destructor; confirm, or migrate to the same
	// smart-pointer type once the header is in reach.
	m_pVideoDXGICaptor = new VideoDXGICaptor();
	// Shared singleton — not owned by this object.
	m_pNetClient = DNetClient::getInstance();

	// Must run after the captor is constructed; initScreenDecode() relies on
	// the captor's width/height afterwards.
	m_pVideoDXGICaptor->Init();
}

// QThread entry point: drives the capture → encode → send loop until
// stopThread() clears m_bRun.
//
// The previous body also called stbi_load("src.png", ...) into a local that
// was never used or freed — dead code and a per-run memory leak — and
// declared several unused locals. Both removed.
void CVideoProcesser::run()
{
	initScreenDecode();
}

// Stub: performs no setup and always reports success. The format context
// parameter is accepted for interface compatibility but currently unused.
bool CVideoProcesser::init(AVFormatContext *pFormatCtx)
{
	return true;
}

// Requests the worker loop to exit and blocks until it has finished.
// Called from a thread other than this QThread (typically the GUI thread).
void CVideoProcesser::stopThread()
{
	m_bRun = false;          // polled each iteration of initScreenDecode()
	m_ExitSignal.acquire();  // released at the end of initScreenDecode()
	// NOTE(review): m_bRun is written here and read on the worker thread —
	// presumably std::atomic/QAtomicInt or equivalent; confirm in the header
	// to rule out a data race.
}

void CVideoProcesser::initScreenDecode()
{
	int ret,i=0, recvData,width,height,channel,widthBytes;
	SizeInfo sizeInfo;
	uchar *pPic;
	//
	AVFormatContext *pFormatContext = m_pInputDevices->getFormatContext();
	AVPacket *pPacket = av_packet_alloc();
	
	width = m_pVideoDXGICaptor->getWidth();
	height = m_pVideoDXGICaptor->getHeight();
	sizeInfo.width= m_pVideoDXGICaptor->getWidth();
	sizeInfo.height = m_pVideoDXGICaptor->getHeight();
	int imgLength = m_pVideoDXGICaptor->getWidthBytes() * sizeInfo.height;
	unsigned char *pImgData = new unsigned char[imgLength];
	initPicFrame(sizeInfo.width, sizeInfo.height);
	m_pVideoEncoder->init(sizeInfo.width, sizeInfo.height);

	HeadInfo headInfo;
	headInfo.msgType = CLIENT_MSG::MSG_IMAGE_SIZE;
	headInfo.bodySize = sizeof(SizeInfo);
	m_pNetClient->sendPacket(headInfo, (char*)&sizeInfo);

	while (m_bRun)
	{
		if (pPacket->stream_index == 0 || true)
		{
			m_pVideoDXGICaptor->CaptureImage(pImgData, imgLength, widthBytes);
			//stbi_write_bmp("test.bmp", width, height, 4, pImgData);
			//pPic = stbi_load_from_memory(pImgData, imgLength, &width, &height, &channel, 4);
			writePicFrame(width, height, pImgData);

			m_pYuvFrame->pts = i;
			++i;
			ret = m_pVideoEncoder->sendFrame(m_pYuvFrame);
			while (ret >= 0) {
				ret = m_pVideoEncoder->recvPacket(&pPacket);
				if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){
					continue;
				}
				else if (ret < 0) {
					assert(false);
				}

				HeadInfo headInfo;
				headInfo.msgType = CLIENT_MSG::UPDATE_IMAGE;
				headInfo.bodySize = pPacket->size;
				m_pNetClient->sendPacket(headInfo, (char*)pPacket->data);

				::Sleep(10);
			}
		}
	}

	m_ExitSignal.release();
}


// Converts a packed RGBA image (srcPic, width x height, caller-owned) into a
// freshly allocated YUV420P frame stored in (and returned via) m_pYuvFrame.
// Also (re)creates m_pSwsContext for the RGBA -> YUV420P conversion.
//
// Fixes vs. the previous version:
//  - dropped the av_frame_get_buffer() call on the RGBA frame: its buffer was
//    immediately orphaned when av_image_fill_arrays() repointed data[] at
//    srcPic (leak per call);
//  - the RGBA wrapper frame is now av_frame_free'd before returning (it was
//    leaked every call). srcPic itself remains caller-owned.
AVFrame *CVideoProcesser::getPicFrame(int width, int height, uint8_t *srcPic)
{
	int ret;

	// Wrapper frame: points at the caller's pixels, no copy, no allocation.
	AVFrame *pRgbaFrame = av_frame_alloc();
	pRgbaFrame->format = AV_PIX_FMT_RGBA;
	pRgbaFrame->width = width;
	pRgbaFrame->height = height;
	ret = av_image_fill_arrays(pRgbaFrame->data, pRgbaFrame->linesize, srcPic,
		AV_PIX_FMT_RGBA, width, height, 4);  // align 4 = one RGBA pixel

	// Destination frame owns its own YUV420P buffer.
	m_pYuvFrame = av_frame_alloc();
	m_pYuvFrame->format = AV_PIX_FMT_YUV420P;
	m_pYuvFrame->width = width;
	m_pYuvFrame->height = height;
	ret = av_frame_get_buffer(m_pYuvFrame, 32);

	m_pSwsContext = sws_getContext(width, height, AV_PIX_FMT_RGBA,
		width, height, AV_PIX_FMT_YUV420P,
		SWS_BILINEAR, NULL, NULL, NULL);
	ret = sws_scale(m_pSwsContext, pRgbaFrame->data, pRgbaFrame->linesize, 0,
		height, m_pYuvFrame->data, m_pYuvFrame->linesize);

	av_frame_free(&pRgbaFrame);  // wrapper only; does not free srcPic
	return m_pYuvFrame;
}

// Builds and returns a YUV420P frame of the given size filled with a
// synthetic gradient test pattern (the classic FFmpeg encode example:
// diagonal luma ramp, vertical Cb ramp, horizontal Cr ramp). The caller
// takes ownership of the returned frame.
AVFrame *CVideoProcesser::getYuvFrame(int width, int height)
{
	AVFrame *pFrame = av_frame_alloc();
	pFrame->format = AV_PIX_FMT_YUV420P;
	pFrame->width = width;
	pFrame->height = height;
	int ret = av_frame_get_buffer(pFrame, 32);

	// Luma (Y) plane: full resolution.
	for (int row = 0; row < pFrame->height; ++row) {
		uint8_t *pLine = pFrame->data[0] + row * pFrame->linesize[0];
		for (int col = 0; col < pFrame->width; ++col)
			pLine[col] = col + row + 0 * 3;
	}

	// Chroma (Cb/Cr) planes: quarter resolution (4:2:0 subsampling).
	for (int row = 0; row < pFrame->height / 2; ++row) {
		uint8_t *pCb = pFrame->data[1] + row * pFrame->linesize[1];
		uint8_t *pCr = pFrame->data[2] + row * pFrame->linesize[2];
		for (int col = 0; col < pFrame->width / 2; ++col) {
			pCb[col] = 128 + row + 0 * 2;
			pCr[col] = 64 + col + 0 * 5;
		}
	}

	return pFrame;
}

// One-time setup of the screen-conversion pipeline: a BGRA source frame that
// writePicFrame() will point at the capture buffer, a YUV420P destination
// frame that owns its own pixel buffer, and the swscale context converting
// between them.
//
// Fixes vs. the previous version:
//  - m_pScreenFrame->format was AV_PIX_FMT_RGBA while the sws context below
//    and writePicFrame() both use AV_PIX_FMT_BGRA — now consistent (BGRA);
//  - dropped av_frame_get_buffer() on the screen frame: writePicFrame()'s
//    av_image_fill_arrays() always repointed data[] at the caller's buffer,
//    orphaning the allocation (leak).
void CVideoProcesser::initPicFrame(int width, int height)
{
	int ret = 0;

	m_pScreenFrame = av_frame_alloc();
	m_pScreenFrame->format = AV_PIX_FMT_BGRA;
	m_pScreenFrame->width = width;
	m_pScreenFrame->height = height;

	m_pYuvFrame = av_frame_alloc();
	m_pYuvFrame->format = AV_PIX_FMT_YUV420P;
	m_pYuvFrame->width = width;
	m_pYuvFrame->height = height;
	ret = av_frame_get_buffer(m_pYuvFrame, 32);
	assert(ret >= 0);

	m_pSwsContext = sws_getContext(width, height, AV_PIX_FMT_BGRA,
		width, height, AV_PIX_FMT_YUV420P,
		SWS_BILINEAR, NULL, NULL, NULL);
}

// Points m_pScreenFrame at the BGRA capture buffer srcPic (no copy) and
// converts it into m_pYuvFrame through the swscale context built by
// initPicFrame().
//
// Fix: the align argument to av_image_fill_arrays() was 64, which computes
// linesize = FFALIGN(width*4, 64). Whenever width*4 is not a multiple of 64
// (e.g. width 1366 -> 5464 bytes) that over-long stride skews the image and
// reads past each row of a tightly packed buffer. Align 4 (one BGRA pixel)
// yields linesize = width*4, matching a packed buffer.
// NOTE(review): if VideoDXGICaptor::CaptureImage writes rows at a padded
// pitch (its widthBytes out-param) rather than width*4, the linesize should
// be taken from that pitch instead — confirm against the captor.
void CVideoProcesser::writePicFrame(int width, int height, uint8_t *srcPic)
{
	int ret;

	ret = av_image_fill_arrays(m_pScreenFrame->data, m_pScreenFrame->linesize,
		srcPic, AV_PIX_FMT_BGRA, width, height, 4);
	assert(ret >= 0);

	ret = sws_scale(m_pSwsContext, m_pScreenFrame->data, m_pScreenFrame->linesize,
		0, height, m_pYuvFrame->data, m_pYuvFrame->linesize);
	assert(ret > 0);
}