#include "FFMpegSamplePlayerWindow.h"
#if !defined(MIN)
#define MIN(A,B)	((A) < (B) ? (A) : (B))
#endif

// Atomic thread-control flags shared by the demux/decode worker threads.
// NOTE(review): these use member-style "m_" names but are file-scope globals,
// so every FFMpegSamplePlayerWindow instance shares them — confirm this is
// intended (consider making them class members or at least `static`).
std::atomic<bool>   m_bVideoThreadRunning(false);
std::atomic<bool>   m_bThreadRunning(false);

/// Constructs the player window: sets up the widget, a "start" button and a
/// preview label, and wires the start/frame-received signal handlers.
FFMpegSamplePlayerWindow::FFMpegSamplePlayerWindow(QWidget* parent)
	: QOpenGLWidget(parent)
{
	this->resize(QSize(640, 480));
	this->setWindowTitle("简单视频播放器");
	this->setWindowIcon(QIcon("images/opencv.png"));

	QPushButton* btnStart = new QPushButton(this);
	btnStart->setText("开始播放");

	// Fixed: the label was allocated without a parent, so it was never shown
	// and leaked. Parenting it to this widget hands ownership to Qt.
	QLabel* imgLabel = new QLabel(this);
	imgLabel->resize(640, 480);

	connect(btnStart, &QPushButton::clicked, this, [=]() {
		startFFMPEG("C:/Users/DBF-DEV-103/Downloads/1e2fced773f0583e7ae85e08461ec384.mp4");
		});

	// Fixed: the original popped a modal QMessageBox for EVERY received frame,
	// which blocks the event loop and makes playback unusable. Just update
	// the preview label.
	connect(this, &FFMpegSamplePlayerWindow::sendSignalsImage, this, [=](const QImage& img) {
		imgLabel->setPixmap(QPixmap::fromImage(img));
		});
}

//void FFMpegSamplePlayerWindow::receiveSlotsImage() {
//	QMessageBox::information(this, "温馨提示", "收到了信号槽");
//}
/// One-time GL setup: quad geometry (VAO/VBO/EBO), three single-channel
/// textures for the Y/U/V planes, and the YUV->RGB shader program.
void FFMpegSamplePlayerWindow::initializeGL() {
	initializeOpenGLFunctions();
	glClearColor(1.0f, 0.0f, 1.0f, 1.0f); // clear color: magenta (old comment wrongly said "red")
	glClear(GL_COLOR_BUFFER_BIT);

	float vertices[] = {
		//     ---- position ----     - tex coord -
			 0.5f,  0.5f, 0.0f,   1.0f, 1.0f,   // top right
			 0.5f, -0.5f, 0.0f,   1.0f, 0.0f,   // bottom right
			-0.5f, -0.5f, 0.0f,   0.0f, 0.0f,   // bottom left
			-0.5f,  0.5f, 0.0f,    0.0f, 1.0f    // top left
	};
	unsigned int indices[] = {
	   0, 1, 3, // first triangle
	   1, 2, 3  // second triangle
	};
	// VAO records the vertex layout configured below.
	glGenVertexArrays(1, &VAO);
	glGenBuffers(1, &VBO);
	glGenBuffers(1, &EBO);
	glBindVertexArray(VAO);

	// VBO: interleaved position (3 floats) + texture coordinate (2 floats).
	glBindBuffer(GL_ARRAY_BUFFER, VBO);
	glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

	// EBO: two triangles forming the full quad.
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
	glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);

	// Attribute 0 = position, attribute 1 = texture coordinate.
	glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)0);
	glEnableVertexAttribArray(0);

	glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(3 * sizeof(float)));
	glEnableVertexAttribArray(1);

	// The three plane textures share identical sampling parameters, so set
	// them in one loop. Texture parameters belong to the texture OBJECT, not
	// the texture unit, so the original's glActiveTexture(GL_TEXTURE1/2)
	// switches before parameter setup were redundant and have been removed.
	glGenTextures(3, textures);
	for (int i = 0; i < 3; ++i) {
		glBindTexture(GL_TEXTURE_2D, textures[i]);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	}
	glBindTexture(GL_TEXTURE_2D, 0);

	// Build the YUV->RGB shader program from Qt resources.
	programId = buildAttachShaderAndReturnProgramId(":/QtForOpenCV4Tool/shader/yuv_texture_vert.glsl",
		":/QtForOpenCV4Tool/shader/yuv_texture_frag.glsl");
}

/// Keeps the GL viewport in sync with the widget's client size.
void FFMpegSamplePlayerWindow::resizeGL(int w, int h) {
	qDebug() << "----------width=" << w << ",height=" << h;
	glViewport(0, 0, w, h);
}
/// Renders the current decoded frame: uploads the Y/U/V planes of mYUVFrame
/// into three luminance textures and draws the textured quad with the
/// YUV->RGB shader program.
/// NOTE(review): mYUVFrame->data is written by the decode thread and read
/// here without a lock — confirm frames cannot be torn mid-upload.
void FFMpegSamplePlayerWindow::paintGL() {
	glClearColor(1.0f, 1.0f, 1.0f, 1.0f);// clear color: opaque white (old comment wrongly said "red")
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);// clear both color and depth buffers
	// Nothing decoded yet — video dimensions are set by startFFMPEG().
	if (mVideoWidth <= 0 || mVideoHeight <= 0) {
		return;
	}
	glBindVertexArray(VAO);
	glUseProgram(programId);
	uModelMatrix.setToIdentity();// reset the model matrix to identity
	glUniformMatrix4fv(glGetUniformLocation(programId, "uni_mat"), 1, GL_FALSE, uModelMatrix.data());

	qDebug() << "mVideoWidth=" << mVideoWidth << ",mVideoHeight=" << mVideoHeight;
	//qDebug() <<"buf[0]=" << sizeof(buf[0]) << ",buf[1]=" << sizeof(buf[1]) << ",buf[2]=" << sizeof(buf[2]);

	// Y plane: full resolution, one byte per pixel.
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, textures[0]);
	glPixelStorei(GL_UNPACK_ALIGNMENT, 1);// rows are tightly packed (1-byte alignment)
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, mVideoWidth, mVideoHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, mYUVFrame->data[0]);
	glPixelStorei(GL_UNPACK_ALIGNMENT, 4);// restore default 4-byte alignment
	glUniform1i(glGetUniformLocation(programId, "uni_textureY"), 0);

	// U plane: half resolution in both dimensions (YUV420P).
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, textures[1]);
	glPixelStorei(GL_UNPACK_ALIGNMENT, 1);// rows are tightly packed (1-byte alignment)
	// GL_LUMINANCE uploads a single-channel image — one YUV plane per texture.
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, mVideoWidth / 2, mVideoHeight / 2, 0,
		GL_LUMINANCE, GL_UNSIGNED_BYTE, mYUVFrame->data[1]);
	glPixelStorei(GL_UNPACK_ALIGNMENT, 4);// restore default 4-byte alignment
	glUniform1i(glGetUniformLocation(programId, "uni_textureU"), 1);

	// V plane: half resolution in both dimensions (YUV420P).
	glActiveTexture(GL_TEXTURE2);
	glBindTexture(GL_TEXTURE_2D, textures[2]);
	glPixelStorei(GL_UNPACK_ALIGNMENT, 1);// 1-byte alignment
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, mVideoWidth / 2, mVideoHeight / 2, 0,
		GL_LUMINANCE, GL_UNSIGNED_BYTE, mYUVFrame->data[2]);
	glPixelStorei(GL_UNPACK_ALIGNMENT, 4);// 4-byte alignment
	glUniform1i(glGetUniformLocation(programId, "uni_textureV"), 2);

	// Draw the quad (two triangles, 6 indices).
	glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

	// Unbind to avoid leaking state into other draw code.
	glBindVertexArray(0);
	glBindTexture(GL_TEXTURE_2D, 0);
	//renderVideo();
	qDebug() << "paintGL()=>>>>>>>>>>>>>>>>>>>>";
}

/// Copies one image plane out of an FFmpeg buffer whose rows are padded to
/// `linesize` bytes into a tightly-packed destination: `width` bytes per row,
/// source advances by `linesize`, destination by `width`.
/// @param src      source plane (row stride = linesize)
/// @param dist     destination buffer (row stride = width, tightly packed)
/// @param linesize source row stride in bytes
/// @param width    bytes to copy per row (clamped to linesize)
/// @param height   number of rows
void FFMpegSamplePlayerWindow::copyDecodedFrame420(uint8_t* src, uint8_t* dist, int linesize, int width, int height)
{
	width = MIN(linesize, width);
	// Fixed: the old message embedded printf-style "%d" placeholders that
	// qDebug's stream API never substitutes; log the values directly.
	qDebug() << "plane copy: width=" << width << ",height=" << height;
	for (int i = 0; i < height; ++i) {
		memcpy(dist, src, width);
		dist += width;
		src += linesize;
	}
}
/// Destructor. Fixed: the original released nothing, leaking every FFmpeg
/// resource and leaving the detached worker threads running against a dead
/// object. Signal the threads to stop, then free all owned resources.
FFMpegSamplePlayerWindow::~FFMpegSamplePlayerWindow()
{
	// The workers are detached, so give them a short grace period to
	// observe the flag before the shared state below is destroyed.
	m_bThreadRunning = false;
	stdThreadSleep(50);

	if (mVideoSwsCtx != NULL) {
		sws_freeContext(mVideoSwsCtx);
		mVideoSwsCtx = NULL;
	}
	if (mAVFrame != NULL) {
		av_frame_free(&mAVFrame);
	}
	if (mYUVFrame != NULL) {
		av_frame_free(&mYUVFrame);
	}
	if (m_pYUV420Buffer != NULL) {
		av_free(m_pYUV420Buffer);
		m_pYUV420Buffer = NULL;
	}
	if (avCodecContext != NULL) {
		avcodec_free_context(&avCodecContext);
	}
	if (avFormatContext != NULL) {
		avformat_close_input(&avFormatContext);
	}
}


// Shader-related helper methods below
/// Compiles the two shader stages, links them into a GL program, logs any
/// link error, and returns the program id. The intermediate shader objects
/// are deleted once linked — the program keeps its own copy.
GLuint FFMpegSamplePlayerWindow::buildAttachShaderAndReturnProgramId(QString vertexResPath, QString fragmentResPath) {
	const GLuint vertId = getShaderId(GL_VERTEX_SHADER, vertexResPath);
	const GLuint fragId = getShaderId(GL_FRAGMENT_SHADER, fragmentResPath);

	const GLuint progId = glCreateProgram();
	glAttachShader(progId, vertId);
	glAttachShader(progId, fragId);
	glLinkProgram(progId);
	getLinkProgramErrorInfo(progId);

	// Shader objects are no longer needed after a successful link.
	glDeleteShader(vertId);
	glDeleteShader(fragId);
	return progId;
}
/// Queries the link status of a GL program and dumps its info log on failure.
void FFMpegSamplePlayerWindow::getLinkProgramErrorInfo(GLuint programId) {
	int linkedOk = 0;
	glGetProgramiv(programId, GL_LINK_STATUS, &linkedOk);
	if (linkedOk) {
		return;
	}
	char logBuffer[512];
	glGetProgramInfoLog(programId, sizeof(logBuffer), NULL, logBuffer);
	qDebug() << "ERROR::SHADER::PROGRAM::LINKING_FAILED\n" << logBuffer;
}
/// Creates a shader object of the requested type (vertex OR fragment — the
/// old "vertex"-only names were misleading) and compiles the GLSL source
/// loaded from the given Qt resource path.
/// @return the GL shader object id (always created, even if the file is missing).
GLuint FFMpegSamplePlayerWindow::getShaderId(GLenum shaderType, QString resPath) {
	GLuint shaderId = glCreateShader(shaderType);
	QFile shaderFile(resPath);
	if (!shaderFile.open(QIODevice::ReadOnly)) {
		// Fixed: the original logged the failure but fell through and
		// "compiled" an empty string anyway. Bail out early instead.
		qDebug() << "Cannot open vertex shader file for reading";
		return shaderId;
	}
	QString srcQStr = shaderFile.readAll();
	// Fixed: removed the pointless flush() — the file is open read-only.
	shaderFile.close();
	// Keep the std::string alive while its c_str() is in use.
	std::string srcStdStr = srcQStr.toStdString();
	const char* srcPtr = srcStdStr.c_str();
	qDebug() << "vertexStr-------------" << srcPtr;
	glShaderSource(shaderId, 1, &srcPtr, NULL);
	glCompileShader(shaderId);
	getCompileShaderErrorInfo(shaderId);
	return shaderId;
}
/// Queries the compile status of a shader object and dumps its info log on failure.
void FFMpegSamplePlayerWindow::getCompileShaderErrorInfo(GLuint shaderId) {
	int compiledOk = 0;
	glGetShaderiv(shaderId, GL_COMPILE_STATUS, &compiledOk);
	if (compiledOk) {
		return;
	}
	char logBuffer[512];
	glGetShaderInfoLog(shaderId, sizeof(logBuffer), NULL, logBuffer);
	qDebug() << "ERROR::SHADER::VERTEX::COMPILATION_FAILED\n" << logBuffer;
}

// FFmpeg-related methods below
/// Opens the media file, finds the video stream, opens its decoder, records
/// the video dimensions/FPS, prepares the YUV420P output frame, and starts
/// the demux/decode worker threads.
/// @param filePath path to the media file to play
void FFMpegSamplePlayerWindow::startFFMPEG(const char* filePath) {
	// ffmpeg 5.x: av_register_all is registered automatically.
	// 1. Open the media file.
	int ret = avformat_open_input(&avFormatContext, filePath, NULL, NULL);
	if (ret != 0) {
		qDebug() << "fail to open avformat_open_input";
		avformat_close_input(&avFormatContext);
		avFormatContext = NULL;
		return;
	}
	qDebug() << "打开文件成功";
	// 2. Probe the streams.
	ret = avformat_find_stream_info(avFormatContext, NULL);
	if (ret < 0) {
		qDebug() << "fail avformat_find_stream_info";
		// Fixed: the opened input was leaked on this error path.
		avformat_close_input(&avFormatContext);
		return;
	}
	qDebug() << "发现媒体流";
	// 3. Find the video stream and open its decoder.
	videoStreamId = -1;
	// Fixed: nb_streams is unsigned — avoid the signed/unsigned comparison.
	for (unsigned int i = 0; i < avFormatContext->nb_streams; i++) {
		AVCodecParameters* codecParameters = avFormatContext->streams[i]->codecpar;
		if (codecParameters->codec_type != AVMEDIA_TYPE_VIDEO) {
			continue;
		}
		videoStreamId = (int)i;
		const AVCodec* codec = avcodec_find_decoder(codecParameters->codec_id);
		if (codec == NULL) {
			qDebug() << "没有找到解码器，解码器为空";
			return;
		}
		avCodecContext = avcodec_alloc_context3(codec);
		// Fixed: the return value of avcodec_parameters_to_context was ignored.
		ret = avcodec_parameters_to_context(avCodecContext, codecParameters);
		if (ret < 0) {
			qDebug() << "fail avcodec_parameters_to_context";
			return;
		}
		ret = avcodec_open2(avCodecContext, codec, NULL);
		if (ret < 0) {
			qDebug() << "打开解码器失败";
			return;
		}
		break;
	}
	// Fixed: the original fell through and started the worker threads even
	// when no video stream / decoder was available.
	if (videoStreamId < 0 || avCodecContext == NULL) {
		qDebug() << "no video stream found";
		return;
	}
	qDebug() << "解码器已成功找到并打开成功";

	// Record the decoded video dimensions for paintGL.
	mVideoWidth = avCodecContext->width;
	mVideoHeight = avCodecContext->height;
	qDebug() << "解码视频的宽高为:=width=" << mVideoWidth << ",height=" << mVideoHeight;

	avStream = avFormatContext->streams[videoStreamId];
	mVideoTimeBase = avStream->time_base;
	// Fixed: guard the division — some containers report a 0/0 frame rate.
	if (avStream->avg_frame_rate.den != 0 && avStream->avg_frame_rate.num != 0) {
		mVideoFPS = avStream->avg_frame_rate.num / avStream->avg_frame_rate.den;
	}
	else {
		mVideoFPS = 25; // fallback when the container reports no average rate
	}
	qDebug() << "视频的fps为=" << mVideoFPS;

	// Allocate the packed YUV420P buffer and wrap it in mYUVFrame so its
	// data[]/linesize[] describe the three planes.
	m_pYUV420Buffer = (uint8_t*)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, avCodecContext->width, avCodecContext->height, 1));
	if (mYUVFrame == NULL) {
		mYUVFrame = av_frame_alloc();
		av_image_fill_arrays(
			mYUVFrame->data,
			mYUVFrame->linesize,
			m_pYUV420Buffer,
			AV_PIX_FMT_YUV420P,
			avCodecContext->width,
			avCodecContext->height,
			1);
	}

	startMediaProcessThreads();
}

void FFMpegSamplePlayerWindow::startMediaProcessThreads() {
	m_bThreadRunning = true;
	//读取packet线程
	std::thread readThread(&FFMpegSamplePlayerWindow::doReadMediaFrameThread, this);
	readThread.detach();

	//解码packet线程
	std::thread videoThread(&FFMpegSamplePlayerWindow::doVideoDecodeShowThread, this);
	videoThread.detach();

}
void FFMpegSamplePlayerWindow::doReadMediaFrameThread() {
	while (m_bThreadRunning) {
		if (avCodecContext != NULL) {
			float sleepTime = 1000.0 / (float)mVideoFPS;
			stdThreadSleep((int)sleepTime);
		}

		if (mReadFileEOF == false) {
			//AVPacket* packet = (AVPacket*)malloc(sizeof(AVPacket));
			AVPacket* packet = av_packet_alloc();
			if (!packet) {
				return;
			}
			//av_init_packet(packet);

			int retValue = av_read_frame(avFormatContext, packet);
			if (retValue == 0)
			{
				if (packet->stream_index == videoStreamId) //Video frame
				{
					mVideoPktQueue.enqueue(packet);//向队列中写入数据
					//qDebug() << "向队列中写入数据";
					if (packet != NULL) {
						//av_packet_unref(packet);
						//av_packet_free(&packet);
						/*if (packet != NULL) {
							free(packet);
						}*/
					}
				}

			}
			else if (retValue < 0)
			{
				if ((mReadFileEOF == false) && (retValue == AVERROR_EOF)) {
					mReadFileEOF = true;
				}
				return;
			}
		}
		else {
			stdThreadSleep(10);
		}
	}
}
/// Blocks the calling thread for the given number of milliseconds.
void FFMpegSamplePlayerWindow::stdThreadSleep(int mseconds)
{
	std::this_thread::sleep_for(std::chrono::milliseconds(mseconds));
}
void FFMpegSamplePlayerWindow::doVideoDecodeShowThread() {
	if (avFormatContext == NULL) {
		return;
	}

	if (mAVFrame == NULL)
	{
		mAVFrame = av_frame_alloc();
	}


	while (m_bThreadRunning)
	{

		if (mVideoPktQueue.isEmpty()) {
			stdThreadSleep(1);
			continue;
		}

		AVPacket* pkt = mVideoPktQueue.dequeue();
		if (pkt == NULL) {
			break;
		}

		if (!m_bThreadRunning) {
			av_packet_free(&pkt);
			break;
		}
		//读取完成后向解码器中传空AVPacket，否则无法读取出最后几帧
		int retValue = avcodec_send_packet(avCodecContext, pkt);//将packet发送到解码器
		if (retValue != 0)
		{
			av_packet_free(&pkt);
			continue;
		}
		//av_packet_unref(pkt);//引用计数减-

		//av_frame_unref(mAVFrame);//引用计数减一
		int decodeRet = avcodec_receive_frame(avCodecContext, mAVFrame);//得到解码后的记过，mAVFrame
		if (decodeRet == 0)
		{
			if (mAVFrame == NULL) {
				return;
			}
			//将mAVFrame转成mYUVFrame
			if (mVideoSwsCtx == NULL)
			{
				//获取sws上下文,并将视频数据格式转换为YUV420P
				mVideoSwsCtx = sws_getContext(
					avCodecContext->width,
					avCodecContext->height,
					avCodecContext->pix_fmt,
					avCodecContext->width,
					avCodecContext->height,
					AV_PIX_FMT_YUV420P,
					SWS_BICUBIC, NULL, NULL, NULL);
			}
			if (yuvBuffer == NULL) {
				yuvBuffer = new unsigned char[avCodecContext->width * avCodecContext->height * 3 / 2];
			}
			int yuvWidth = avCodecContext->width;
			int yuvHeight = avCodecContext->height;
			int y_length = yuvWidth * yuvHeight;
			int u_length = yuvWidth / 2 * yuvHeight / 2;
			int v_length = yuvWidth / 2 * yuvHeight / 2;
			uint8_t* y_data= new  uint8_t[y_length];
			uint8_t* u_data = new  uint8_t[u_length];
			uint8_t* v_data = new  uint8_t[v_length];
			//把内存中的char初始化为0
			memset(y_data, '0', y_length);
			memset(u_data, '0', u_length);
			memset(v_data, '0', v_length);
			uint8_t* mData = new  uint8_t[yuvWidth*yuvHeight*3/2];
			memset(mData, '0', yuvWidth * yuvHeight * 3 / 2);
			int *outStrides = new int[4];
			sws_scale(mVideoSwsCtx,
				(const uint8_t* const*)mAVFrame->data,//源图像数据（YUV三个平面）
				mAVFrame->linesize, //步长数组（YUV，分别多长）
				0,//要处理数据的第一行的其实位置0
				(yuvHeight + yuvHeight / 2),//要处理多少行
				(uint8_t *const*)mData,//输出的图像数据
				outStrides);//输出的linesize

			//这块是重点
			//将data中的y_length长度的数据copy到y_data，从第0个位置开始copy
			memcpy(y_data, mData, y_length);
			//从data中copy u_length长度的数据到u_data中。从y_length位置开始copy
			memcpy(u_data, mData + y_length, u_length);
			//从data中copy v_length长度的数据到v_data中，从y_length+u_length位置开始copy
			memcpy(v_data, mData + y_length + u_length, v_length);

			mYUVFrame->width = avCodecContext->width;
			mYUVFrame->height = avCodecContext->height;
			mYUVFrame->pts = pkt->pts;

			//根据之前设置的输出图片格式，将原始数据转成一张图片
			//QImage image(mYUVFrame->data[0], mYUVFrame->width, mYUVFrame->height, QImage::Format_RGB32);
			//emit sendSignalsImage(image);
			update();//让OpenGL执行paintGL函数
			qDebug() << "解码成功了，开始渲染";
		}
		av_packet_free(&pkt);
	}


	qDebug() << "video decode show  thread exit...";

	m_bVideoThreadRunning = false;
}

void FFMpegSamplePlayerWindow::updateYUVData() {
	unsigned int lumaLength = avCodecContext->height * (MIN(mAVFrame->linesize[0], avCodecContext->width));
	unsigned int chromBLength = ((avCodecContext->height) / 2) * (MIN(mAVFrame->linesize[1], (avCodecContext->width) / 2));
	unsigned int chromRLength = ((avCodecContext->height) / 2) * (MIN(mAVFrame->linesize[2], (avCodecContext->width) / 2));

	///AVFrame->data[0]代表Y分量   AVFrame->linesize[0]代表每行的字节数
	///AVFrame->data[1]代表U分量   AVFrame->linesize[0]代表每行的字节数
	///AVFrame->data[2]代表V分量   AVFrame->linesize[0]代表每行的字节数

	updateYUVFrame = new YUVData_Frame();
	if (updateYUVFrame == NULL) {
		updateYUVFrame = new YUVData_Frame();
	}

	updateYUVFrame->luma.length = lumaLength;
	updateYUVFrame->chromaB.length = chromBLength;
	updateYUVFrame->chromaR.length = chromRLength;

	updateYUVFrame->luma.dataBuffer = (unsigned char*)malloc(lumaLength);
	updateYUVFrame->chromaB.dataBuffer = (unsigned char*)malloc(chromBLength);
	updateYUVFrame->chromaR.dataBuffer = (unsigned char*)malloc(chromRLength);


	copyDecodedFrame420(mYUVFrame->data[0], updateYUVFrame->luma.dataBuffer, mYUVFrame->linesize[0],
		avCodecContext->width, avCodecContext->height);
	copyDecodedFrame420(mYUVFrame->data[1], updateYUVFrame->chromaB.dataBuffer, mYUVFrame->linesize[1],
		avCodecContext->width / 2, avCodecContext->height / 2);
	copyDecodedFrame420(mYUVFrame->data[2], updateYUVFrame->chromaR.dataBuffer, mYUVFrame->linesize[2],
		avCodecContext->width / 2, avCodecContext->height / 2);

	updateYUVFrame->width = avCodecContext->width;
	updateYUVFrame->height = avCodecContext->height;

	//updateYUVFrame->pts = pkt->pts;


}

/// Frees the packed YUV snapshot (plane buffers, then the frame itself).
/// Fixed: the original dereferenced updateYUVFrame three times BEFORE the
/// NULL check at the bottom — a guaranteed crash when called with no frame.
void FFMpegSamplePlayerWindow::releaseYUVData() {
	if (updateYUVFrame == NULL) {
		return;
	}

	if (updateYUVFrame->luma.dataBuffer) {
		free(updateYUVFrame->luma.dataBuffer);
		updateYUVFrame->luma.dataBuffer = NULL;
	}

	if (updateYUVFrame->chromaB.dataBuffer) {
		free(updateYUVFrame->chromaB.dataBuffer);
		updateYUVFrame->chromaB.dataBuffer = NULL;
	}

	if (updateYUVFrame->chromaR.dataBuffer) {
		free(updateYUVFrame->chromaR.dataBuffer);
		updateYUVFrame->chromaR.dataBuffer = NULL;
	}

	delete updateYUVFrame;
	updateYUVFrame = NULL;
}

/// Packs the current YUV snapshot into one contiguous Y|U|V buffer
/// (m_pBufYuv420p) for rendering.
/// Fixed: updateYUVFrame and the malloc result were dereferenced unchecked.
void FFMpegSamplePlayerWindow::renderVideo() {

	if (updateYUVFrame == NULL) {
		return;
	}

	m_yFrameLength = updateYUVFrame->luma.length;
	m_uFrameLength = updateYUVFrame->chromaB.length;
	m_vFrameLength = updateYUVFrame->chromaR.length;

	// One YUV420P frame is 1.5x the pixel count: Y + U/4 + V/4.
	int nLen = m_yFrameLength + m_uFrameLength + m_vFrameLength;

	// NOTE(review): the buffer is sized once from the first frame; a
	// mid-stream resolution change would overflow it — confirm the
	// resolution is fixed for a session (the commented-out original code
	// hinted at handling this).
	if (NULL == m_pBufYuv420p)
	{
		m_pBufYuv420p = (unsigned char*)malloc(nLen);
	}
	if (m_pBufYuv420p == NULL) {
		return;
	}

	memcpy(m_pBufYuv420p, updateYUVFrame->luma.dataBuffer, m_yFrameLength);
	memcpy(m_pBufYuv420p + m_yFrameLength, updateYUVFrame->chromaB.dataBuffer, m_uFrameLength);
	memcpy(m_pBufYuv420p + m_yFrameLength + m_uFrameLength, updateYUVFrame->chromaR.dataBuffer, m_vFrameLength);
}

