#include "stdafx.h"
#include <streams.h>
#include "H264SourceFilter.h"
#include "stdio.h"
#include <MMSystem.h>
#include <utility>

#ifdef NEED_DEINTERLACE
extern "C" BOOL __cdecl Deinterlace(int SearchEffort, int _src_pitch, int _dst_pitch,
		int _rowsize, const BYTE* _pWeaveSrc, const BYTE* _pWeaveSrcP,
		BYTE* _pWeaveDest, BOOL _TopFirst, const BYTE* _pCopySrc,
		const BYTE* _pCopySrcP, int _FldHeight);
#endif

#define USE_YUV_FORMAT //If not define this macro,RGB color format will be used.

/**********************************************
 *
 *  H264SourcePin Class
 *  
 *
 **********************************************/

#ifdef SUPPORT_H264

// Pulls compressed data from the owning filter and decodes until one complete
// frame is available in the internal YV12 double buffers.
// NOTE(review): the 'outbuf' parameter is overwritten with _currBuffer below,
// so the caller-supplied pointer is effectively ignored; the decoded frame
// always lands in _videoBuffer1/_videoBuffer2.
// Returns false when the data source fails or ends (decode session is torn
// down); true when a frame was decoded, or when the 300 ms decode budget is
// exceeded while the image size is already known.
bool H264SourcePin::Decode(int width, int height, BYTE *outbuf)
{
	H264SourceFilter *filter = (H264SourceFilter *) m_pFilter;
	// Y-plane byte count; the chroma planes sit at fixed offsets from it.
	// In decode-callback mode the decoder-reported size is authoritative.
	int offset = width * abs(height);
	if (_decodeCallback != NULL)
		offset = abs(m_iImageWidth * m_iImageHeight);

	// Lazily (re)create the decoder session (it is zeroed on error/EOS below).
	if (m_session == 0)
		m_session = CreateDecodeSessionNoSize(1);

	if (_lastFpsCountTime == 0)
		_lastFpsCountTime = timeGetTime();

	// First allocation of the two frame buffers. This can only happen once the
	// decoder has parsed the stream headers (_getsize set).
	if (_videoBuffer1 == NULL && _getsize)
	{
		if (_decodeCallback != NULL)
		{
			// Callback mode: size the buffers from the decoder's own report.
			int videoWidth, videoHeight;
			GetVideoSize(m_session, &videoWidth, &videoHeight);
			m_iImageWidth = videoWidth;
			m_iImageHeight = videoHeight;
			_videoBufferSize = videoWidth * videoHeight * 3 / 2; // YV12 = 12 bpp
			_videoBuffer1 = new BYTE[_videoBufferSize];
			_videoBuffer2 = new BYTE[_videoBufferSize];
			_currBuffer = _videoBuffer1;

			// Initialize both buffers to a black frame: Y = 0, chroma = 128.
			int yBufSize = videoWidth * videoHeight;
			memset(_videoBuffer1, 0, yBufSize);
			memset(_videoBuffer1 + yBufSize, 128, _videoBufferSize - yBufSize);
			memset(_videoBuffer2, 0, yBufSize);
			memset(_videoBuffer2 + yBufSize, 128, _videoBufferSize - yBufSize);
		}
		else
		{
			// Normal mode: size the buffers from the negotiated media type.
			_videoBufferSize = m_iImageWidth * m_iImageHeight * 3 / 2;
			_videoBuffer1 = new BYTE[_videoBufferSize];
			_videoBuffer2 = new BYTE[_videoBufferSize];
			_currBuffer = _videoBuffer1;

			int yBufSize = m_iImageWidth * m_iImageHeight;
			memset(_videoBuffer1, 0, yBufSize);
			memset(_videoBuffer1 + yBufSize, 128, _videoBufferSize - yBufSize);
			memset(_videoBuffer2, 0, yBufSize);
			memset(_videoBuffer2 + yBufSize, 128, _videoBufferSize - yBufSize);
		}
	}

	// Grow the buffers if the caller's frame is larger than what was allocated
	// (only in the non-callback path).
	if (_videoBuffer1 != NULL && _getsize && (width > m_iImageWidth || abs(height) > m_iImageHeight) && _decodeCallback == NULL)
	{
		if ((int) _videoBufferSize < width * abs(height) * 3 / 2)
		{
			_videoBufferSize = width * abs(height) * 3 / 2;
			delete[] _videoBuffer1;
			delete[] _videoBuffer2;
			_videoBuffer1 = new BYTE[_videoBufferSize];
			_videoBuffer2 = new BYTE[_videoBufferSize];
			_currBuffer = _videoBuffer1;

			int yBufSize = width * abs(height);
			memset(_videoBuffer1, 0, yBufSize);
			memset(_videoBuffer1 + yBufSize, 128, _videoBufferSize - yBufSize);
			memset(_videoBuffer2, 0, yBufSize);
			memset(_videoBuffer2 + yBufSize, 128, _videoBufferSize - yBufSize);
		}
	}

	// Decoding always targets _videoBuffer1; 'outbuf' (a by-value parameter)
	// is redirected here, so the caller's pointer is never written through.
	_currBuffer = _videoBuffer1;
	outbuf = _currBuffer;

	int getframe = 0;
	DWORD startTime = timeGetTime();
	while (!getframe)
	{
		DWORD now = timeGetTime();
		if (now < startTime)
			startTime = now; // timeGetTime() wrapped around (~49.7 days)
		else if (now - startTime > 300 && m_iImageWidth != 0)
			return true; // decode budget exhausted; report success without a new frame

		// Refill the input buffer from the filter when drained.
		if (_len <= 0)
		{
			if (!filter->GetData(_buffer, _len))
			{
				// Source failed/ended: tear the session down so the next call
				// starts fresh.
				_buffer = NULL;
				_len = 0;
				CloseDecodeSession(m_session);
				m_session = 0;
				_getsize = 0;

				return false;
			}
			else if (_len == 0)
			{
				// No data yet; back off briefly and retry.
				Sleep(3);
				continue;
			}
		}

		BYTE *inbuf = _buffer;

		while (_len > 0)
		{
			// Each frame is preceded by a BtlFrameHeader; it may arrive split
			// across reads, so _bytesForHeader tracks what is still missing.
			if (_beginNewFrame)
			{
				UINT bytesToCopy = min(_bytesForHeader, _len);
				memcpy(((char *)&_header) + sizeof(BtlFrameHeader) - _bytesForHeader, inbuf, bytesToCopy);
				_bytesForHeader -= bytesToCopy;
				_len -= bytesToCopy;
				inbuf += bytesToCopy;

				// Wait for more data while the header (or payload start) is
				// incomplete.
				if (_len <= _bytesForHeader)
					break;

				_beginNewFrame = false;
			}

			// Plane pointers: V at 'offset', U at 'offset * 5/4' — presumably
			// YV12 ordering (V plane before U) — TODO confirm against decoder.
			BYTE *ybuf = outbuf, *ubuf = outbuf + offset * 5 / 4, *vbuf = outbuf + offset;

			int decodelen = DecodeData(m_session, inbuf, _len, &getframe, ybuf, ubuf, vbuf, ((_decodeCallback != NULL) ? m_iImageWidth : width), 0);//(height < 0) ? 1 : 0);
			if (decodelen < 0)
			{
				// Decoder error: reset the session and resynchronize on a new
				// frame header from fresh data.
				_buffer = NULL;
				_len = 0;
				CloseDecodeSession(m_session);
				m_session = 0;
				_getsize = 0;

				getframe = 0;

				m_session = CreateDecodeSessionNoSize(1);
				filter->GetData(_buffer, _len);
				_beginNewFrame = true;
				_bytesForHeader = sizeof(BtlFrameHeader);
			}

			if (decodelen > 0)
			{
				// Consume decoded bytes; when the current frame's payload is
				// exhausted, expect a new header next.
				inbuf += decodelen;
				_len -= decodelen;
				_header.videoLen -= decodelen;
				if (_header.videoLen <= 0)
				{
					_beginNewFrame = true;
					_bytesForHeader = sizeof(BtlFrameHeader);
				}
			}

			if (getframe)
			{
#ifdef NEED_DEINTERLACE
				// Keep the previous frame around for field interpolation.
				std::swap(_videoBuffer1, _videoBuffer2);
#endif
				// Preserve undecoded leftover bytes for the next call.
				if (_len > 0)
					memmove(_buffer, inbuf, _len);

				int vwidth, vheight;
				_getsize = (GetVideoSize(m_session, &vwidth, &vheight) == 0);

				// FPS accounting, with timer-wrap protection.
				++_totalFrame;
				DWORD now = timeGetTime();
				if (now < _lastFpsCountTime)
				{
					_lastFpsCountTime = now;
					_totalFrame = 0;
				}
				if (now - _lastFpsCountTime >= BITRATE_COUNT_INTERVAL)
				{
					_lastActualFps = (DWORD) (double(_totalFrame) * 1000.0 / double(now - _lastFpsCountTime) + 0.5);
					_lastFpsCountTime = now;
					_totalFrame = 0;
				}

				return true;
			}
		}

		Sleep(0);
	}

	return true;
}
#endif


// Constructs the output pin. 'fps' selects the target playback rate (0 keeps
// the 25 fps default and disables throttling in the delivery loop); 'width' /
// 'height' seed the image size until the decoder reports the real one;
// 'isRawYuv' selects pass-through delivery instead of H.264 decoding.
H264SourcePin::H264SourcePin(HRESULT *phr, CSource *pFilter, UINT fps, UINT width, UINT height, bool isRawYuv)
        : CSourceStream(NAME("Push Source Desktop"), phr, pFilter, L"Out"),
        m_iFrameNumber(0),
        m_rtFrameLength(UNITS / 25), // default frame duration = 25 fps (overridden below when fps > 0)
        m_nCurrentBitDepth(32),
#ifdef SUPPORT_H264
		m_session(0),
#endif
		_getsize(false),
		_buffer(NULL),
		_len(0),
		_startTime(0),
		_videoBuffer1(NULL),
		_videoBuffer2(NULL),
		_currBuffer(NULL),
		_width(width),
		_height(height),
		_isRawYuv(isRawYuv),
		_videoBufferSize(0),
		m_lastUpdateTime(0),
		_drawCallback(NULL),
		_decodeCallback(NULL),
		_fps(fps),
		_beginNewFrame(true),
		_bytesForHeader(sizeof(BtlFrameHeader)),
		_lastFpsCountTime(0),
		_lastActualFps(0),
		_totalFrame(0)
{
	// Honor the requested rate, but cap the frame duration at the 120 fps
	// equivalent so a huge 'fps' cannot produce a zero-length frame time.
	if (fps > 0)
	{
		m_rtFrameLength = UNITS / fps;
		if (m_rtFrameLength < UNITS / 120)
			m_rtFrameLength = UNITS / 120;
	}

    m_iImageWidth  = _width;
    m_iImageHeight = _height;
}

// Tears down the decoder session (if any) and frees the YV12 double buffers.
H264SourcePin::~H264SourcePin()
{   
#ifdef SUPPORT_H264
	if (m_session != 0)
		CloseDecodeSession(m_session);
#endif

	ReleaseVideoBuffers();
}

// Reports the video dimensions once the decoder has discovered them.
// Returns 0 on success and fills *width/*height; returns -1 while the size is
// still unknown (nothing decoded yet).
int H264SourcePin::GetActualVideoSize(int *width, int *height)
{
	if (_getsize)
	{
#ifdef SUPPORT_H264
		// Query the live decode session for the current dimensions.
		return GetVideoSize(m_session, width, height);
#else
		*width = m_iImageWidth;
		*height = m_iImageHeight;
		return 0;
#endif
	}

	return -1;
}

// Registers a callback invoked after each delivered frame so the client can
// overlay custom drawing on the video window. Serialized with the streaming
// thread via the shared-state lock. Always returns 0.
int H264SourcePin::SetDrawCallback(PLAYBACKSESSION session, DRAWCALLBACK drawCallback, long param)
{
	CAutoLock guard(&m_cSharedState);

	_playbackSession = session;
	_drawCallback = drawCallback;
	_drawCallbackParam = param;

	return 0;
}

// Registers a callback that receives each decoded frame instead of having it
// rendered. Serialized with the streaming thread via the shared-state lock.
// Always returns 0.
int H264SourcePin::SetDecodeCallback(PLAYBACKSESSION session, DECODECALLBACK decodeCallback, long param)
{
	CAutoLock guard(&m_cSharedState);

	_playbackSession = session;
	_decodeCallback = decodeCallback;
	_decodeCallbackParam = param;

	return 0;
}

// Updates the playback rate limiter used by the delivery loop.
// A value of 0 disables throttling entirely. Always returns 0.
int H264SourcePin::ChangePlaybackFps(UINT fps)
{
	_fps = fps;
	return 0;
}

// Offers our single supported media type (YV12 planar video) to the
// downstream filter. Only position 0 is valid. If the image size is not yet
// known, this blocks decoding data until the stream headers reveal it.
HRESULT H264SourcePin::GetMediaType(int iPosition, CMediaType *pmt)
{
	CheckPointer(pmt,E_POINTER);
	CAutoLock cAutoLock(m_pFilter->pStateLock());

	if(iPosition < 0)
        return E_INVALIDARG;

    // Have we run off the end of types?
    if(iPosition > 0)
        return VFW_S_NO_MORE_ITEMS;

    VIDEOINFO *pvi = (VIDEOINFO *) pmt->AllocFormatBuffer(sizeof(VIDEOINFO));
    if(NULL == pvi)
        return(E_OUTOFMEMORY);

    // Initialize the VideoInfo structure before configuring its members
    ZeroMemory(pvi, sizeof(VIDEOINFO));

	pvi->AvgTimePerFrame = m_rtFrameLength;

#ifdef SUPPORT_H264
	// Size not negotiated up front: decode until the decoder can report it.
	// Decode() returning false (source failure) breaks the loop too.
	if (m_iImageWidth == 0)
	{
		while (Decode(0, 0, NULL) && !_getsize) ;
		if (!_getsize)
			return S_FALSE;

		GetVideoSize(m_session, &m_iImageWidth, &m_iImageHeight);
	}
#endif

    SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
    SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle

    // Adjust the parameters common to all formats
    pvi->bmiHeader.biSize       = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth      = m_iImageWidth;
    pvi->bmiHeader.biHeight     = m_iImageHeight;
    pvi->bmiHeader.biClrImportant = 0;

    // YV12: 12 bits per pixel, image size = w * h * 3/2.
    // NOTE(review): biPlanes is normally required to be 1 by the
    // BITMAPINFOHEADER contract even for planar formats — confirm the
    // downstream renderer accepts 3 here.
    pvi->bmiHeader.biCompression = MAKEFOURCC('Y', 'V', '1', '2');
    pvi->bmiHeader.biBitCount    = 12;
	pvi->bmiHeader.biPlanes      = 3;
    pvi->bmiHeader.biSizeImage   = m_iImageHeight * m_iImageWidth * 3 / 2;
	pmt->SetSubtype(&MEDIASUBTYPE_YV12); 

    pmt->SetType(&MEDIATYPE_Video);
    pmt->SetFormatType(&FORMAT_VideoInfo);
    pmt->SetTemporalCompression(FALSE);
	pmt->SetSampleSize(pvi->bmiHeader.biSizeImage);

    // Work out the GUID for the subtype from the header info.
    //const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader);

    return NOERROR;

} // GetMediaType


//
// CheckMediaType
//
// We will accept 8, 16, 24 or 32 bit video formats, in any
// image size that gives room to bounce.
// Returns E_INVALIDARG if the mediatype is not acceptable
//
//
// CheckMediaType
//
// Accepts only YV12 video whose dimensions are compatible with the decoded
// image. Returns E_INVALIDARG if the media type is not acceptable.
// NOTE(review): despite the name, this also updates m_mt when the proposed
// type differs — a side effect the DirectShow base classes don't usually put
// in a check routine; verify this is intentional.
//
HRESULT H264SourcePin::CheckMediaType(const CMediaType *pMediaType)
{
    CheckPointer(pMediaType,E_POINTER);

    if((*(pMediaType->Type()) != MEDIATYPE_Video))                  // in fixed size samples
    {                                                  
        return E_INVALIDARG;
    }

    // Check for the subtypes we support
    const GUID *SubType = pMediaType->Subtype();
    if (SubType == NULL)
        return E_INVALIDARG;

    if(*SubType != MEDIASUBTYPE_YV12) 
        return E_INVALIDARG;

    // Get the format area of the media type
    VIDEOINFO *pvi = (VIDEOINFO *) pMediaType->Format();

    if(pvi == NULL)
        return E_INVALIDARG;

    // Check if the image width & height have changed
    // NOTE(review): width uses '<' (wider strides allowed?) but height uses
    // '!=' — presumably deliberate asymmetry; confirm.
    if(    pvi->bmiHeader.biWidth < m_iImageWidth || 
       abs(pvi->bmiHeader.biHeight) != m_iImageHeight)
    {
        // If the image width/height is changed, fail CheckMediaType() to force
        // the renderer to resize the image.
        return E_INVALIDARG;
    }

    // Don't accept formats with negative height, which would cause the desktop
    // image to be displayed upside down.
    //if (pvi->bmiHeader.biHeight < 0)
    //    return E_INVALIDARG;

	if (m_mt != *pMediaType)
		m_mt = *pMediaType;

    return S_OK;  // This format is acceptable.

} // CheckMediaType

// Returns the byte size a Snapshot() caller must allocate: a 32-bit DIB
// (BITMAPINFOHEADER + pixel data) for the current video dimensions, or -1
// while the size is still unknown.
int H264SourcePin::GetSnapshotSize()
{
	if (!_getsize)
		return -1;

	BITMAPINFOHEADER bmpHdr;
	memset(&bmpHdr, 0, sizeof(bmpHdr));
	bmpHdr.biBitCount = 32;
#ifdef SUPPORT_H264
	GetVideoSize(m_session, (int *) &bmpHdr.biWidth, (int *) &bmpHdr.biHeight);
#ifdef NEED_DEINTERLACE
	// Device quirk (Tn86-21D): a reported height of 256 is really 240.
	if (bmpHdr.biHeight == 256)
		bmpHdr.biHeight = 240;
#endif
#else
	bmpHdr.biWidth = m_iImageWidth;
	bmpHdr.biHeight = m_iImageHeight;
#endif

	// DIBSIZE accounts for row alignment of the 32-bpp bitmap.
	return sizeof(BITMAPINFOHEADER) + DIBSIZE(bmpHdr);
}

#ifdef NEED_DEINTERLACE
#ifdef NEED_DEINTERLACE
// Weaves the even/odd fields stored in the current and previous frame buffers
// into a full progressive YV12 frame at destBuf.
// Field layout assumption: within each buffer the even field occupies the
// first half of each plane and the odd field the second half — TODO confirm
// against the decoder's output format.
// The #else branch (disabled) is a plain field-interleave fallback kept for
// reference; the active path uses the external Deinterlace() routine with
// search effort 6.
int H264SourcePin::DeinterlaceFrames(BYTE *destBuf, int destWidth, int destHeight)
{
	int newWidth, newHeight, halfHeight, quaterHeight; 
	GetVideoSize(m_session, &newWidth, &newHeight);
	halfHeight = abs(destHeight) / 2;
	quaterHeight = halfHeight / 2;
	// Current-frame field pointers: Y even/odd, then U, then V (U plane at
	// w*h, V plane at w*h*5/4 within the packed buffer).
	BYTE *oddFieldYPtr, *evenFieldYPtr, *oddFieldUPtr, *evenFieldUPtr, *oddFieldVPtr, *evenFieldVPtr;
	BYTE *dstY, *dstU, *dstV;
	evenFieldYPtr = _currBuffer;
	oddFieldYPtr = _currBuffer+destWidth*newHeight/2;
	evenFieldUPtr = _currBuffer+destWidth*abs(destHeight);
	oddFieldUPtr = _currBuffer+destWidth*abs(destHeight)+destWidth*abs(newHeight)/8;
	evenFieldVPtr = _currBuffer+destWidth*abs(destHeight)*5/4;
	oddFieldVPtr = _currBuffer+destWidth*abs(destHeight)*5/4 + destWidth*abs(newHeight)/8;
	dstY = destBuf;
	dstU = destBuf + destWidth*abs(destHeight);
	dstV = destBuf + destWidth*abs(destHeight)*5/4;
	// Previous-frame field pointers come from whichever double buffer is not
	// current (Decode() swaps them on every completed frame).
	BYTE *oddFieldYPtrP, *evenFieldYPtrP, *oddFieldUPtrP, *evenFieldUPtrP, *oddFieldVPtrP, *evenFieldVPtrP;
	if (_currBuffer == _videoBuffer1)
	{
		evenFieldYPtrP = _videoBuffer2;
		oddFieldYPtrP = _videoBuffer2+destWidth*newHeight/2;
		evenFieldUPtrP = _videoBuffer2+destWidth*abs(destHeight);
		oddFieldUPtrP = _videoBuffer2+destWidth*abs(destHeight)+destWidth*abs(newHeight)/8;
		evenFieldVPtrP = _videoBuffer2+destWidth*abs(destHeight)*5/4;
		oddFieldVPtrP = _videoBuffer2+destWidth*abs(destHeight)*5/4 + destWidth*abs(newHeight)/8;
	}
	else
	{
		evenFieldYPtrP = _videoBuffer1;
		oddFieldYPtrP = _videoBuffer1+destWidth*newHeight/2;
		evenFieldUPtrP = _videoBuffer1+destWidth*abs(destHeight);
		oddFieldUPtrP = _videoBuffer1+destWidth*abs(destHeight)+destWidth*abs(newHeight)/8;
		evenFieldVPtrP = _videoBuffer1+destWidth*abs(destHeight)*5/4;
		oddFieldVPtrP = _videoBuffer1+destWidth*abs(destHeight)*5/4 + destWidth*abs(newHeight)/8;
	}

#define IS_TOPFIRST FALSE
#if 1
	// Motion-adaptive deinterlace of each plane; chroma planes are half width
	// and half field height.
	Deinterlace(6, destWidth, destWidth, destWidth, evenFieldYPtr, evenFieldYPtrP, dstY, IS_TOPFIRST, oddFieldYPtr, oddFieldYPtrP, halfHeight);
	Deinterlace(6, destWidth / 2, destWidth / 2, destWidth / 2, evenFieldUPtr, evenFieldUPtrP, dstU, IS_TOPFIRST, oddFieldUPtr, oddFieldUPtrP, halfHeight / 2);
	Deinterlace(6, destWidth / 2, destWidth / 2, destWidth / 2, evenFieldVPtr, evenFieldVPtrP, dstV, IS_TOPFIRST, oddFieldVPtr, oddFieldVPtrP, halfHeight / 2);
#else
	int i;
	int halfWidth = destWidth / 2;
	for (i = 0; i < halfHeight; ++i)
	{
		memcpy(dstY, evenFieldYPtr, destWidth);
		dstY += destWidth;
		memcpy(dstY, oddFieldYPtr, destWidth);
		dstY += destWidth;
		evenFieldYPtr += destWidth;
		oddFieldYPtr += destWidth;
	}
	for (i = 0; i < quaterHeight; ++i)
	{
		memcpy(dstU, evenFieldUPtr, halfWidth);
		dstU += halfWidth;
		evenFieldUPtr += halfWidth;
		memcpy(dstU, oddFieldUPtr, halfWidth);
		dstU += halfWidth;
		oddFieldUPtr += halfWidth;
	}
	for (i = 0; i < quaterHeight; ++i)
	{
		memcpy(dstV, evenFieldVPtr, halfWidth);
		dstV += halfWidth;
		evenFieldVPtr += halfWidth;
		memcpy(dstV, oddFieldVPtr, halfWidth);
		dstV += halfWidth;
		oddFieldVPtr += halfWidth;
	}
#endif

	return 0;
}
#endif
#endif

// Converts the most recent decoded frame to a 32-bit BGRA DIB in 'buf'
// (BITMAPINFOHEADER followed by bottom-up pixel rows). 'buf' must be at least
// GetSnapshotSize() bytes; 'bufLen' is currently unused. Returns the number of
// bytes written, or -1 when no frame has been decoded yet.
int H264SourcePin::Snapshot(char *buf, int bufLen)
{
	if (_currBuffer == NULL || !_getsize)
		return -1;

	LPBITMAPINFOHEADER bmpHdr = (LPBITMAPINFOHEADER) buf;
	memset(bmpHdr, 0, sizeof(BITMAPINFOHEADER));
	bmpHdr->biSize = sizeof(BITMAPINFOHEADER);
#ifdef SUPPORT_H264
	GetVideoSize(m_session, (int *) &bmpHdr->biWidth, (int *) &bmpHdr->biHeight);
#ifdef NEED_DEINTERLACE
	// Device quirk (Tn86-21D): a reported height of 256 is really 240.
	if (bmpHdr->biHeight == 256)
		bmpHdr->biHeight = 240;
#endif
#else
	bmpHdr->biWidth = m_iImageWidth;
	bmpHdr->biHeight = m_iImageHeight;
#endif
	bmpHdr->biCompression = BI_RGB;
	bmpHdr->biBitCount = 32;
	bmpHdr->biPlanes = 1;
	bmpHdr->biSizeImage = GetBitmapSize(bmpHdr);

	// Chroma planes live at offset (first) and offset*5/4 within the YV12
	// buffer. NOTE(review): this branch names them v-then-u while the
	// deinterlace branch below names them u-then-v at the same offsets —
	// presumably both match the YV12 V-before-U layout; confirm.
	int height = abs(_height);
	int offset = _width * height;
#ifdef NEED_DEINTERLACE
	// Build a progressive frame in a temporary buffer first (extra slack for
	// the deinterlacer), under the shared-state lock to keep the double
	// buffers stable.
	BYTE *frameBuffer = new BYTE[_videoBufferSize + 10240];
	BYTE *y = frameBuffer;
	BYTE *v = frameBuffer + offset;
	BYTE *u = frameBuffer + offset * 5 / 4;
	{
		CAutoLock cAutoLockShared(&m_cSharedState);
		DeinterlaceFrames(frameBuffer, _width, _height);
	}
#else
	BYTE *y = _currBuffer;
	BYTE *v = _currBuffer + offset;
	BYTE *u = _currBuffer + offset * 5 / 4;
#endif
	// Convert to RGB32, writing rows bottom-up (negative stride) as DIBs
	// require: start at the last row and step backwards.
	yuv2rgb_32(y, _width, u, v, _width / 2, 
		(BYTE *) buf + sizeof(BITMAPINFOHEADER) + (bmpHdr->biHeight - 1) * WIDTHBYTES(bmpHdr->biWidth * 32), bmpHdr->biWidth, bmpHdr->biHeight, -1 * WIDTHBYTES(bmpHdr->biWidth * 32));

#ifdef NEED_DEINTERLACE
	delete[] frameBuffer;
	frameBuffer = NULL;
#endif

	return sizeof(BITMAPINFOHEADER) + bmpHdr->biSizeImage;
}


//
// DecideBufferSize
//
// This will always be called after the format has been sucessfully
// negotiated. So we have a look at m_mt to see what size image we agreed.
// Then we can ask for buffers of the correct size to contain them.
//
//
// DecideBufferSize
//
// This will always be called after the format has been sucessfully
// negotiated. So we have a look at m_mt to see what size image we agreed.
// Then we can ask for buffers of the correct size to contain them.
//
HRESULT H264SourcePin::DecideBufferSize(IMemAllocator *pAlloc,
                                      ALLOCATOR_PROPERTIES *pProperties)
{
    CheckPointer(pAlloc,E_POINTER);
    CheckPointer(pProperties,E_POINTER);

    CAutoLock cAutoLock(m_pFilter->pStateLock());
    HRESULT hr = NOERROR;

    VIDEOINFO *pvi = (VIDEOINFO *) m_mt.Format();
    // Guard against an unset/invalid media type before dereferencing the
    // format block (SetMediaType performs the same check).
    if (pvi == NULL)
        return E_UNEXPECTED;

    pProperties->cBuffers = 2;
    pProperties->cbBuffer = pvi->bmiHeader.biSizeImage;

    ASSERT(pProperties->cbBuffer);

    // Ask the allocator to reserve us some sample memory. NOTE: the function
    // can succeed (return NOERROR) but still not have allocated the
    // memory that we requested, so we must check we got whatever we wanted.
    ALLOCATOR_PROPERTIES Actual;
    hr = pAlloc->SetProperties(pProperties,&Actual);
    if(FAILED(hr))
    {
        return hr;
    }

    // Is this allocator unsuitable?
    if(Actual.cbBuffer < pProperties->cbBuffer)
    {
        return E_FAIL;
    }

    // We requested two buffers above; verify the allocator honored that.
    ASSERT(Actual.cBuffers == 2);
    return NOERROR;

} // DecideBufferSize


//
// SetMediaType
//
// Called when a media type is agreed between filters
//
//
// SetMediaType
//
// Called when a media type is agreed between filters. Records the agreed type
// and its bit depth (12 here means the YV12 planar format we offer).
//
HRESULT H264SourcePin::SetMediaType(const CMediaType *pMediaType)
{
    CAutoLock cAutoLock(m_pFilter->pStateLock());

    // Pass the call up to my base class
	HRESULT hr = CSourceStream::SetMediaType(pMediaType);

    if(SUCCEEDED(hr))
    {
        VIDEOINFO * pvi = (VIDEOINFO *) m_mt.Format();
        if (pvi == NULL)
            return E_UNEXPECTED;

        switch(pvi->bmiHeader.biBitCount)
        {
        case 8:     // 8-bit palettized
		case 12:    // YV12 (the format GetMediaType actually offers)
        case 16:    // RGB565, RGB555
        case 24:    // RGB24
        case 32:    // RGB32
            // Save the current media type and bit depth
            m_MediaType = *pMediaType;
            m_nCurrentBitDepth = pvi->bmiHeader.biBitCount;
            hr = S_OK;
            break;

        default:
            // We should never agree any other media types
            ASSERT(FALSE);
            hr = E_INVALIDARG;
            break;
        }
    } 

    return hr;

} // SetMediaType

// Frees both decode frame buffers and clears every pointer that referenced
// them. delete[] on a null pointer is a no-op, so no guards are needed.
void H264SourcePin::ReleaseVideoBuffers()
{
	delete[] _videoBuffer1;
	_videoBuffer1 = NULL;

	delete[] _videoBuffer2;
	_videoBuffer2 = NULL;

	// _currBuffer always aliased one of the two buffers above.
	_currBuffer = NULL;
}

// Placeholder: invoked from DoBufferProcessingLoop when the decoder reports a
// size different from the negotiated one. Resizing is not implemented yet, so
// a mid-stream resolution change is currently ignored.
void H264SourcePin::AdjustVideoSize(int newWidth, int newHeight)
{
	// TODO:
}

// Streaming-thread main loop: decodes (or passes through raw YUV) frames and
// delivers them downstream until a stop command arrives.
// Returns S_OK when the stream ends (EOS delivered) and S_FALSE on CMD_STOP.
// FIX: both GetDeliveryBuffer() retry loops previously read
//   while (hr = GetDeliveryBuffer(...) && <timeout-check>)
// which, due to operator precedence, assigned the *boolean* value of the whole
// condition to hr and discarded the real HRESULT. The assignment is now
// parenthesized; the control flow is otherwise unchanged.
HRESULT H264SourcePin::DoBufferProcessingLoop(void) 
{
	HRESULT hr = S_OK;
	long cbData = 0;
	IMediaSample *pSample = NULL;

	OnThreadStartPlay();

	// Cache the negotiated frame geometry for this streaming session.
	VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)m_mt.pbFormat;
	int width = pVih->bmiHeader.biWidth;
	int height = pVih->bmiHeader.biHeight;
	_width = width;
	_height = height;

#ifdef NEED_DEINTERLACE
	memset(m_blockTotalY, 0, sizeof(m_blockTotalY));
#endif

	//SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_ABOVE_NORMAL);

	DWORD lastFrameTime = timeGetTime();
	Command com;

	//Thread Loop

	//outer loop : response to filter command
	do 
	{ 
		//inner loop : decode then render a video frame.
		while (!CheckRequest(&com)) 
		{ 
			CAutoLock cAutoLockShared(&m_cSharedState);

			if (!_isRawYuv)
			{
#ifdef SUPPORT_H264
				int decodeWidth = width, decodeHeight = height;

				if (!Decode(decodeWidth, decodeHeight, _currBuffer))
				{
					// Source exhausted/failed: fall through to EOS handling.
					hr = S_FALSE;
				}
				else
				{
					if (_decodeCallback != NULL)
					{
						//Not display the frame,just send out by callback
						if (_currBuffer != NULL)
							_decodeCallback(_playbackSession, reinterpret_cast<char *>(_currBuffer), _videoBufferSize, m_iImageWidth, _decodeCallbackParam);

						continue;
					}

					if (m_videoWnd == NULL)
						continue;

					//Check if the video size has been changed.

					if (_getsize)
					{
						int newWidth, newHeight;
						GetVideoSize(m_session, &newWidth, &newHeight);
#ifdef NEED_DEINTERLACE //for Tn86-21D
						if (newHeight == 256)
							newHeight = 240;
#endif //NEED_DEINTERLACE
						if (newWidth != m_iImageWidth || newHeight != m_iImageHeight)
							AdjustVideoSize(newWidth, newHeight);
					}

					// Retry for ~10 ms to obtain a delivery buffer; the
					// parenthesized assignment preserves the real HRESULT.
					DWORD beginTime = timeGetTime();
					while ((hr = GetDeliveryBuffer(&pSample,NULL,NULL,0)) != S_OK && timeGetTime() - beginTime < 10) 
						Sleep(2);

					if (hr || pSample == NULL) 
						continue;	

					BYTE *pData;

					// Access the sample's data buffer
					HRESULT gotPtr = pSample->GetPointer(&pData);
					cbData = pSample->GetSize();

					if (gotPtr == S_OK && cbData > 0)
					{
						try 
						{
							if (!IsBadWritePtr(pData, cbData))
							{
								if(_currBuffer != NULL)
								{
#ifndef NEED_DEINTERLACE
									memcpy(pData, _currBuffer, cbData);
#else
									DeinterlaceFrames(pData, width, height);
#endif //NEED_DEINTERLACE
								}
								else
								{
									// No decoded frame yet: emit black YV12
									// (Y = 0, chroma = 128).
									int yBufSize = width * abs(height);
									memset(pData, 0, yBufSize);
									memset(pData + yBufSize, 128, cbData - yBufSize);
								}
							}
						}
						catch(...)
						{}
					}
				}
#endif //SUPPORT_H264
			}
			else
			{
				// Raw-YUV pass-through: copy the data straight into the
				// sample, letterboxing smaller source frames when needed.
				H264SourceFilter *filter = (H264SourceFilter *) m_pFilter;
				if (filter->GetData(_buffer, _len))
				{
					DWORD beginTime = timeGetTime();
					// Same precedence fix as in the decode path above.
					while ((hr = GetDeliveryBuffer(&pSample,NULL,NULL,0)) != S_OK && timeGetTime() - beginTime < 10) 
						Sleep(2);

					if (hr || pSample == NULL) {
						continue;	// go round again. Perhaps the error will go away
						// or the allocator is decommited & we will be asked to
						// exit soon.
					}

					BYTE *pData;

					// Access the sample's data buffer
					HRESULT gotPtr = pSample->GetPointer(&pData);
					cbData = pSample->GetSize();

					if (gotPtr == S_OK && cbData > 0)
					{
						try 
						{
							if (!IsBadWritePtr(pData, cbData))
							{
								if (width == m_iImageWidth && height == m_iImageHeight)
									memcpy(pData, _buffer, cbData);
								else
								{
									// Source smaller than the negotiated frame:
									// copy row by row into the larger planes.
									int uOffset = width * abs(height);
									BYTE *y = pData, *u = pData + uOffset, *v = u + uOffset / 4;
									int halfWidth = width / 2;
									BYTE *ySrc, *uSrc, *vSrc;
									int srcWidth = m_iImageWidth, srcHalfWidth = srcWidth / 2;
									int copyWidth = srcWidth, copyHalfWidth = srcHalfWidth;
									int srcOffset = m_iImageWidth * m_iImageHeight;
									//if (height > 0)
									//{
										ySrc = _buffer;
										uSrc = _buffer + srcOffset;
										vSrc = uSrc + srcOffset / 4;
									//}
									//else
									//{
									//	ySrc = _buffer + srcOffset - srcWidth;
									//	uSrc = _buffer + srcOffset + srcOffset / 4 - srcHalfWidth;
									//	vSrc = uSrc + srcOffset / 4;

									//	srcWidth *= -1;
									//	srcHalfWidth *= -1;
									//}

									for (int i = m_iImageHeight; i; --i)
									{
										memcpy(y, ySrc, copyWidth);
										y += width;
										ySrc += srcWidth;

										// Chroma rows advance every other luma row.
										if (i % 2)
										{
											memcpy(u, uSrc, copyHalfWidth);
											memcpy(v, vSrc, copyHalfWidth);
											u += halfWidth;
											v += halfWidth;
											uSrc += srcHalfWidth;
											vSrc += srcHalfWidth;
										}
									}
								}

							}
						}
						catch(...)
						{}
					}
				}
				else
					hr = S_FALSE;
			}

			if (hr != S_OK) 
			{
				//The stream will be stopped.
				//derived class wants us to stop pushing data
				if (pSample != NULL)
				{
					pSample->Release();
					pSample = NULL;
				}

				DeliverEndOfStream();
				if (hr != S_FALSE) 
					m_pFilter->NotifyEvent(EC_ERRORABORT, hr, 0);

				ReleaseVideoBuffers();

				return S_OK;
			} 

			//Deliver the frame to renderer filter.

			if (_fps > 0)
			{
				//limit the playback speed
				DWORD now = timeGetTime();
				if (now >= lastFrameTime)
				{
					if (now - lastFrameTime < max(4, 1000 / _fps) - 4)
					{
						Sleep(max(4, 1000 / _fps) - 4 - (now - lastFrameTime));
						now = timeGetTime();
					}
				}
				lastFrameTime = now;
			}

			REFERENCE_TIME rtStart = 0;
			REFERENCE_TIME rtStop  = 0;
			pSample->SetTime(&rtStart, &rtStop);
			pSample->SetSyncPoint(TRUE);

			hr = Deliver(pSample);
			pSample->Release();
			pSample = NULL;

			// downstream filter returns S_FALSE if it wants us to
			// stop or an error if it's reporting an error.
			if(hr != S_OK)
			{
				ReleaseVideoBuffers();

				return S_OK;
			}

			//User can draw something on video now(if he wants).
			if (_drawCallback != NULL && m_videoWnd != NULL)
			{
				HDC hdc = GetDC(m_videoWnd);
				_drawCallback(_playbackSession, hdc, _drawCallbackParam);
				ReleaseDC(m_videoWnd, hdc);
			}
		} //End of inner loop

		// For all commands sent to us there must be a Reply call!
		if (com == CMD_RUN || com == CMD_PAUSE) 
			Reply(NOERROR);
		else if (com != CMD_STOP) 
			Reply((DWORD) E_UNEXPECTED);

	} while (com != CMD_STOP);

	ReleaseVideoBuffers();

	return S_FALSE;
}


// This is where we insert the DIB bits into the video stream.
// FillBuffer is called once for every sample in the stream.
HRESULT H264SourcePin::FillBuffer(IMediaSample *pSample)
{
	BYTE *pData;
    long cbData;

    CheckPointer(pSample, E_POINTER);

    CAutoLock cAutoLockShared(&m_cSharedState);

    // Access the sample's data buffer
	pSample->GetPointer(&pData);
    cbData = pSample->GetSize();

    // Check that we're still using video
    ASSERT(m_mt.formattype == FORMAT_VideoInfo);

    VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)m_mt.pbFormat;

	int width = pVih->bmiHeader.biWidth;
	int height = pVih->bmiHeader.biHeight;

	if (_videoBuffer1 == NULL)
	{
		_videoBuffer1 = new BYTE[cbData];
	}

#ifdef SUPPORT_H264
	if (!Decode(width, height, _videoBuffer1))
	{
		m_iFrameNumber = 0;
		return S_FALSE;
	}
#endif

	memcpy(pData, _videoBuffer1, cbData);

	if (_startTime == 0)
		_startTime = timeGetTime();

    REFERENCE_TIME rtStart = 0;//MILLISECONDS_TO_100NS_UNITS(timeGetTime() - _startTime);//m_iFrameNumber * m_rtFrameLength;
    REFERENCE_TIME rtStop  = 0;//rtStart + 10;//rtStart + m_rtFrameLength;

    pSample->SetTime(&rtStart, &rtStop);
    m_iFrameNumber++;

	// Set TRUE on every sample for uncompressed frames
    pSample->SetSyncPoint(TRUE);

    return S_OK;
}



/**********************************************
 *
 *  H264SourceFilter Class
 *
 **********************************************/
#ifdef PUSHSOURCE_EXPORTS
#define PUSHSOURCE_FILTER_CLSID CLSID_PushSourceDesktop
#else
#define PUSHSOURCE_FILTER_CLSID CLSID_NULL
#endif

// Constructs the source filter and its single output pin. The pin parameters
// (fps/width/height/isRawYuv) are forwarded verbatim. The filter defaults to
// file-based streaming until SetDataProvider() switches it to callback mode.
H264SourceFilter::H264SourceFilter(IUnknown *pUnk, HRESULT *phr, UINT fps, UINT width, UINT height, bool isRawYuv)
: CSource(NAME("PushSourceDesktop"), pUnk, PUSHSOURCE_FILTER_CLSID),
		   _dataProvider(NULL),
		   _isFileStream(true),
		   _file(NULL),
		   _len(0),
		   _isRawYuv(isRawYuv),
		   _lastBitrateCountTime(0),
		   _lastBitrate(0),
		   _totalBit(0)
{
    // The pin magically adds itself to our pin array.
    m_pPin = new H264SourcePin(phr, this, fps, width, height, isRawYuv);

	if (phr)
	{
		if (m_pPin == NULL)
			*phr = E_OUTOFMEMORY;
		else
			*phr = S_OK;
	}  

	memset(_filename, 0, sizeof(_filename));

	// Self-reference so the object outlives external Release() patterns used
	// by this codebase — presumably balanced by the owner; verify.
	AddRef();
}


// Destroys the pin created in the constructor and closes any open source file.
// NOTE(review): the CSource base class also tracks the pin in its pin array —
// confirm its cleanup does not release the pin again (double-free risk).
H264SourceFilter::~H264SourceFilter()
{
    delete m_pPin;

	if (_file != NULL)
		fclose(_file);
}

// Switches the filter from file playback to a pull-callback data source.
// The callback lock serializes this against GetData() on the streaming thread.
void H264SourceFilter::SetDataProvider(PLAYBACKSESSION session, 
									   DataProviderCallBack dataProvider, 
									   LPVOID param) 
{ 
	CAutoLock guard(&_callbackLock);

	_isFileStream = false;
	m_session = session;
	_dataProvider = dataProvider;
	_param = param;
}

// Fetches the next chunk of compressed data, either from the source file or
// from the registered data-provider callback, and updates the running bitrate
// statistics. On success 'buffer'/'len' reference the internal buffer; returns
// false when the source is exhausted or unavailable.
bool H264SourceFilter::GetData(BYTE *&buffer, UINT &len)
{
	if (_lastBitrateCountTime == 0)
		_lastBitrateCountTime = timeGetTime();

	if (_isFileStream)
	{
		// Lazily open the file named via SetSourceName().
		if (_file == NULL)
			_file = fopen(_filename, "rb");
		if (_file == NULL)
			return false;

		_len = fread(_buffer, 1, DATASIZE, _file);
		// NOTE(review): when EOF is hit, any bytes read by this final fread
		// are discarded along with the false return — presumably acceptable
		// as an end-of-stream signal, but the tail of the file is never
		// delivered; confirm.
		if (feof(_file) || ferror(_file)) 
		{
			fclose(_file);
			_file = NULL;
			_len = 0;

			return false;
		}

		len = _len;
		buffer = _buffer;
	}
	else
	{
		// Callback mode: hold the lock only around the provider invocation so
		// SetDataProvider() can't swap it mid-call.
		_callbackLock.Lock();
		if (_dataProvider == NULL)
		{
			_callbackLock.Unlock();
			return false;
		}

		int readLen = _dataProvider(m_session, _param, _buffer, MAX_FRAME_SIZE);
		_callbackLock.Unlock();

		// Negative length from the provider signals end of data; zero is a
		// valid "nothing yet" result.
		if (readLen < 0)
			return false;

		buffer = _buffer;
		len = readLen;
	}

	// Bitrate accounting with timeGetTime() wrap protection.
	_totalBit += len * 8;
	DWORD now = timeGetTime();
	if (now < _lastBitrateCountTime)
	{
		_lastBitrateCountTime = now;
		_totalBit = 0;
	}
	if (now - _lastBitrateCountTime >= BITRATE_COUNT_INTERVAL)
	{
		_lastBitrate = _totalBit * 1000 / (now - _lastBitrateCountTime);
		_lastBitrateCountTime = now;
		_totalBit = 0;
	}

	return true;
}

int H264SourceFilter::GetBitrate()
{
	if (timeGetTime() - _lastBitrateCountTime > 2 * BITRATE_COUNT_INTERVAL)
		return 0;
	else
		return _lastBitrate;
}

// Records the path of the source file to stream from and switches the filter
// to file mode. Ignored when no name is given or a file is already open.
void H264SourceFilter::SetSourceName(const char *filename)
{
	if (filename == NULL || _file != NULL)
		return;

	// Bounded copy: the previous strcpy could overflow the fixed-size
	// _filename array on an overlong path. Truncate and NUL-terminate instead.
	strncpy(_filename, filename, sizeof(_filename) - 1);
	_filename[sizeof(_filename) - 1] = '\0';

	_isFileStream = true;
}
