#include "StdAfx.h"
#include "VideoAna.h"

#ifdef _DEBUG
#define new DEBUG_NEW
#endif

#define SAFE_RELEASE(x) { if (x) x->Release(); x = NULL; }

// ISampleGrabberCB callback: the sample grabber hands every decoded frame's
// buffer here (callback mode 1, see Start()). Runs on the graph's streaming
// thread, not the UI thread; simply forwards to the owning CVideoAna.
STDMETHODIMP SVideoProcessAdapter::BufferCB(double SampleTime, BYTE *pBuffer, long nBufferLen)
{
	return m_pVideoAnaDoc->ProcessFrame(SampleTime, pBuffer, nBufferLen);
}

// CVideoAna

// CVideoAna construction/destruction
// Constructs the analyzer with all DirectShow interface pointers NULLed and
// the graph marked stopped. Heavy setup is deferred to Start().
CVideoAna::CVideoAna()
: m_pGraph(NULL)
, m_pGrabberFilter(NULL)
, m_pGrabber(NULL)
, m_pControl(NULL)
, m_pEvent(NULL)
, m_GraphStatus(GRAPH_STOPPED/*GRAPH_NONE*/)
, m_pSrcFilter(NULL)
, m_pNullRenderer(NULL)
, m_VideoProcessAdapter(this)	// adapter forwards grabber callbacks back to this object
, m_nCurFrame(0)
, m_sVideoFile(L"")
, m_hWndNotifier(NULL)
{
	// Pre-fill the constant parts of a BITMAPFILEHEADER so a snapshot can be
	// written by appending m_Bih and the raw pixel data (see the commented
	// example in ProcessFrame). bfSize is completed in Start() once the
	// frame size is known.
	memset(&m_Bfh, 0, sizeof(m_Bfh));
	m_Bfh.bfType = 0x4d42;	// 'BM' magic
	//m_Bfh.bfSize = sizeof(m_Bfh) + sizeof(BITMAPINFOHEADER) + cbBuffer;
	m_Bfh.bfOffBits = sizeof(m_Bfh) + sizeof(BITMAPINFOHEADER);

	//m_hUpdateEvent = CreateEvent(NULL, FALSE, FALSE, NULL);

	// The bitmap info header is filled in by Start() from the negotiated
	// media type.
	memset(&m_Bih, 0, sizeof(m_Bih));
}

CVideoAna::~CVideoAna()
{
	// NOTE(review): intentionally empty — DirectShow interfaces are released
	// in OnAnaFinished(). If the owner destroys this object while the graph
	// is still running, those interfaces leak; confirm callers always Stop().
	//ClearAll();
}

// Returns the current graph state (GRAPH_STOPPED / GRAPH_RUNNING / GRAPH_PAUSED).
CVideoAna::GraphStatus CVideoAna::GetGraphStatus()
{
	return m_GraphStatus;
}

// Sets the path of the video file to analyze. Must be called before Start()
// (Start() asserts the path is non-empty).
void CVideoAna::SetVideoFile(CString& sFile)
{
	m_sVideoFile = sFile;
}

// Sets the window that receives WM_APP_GRAPHNOTIFY graph events and
// WM_USER_VIDEOANA frame/finish notifications. Must be set before Start()
// (Start() asserts it is non-NULL).
void CVideoAna::SetGraphNotifier(HWND hWnd)
{
	m_hWndNotifier = hWnd;
}

// CVideoAnaDoc commands
void CVideoAna::Start()
{
	ASSERT(GRAPH_STOPPED == m_GraphStatus);
	ASSERT(m_hWndNotifier);
	ASSERT(m_sVideoFile.GetLength() > 0);

	// Create the graph builder
	HRESULT hr = ::CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, (void**)(&m_pGraph));
	if (FAILED(hr))
	{
		MessageBox(NULL, L"Failed creating DirectShow objects!", L"Error", MB_ICONERROR);
		return;
	}

	// Create the Sample Grabber
	ASSERT(m_pGrabber == NULL);
	hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
		IID_IBaseFilter, (void **)(&m_pGrabberFilter));
	hr = m_pGrabberFilter->QueryInterface(IID_ISampleGrabber,
		(void **)(&m_pGrabber));
	hr = m_pGraph->AddFilter(m_pGrabberFilter, L"SampleGrabber");

	// Set the media type
	AM_MEDIA_TYPE mt;
	ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
	mt.formattype = FORMAT_VideoInfo; 
	mt.majortype = MEDIATYPE_Video;
	mt.subtype = MEDIASUBTYPE_RGB24;	// only accept 24-bit bitmaps
	hr = m_pGrabber->SetMediaType(&mt);

	// Create the src filter
	wchar_t strFilename[MAX_PATH];
	//MultiByteToWideChar(CP_ACP, 0, m_sVideoFile, -1, strFilename, MAX_PATH);
	wcscpy_s(strFilename, MAX_PATH, m_sVideoFile);
	hr = m_pGraph->AddSourceFilter(strFilename, L"Source", &m_pSrcFilter);
	if(FAILED(hr))
	{
		MessageBox(NULL, L"Unsupported media type!", L"Error", MB_ICONERROR);
		return;
	}

	// Connect the src and grabber
	hr = ConnectFilters(m_pGraph, m_pSrcFilter, m_pGrabberFilter);
	if(FAILED(hr))
	{
		SAFE_RELEASE(m_pSrcFilter);
		SAFE_RELEASE(m_pGrabber);
		SAFE_RELEASE(m_pGrabberFilter);
		SAFE_RELEASE(m_pGraph);
		MessageBox(NULL, L"Unsupported media type!", L"Error", MB_ICONERROR);
		return;
	}

	// Create the NULL renderer and connect
	m_pNullRenderer = NULL;
	hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
		IID_IBaseFilter, (void **)(&m_pNullRenderer));
	hr = m_pGraph->AddFilter(m_pNullRenderer, L"NullRenderer");
	hr = ConnectFilters(m_pGraph, m_pGrabberFilter, m_pNullRenderer);

	m_nCurFrame = 0;

	// Set modes
	m_pGrabber->SetBufferSamples(FALSE);	// Buffer seems to be no use in callback mode
	m_pGrabber->SetCallback(&m_VideoProcessAdapter, 1);

	// Necessary interfaces for controlling
	m_pGraph->QueryInterface(IID_IMediaControl, (void **)(&m_pControl));
	m_pGraph->QueryInterface(IID_IMediaEventEx, (void **)(&m_pEvent));

	m_pEvent->SetNotifyWindow((OAHWND)m_hWndNotifier, WM_APP_GRAPHNOTIFY, 0);

	// Turn off the sync clock for max speed
	IMediaFilter *pMediaFilter = NULL;
	m_pGraph->QueryInterface(IID_IMediaFilter, reinterpret_cast<void**>(&pMediaFilter));
	pMediaFilter->SetSyncSource(NULL);
	SAFE_RELEASE(pMediaFilter);

	// Retrieve the actual media type
	ZeroMemory(&mt, sizeof(mt));
	hr = m_pGrabber->GetConnectedMediaType(&mt);
	VIDEOINFOHEADER *pVih;
	if (mt.formattype == FORMAT_VideoInfo) 
		pVih = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
	else 
	{
		SAFE_RELEASE(m_pControl);
		SAFE_RELEASE(m_pEvent);
		SAFE_RELEASE(m_pSrcFilter);
		SAFE_RELEASE(m_pNullRenderer);
		SAFE_RELEASE(m_pGrabber);
		SAFE_RELEASE(m_pGrabberFilter);
		SAFE_RELEASE(m_pGraph);
		MessageBox(NULL, L"No video stream found!", L"Error", MB_ICONERROR);

		return; // Something went wrong, perhaps not appropriate media type
	}

	// Save the video info header
	memcpy(&m_Bih, &pVih->bmiHeader, sizeof(m_Bih));
	m_Bfh.bfSize = sizeof(m_Bfh) + sizeof(BITMAPINFOHEADER) + m_Bih.biSizeImage;
	//m_pVideoAnaView->m_PreviewBox.SetBih(&m_Bih);
	m_lTimeperFrame = pVih->AvgTimePerFrame;

	// Free the media type
	if (mt.cbFormat != 0)
	{
		CoTaskMemFree((PVOID)mt.pbFormat);
		// Strictly unnecessary but tidier
		mt.cbFormat = 0;
		mt.pbFormat = NULL;
	}
	if (mt.pUnk != NULL)
	{
		// Unnecessary because pUnk should not be used, but safest.
		mt.pUnk->Release();
		mt.pUnk = NULL;
	}

	// Get video info
	IMediaSeeking *pSeeking = NULL;
	m_pGraph->QueryInterface(IID_IMediaSeeking, (void **)(&pSeeking));
	pSeeking->GetDuration(&m_lDuration);
	if(FAILED(pSeeking->SetTimeFormat(&TIME_FORMAT_FRAME)))
		m_nTotalFrames = m_lDuration / m_lTimeperFrame;
	else
		pSeeking->GetDuration(&m_nTotalFrames);
	SAFE_RELEASE(pSeeking);

	m_GraphStatus = GRAPH_RUNNING;
	m_pControl->Run(); // Run the graph to start the analyzing process!
}

void CVideoAna::Pause()
{
	// TODO: Add your command handler code here
	ASSERT(GRAPH_RUNNING == m_GraphStatus || GRAPH_PAUSED == m_GraphStatus);
	if(GRAPH_RUNNING == m_GraphStatus)
	{
		if(S_FALSE == m_pControl->Pause())
		{
			OAFilterState oState;
			m_pControl->GetState(INFINITE, &oState);
		}
		m_GraphStatus = GRAPH_PAUSED;
	}
	else
	{
		m_pControl->Run();
		m_GraphStatus = GRAPH_RUNNING;
	}

}

// Stops the graph and releases all DirectShow resources via OnAnaFinished().
void CVideoAna::Stop()
{
	ASSERT(GRAPH_RUNNING == m_GraphStatus || GRAPH_PAUSED == m_GraphStatus);

	// When running, go through the paused state first; an S_FALSE from
	// Pause() means the transition is asynchronous, so wait for it.
	const bool bWasRunning = (GRAPH_RUNNING == m_GraphStatus);
	if (bWasRunning && S_FALSE == m_pControl->Pause())
	{
		OAFilterState oState;
		m_pControl->GetState(INFINITE, &oState);	// Wait until pause finished
		TRACE(L"Wait to paused\n");
	}

	m_pControl->Stop();
	m_GraphStatus = GRAPH_STOPPED;

	OnAnaFinished();
}

// Releases every DirectShow interface acquired in Start(). SAFE_RELEASE
// tolerates NULL pointers, so this is safe to call even if Start() failed
// part-way and some pointers were never obtained.
void CVideoAna::OnAnaFinished()
{
	ASSERT(GRAPH_STOPPED == m_GraphStatus || GRAPH_RUNNING == m_GraphStatus);

	m_GraphStatus = GRAPH_STOPPED;
	// Free DirectShow resources when finished analyzing
	SAFE_RELEASE(m_pControl);
	SAFE_RELEASE(m_pEvent);
	SAFE_RELEASE(m_pSrcFilter);
	SAFE_RELEASE(m_pGraph);
	SAFE_RELEASE(m_pNullRenderer);
	SAFE_RELEASE(m_pGrabber);
	SAFE_RELEASE(m_pGrabberFilter);
}

// Helper functions:
// Returns in *ppPin the first pin of pFilter with the requested direction.
// On S_OK the caller owns one reference on *ppPin and must Release() it.
// Returns E_FAIL (with *ppPin = NULL) when no matching pin exists.
HRESULT CVideoAna::GetPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin)
{
	*ppPin = NULL;	// fix: never leave the out-parameter uninitialized

	IEnumPins  *pEnum = NULL;
	IPin       *pPin = NULL;
	HRESULT hr = pFilter->EnumPins(&pEnum);
	if (FAILED(hr))
		return hr;	// fix: original dereferenced an uninitialized pEnum here

	while(pEnum->Next(1, &pPin, 0) == S_OK)
	{
		PIN_DIRECTION PinDirThis;
		pPin->QueryDirection(&PinDirThis);
		if (PinDir == PinDirThis)
		{
			pEnum->Release();
			*ppPin = pPin;	// hand the enumerator's reference to the caller
			return S_OK;
		}
		pPin->Release();
	}
	pEnum->Release();
	return E_FAIL;  
}

// Connects the first working output pin of pFirst to the first input pin of
// pSecond, letting the graph builder insert intermediate filters as needed.
// Returns S_OK on success, otherwise the last failure code (or E_FAIL when
// pFirst exposes no output pin at all).
HRESULT CVideoAna::ConnectFilters(IGraphBuilder *pGraph, IBaseFilter *pFirst, IBaseFilter *pSecond)
{
	IPin *pOut = NULL, *pIn = NULL;
	HRESULT hr = GetPin(pSecond, PINDIR_INPUT, &pIn);
	if (FAILED(hr)) return hr;

	// The previous filter may have multiple outputs, so try each one!
	IEnumPins  *pEnum = NULL;
	hr = pFirst->EnumPins(&pEnum);
	if (FAILED(hr))
	{
		// Fix: original used pEnum uninitialized when EnumPins failed.
		SAFE_RELEASE(pIn);
		return hr;
	}

	// Fix: original returned the stale S_OK from GetPin when pFirst had no
	// output pins; start from failure and only report success on a connect.
	hr = E_FAIL;
	while(pEnum->Next(1, &pOut, 0) == S_OK)
	{
		PIN_DIRECTION PinDirThis;
		pOut->QueryDirection(&PinDirThis);
		if (PINDIR_OUTPUT == PinDirThis)
		{
			hr = pGraph->Connect(pOut, pIn);
			if(SUCCEEDED(hr))
			{
				break;
			}
		}
		SAFE_RELEASE(pOut);
	}
	SAFE_RELEASE(pOut);
	SAFE_RELEASE(pEnum);
	SAFE_RELEASE(pIn);
	return hr;
}

// Per-frame worker, called from BufferCB on the graph's streaming thread.
// pBuffer holds one bottom-up RGB24 DIB frame of nBufferLen bytes. Converts
// it to an HBITMAP and posts it to the notifier window for preview.
HRESULT CVideoAna::ProcessFrame(double SampleTime, BYTE *pBuffer, long nBufferLen)
{
	// Create an HBITMAP compatible with the screen from the DIB bits
	HDC hDCRef = GetDC(NULL);
	HDC hDC = ::CreateCompatibleDC(hDCRef);
	HBITMAP hBmpRet = CreateCompatibleBitmap(hDCRef, m_Bih.biWidth, m_Bih.biHeight);
	HBITMAP hBmpOld = (HBITMAP)::SelectObject(hDC, hBmpRet);

	::SetStretchBltMode(hDC, COLORONCOLOR);
	::StretchDIBits(hDC, 0, 0, m_Bih.biWidth, m_Bih.biHeight,
		0, 0, m_Bih.biWidth, m_Bih.biHeight,
		pBuffer, (LPBITMAPINFO)&m_Bih, DIB_RGB_COLORS, SRCCOPY);
	::SelectObject(hDC, hBmpOld);
	::DeleteDC(hDC);
	::ReleaseDC(NULL, hDCRef);

	// Ownership of hBmpRet passes to the receiver of WM_USER_VIDEOANA, which
	// must DeleteObject it. Fix: if the post fails (e.g. the notifier window
	// is gone) delete the bitmap here — otherwise one GDI object leaks for
	// every frame of the video.
	if (!::PostMessage(m_hWndNotifier, WM_USER_VIDEOANA, VIDEOANA_WPARAM_NEWFRAME, (LPARAM)hBmpRet))
		::DeleteObject(hBmpRet);

	// TODO: Put the frame processing code here
	// Keep in mind that code here is executed within another thread,
	// so do consider the data access problem among threads

	// Here just do nothing but send a preview image and update progress view every 0.5 seconds
	// Comment the following "if" line if you want to see each frame :)
	//if(WAIT_OBJECT_0 == WaitForSingleObject(m_hUpdateEvent, 0) || m_nCurFrame == m_nTotalFrames - 1)
	//{
	//	m_pVideoAnaView->SendMessage(WM_USER_PREVIEW_FRAME, (WPARAM)pBuffer, nBufferLen);
	//	m_pVideoAnaView->SendMessage(WM_USER_UPDATE_PROGRESS, (WPARAM)m_nCurFrame + 1);
	//}

	//// The following code demonstrates how to save a snapshot to BMP file every 10 frames
	//if(0 == m_nCurFrame % 10)
	//{
	//	CString strFilename;
	//	strFilename.Format("C:\\Snap%d.bmp", m_nCurFrame / 10);
	//	FILE *pfSnap = fopen(strFilename, "wb");
	//	fwrite(&m_Bfh, sizeof(m_Bfh), 1, pfSnap);	// BITMAPFILEHEADER
	//	fwrite(&m_Bih, sizeof(m_Bih), 1, pfSnap);	// BITMAPINFOHEADER
	//	fwrite(pBuffer, nBufferLen, 1, pfSnap);	// DIBits
	//	fclose(pfSnap);
	//}

	//// The following code demonstrates how to get rgb values of a specified pixel
	//// You can write a loop to examine all pixels
	//// Keep in mind the pixel data is stored from bottom to top in pBuffer
	//int x = 0;
	//int y = 0;
	//int nLineBytes = (m_Bih.biWidth * 24 + 31) / 32 * 4;	// # of bytes per line
	//BYTE *pLine = pBuffer + (m_Bih.biHeight - y - 1) * nLineBytes;
	//BYTE *pPixel = pLine + 3 * x;
	//BYTE B = *pPixel;
	//BYTE G = *(pPixel + 1);
	//BYTE R = *(pPixel + 2);

	m_nCurFrame++;	// m_nCurFrame indicates which frame is being processed
	return S_OK;
}

void CVideoAna::OnGraphNotify()
{
	if(!m_pEvent)
		return;

	long lEventCode;
	LONG_PTR lParam1;
	LONG_PTR lParam2;

	while(S_OK == m_pEvent->GetEvent(&lEventCode, &lParam1, &lParam2, 0))
	{
		TRACE(L"%d\n", lEventCode);
		//if(EC_PAUSED == lEventCode && GRAPH_RUNNING == m_GraphStatus)
		//{
		//}
		if(EC_COMPLETE == lEventCode)	// All data has been rendered.
		{
			if(S_FALSE == m_pControl->Pause())
			{
				OAFilterState oState;
				m_pControl->GetState(INFINITE, &oState);
			}
			m_pControl->Stop();
			//AfxMessageBox("Graph complete");
			//AfxGetMainWnd()->PostMessage(WM_COMMAND, ID_ANA_FINISHED);
			::PostMessage(m_hWndNotifier, WM_USER_VIDEOANA, VIDEOANA_WPARAM_FINISHED, NULL);
		}

		m_pEvent->FreeEventParams(lEventCode, lParam1, lParam2);
	}

}