// Virtual PoleDoc.cpp : implementation of the CVirtualPoleDoc class

#include "stdafx.h"
#include "afx.h"
#include "Virtual Pole.h"
#include "Virtual PoleDoc.h"
#include "math.h"
#include "string.h"
#include "float.h"

#define _NIWIN 

#ifdef _DEBUG
#define new DEBUG_NEW
#undef THIS_FILE
static char THIS_FILE[] = __FILE__;
#endif

// macro for swapping two elements in an array
#define ELEM_SWAP(a,b) { register elem_type t=(a);(a)=(b);(b)=t; }

// macro for finding the median using the kth_smallest algorithm
#define WMedian(a,n) kth_smallest(a,n,(((n)&1)?((n)/2):(((n)/2)-1)))

// NIMAQ Error Checking Macro
#define errChk(fCall) if (m_iError = (fCall), m_iError <0) {goto Error;} else
// NI-DAQmx Error Checking Macro
#define DAQmxErrChk(functionCall) if( DAQmxFailed(DAQmxError=(functionCall)) ) goto DAQmxError; else

/////////////////////////////////////////////////////////////////////////////
// CVirtualPoleDoc

// Enable runtime dynamic creation of CVirtualPoleDoc by the MFC doc/view framework.
IMPLEMENT_DYNCREATE(CVirtualPoleDoc, CDocument)

// Message map: no document-level message handlers are registered.
BEGIN_MESSAGE_MAP(CVirtualPoleDoc, CDocument)
	//{{AFX_MSG_MAP(CVirtualPoleDoc)
	//}}AFX_MSG_MAP
END_MESSAGE_MAP()

/////////////////////////////////////////////////////////////////////////////
// CVirtualPoleDoc construction/destruction

// Constructor: reset all state flags, thread handles and image buffers to a
// known-empty state and drive all parallel-port pins LOW.
// Fix: m_bDarkFrameExists was initialized both in the member-initializer
// list and again in the body; the redundant body assignment was removed.
CVirtualPoleDoc::CVirtualPoleDoc() : m_bMissedFramesReported(false) , m_bDarkFrameExists(false)
, m_nDarkFrameMaxVal(0)
{
	m_bDataLoaded			= false;
	m_bStopRing				= true;
	m_Results.bPoleTouch	= false;
	m_Results.lFrameNumber	= 0;
	m_Iid					= 0;
	m_Sid					= 0;
	m_pCurrAnalyzedBuffer	= NULL;
	m_hAnalyzeThread		= NULL;
	m_hStopEvent			= NULL;
	m_hStopThread			= NULL;
	m_hStartSave			= NULL;
	m_hAnalysisStarted		= NULL;
	m_iSavedFrameCounter	= 0;

	Out32(0x378, 0x00);		// Initialize all parallel port pins (default=LOW)

	// Initialize and NULL ring image buffers (memory is owned by the IMAQ driver)
	for (int i=0; i<RingBufferSize; i++)
		m_pRingBuffer[i]= NULL;

	m_pAnalyzeBuffer = NULL;
	m_pRawBuffer	 = NULL;

	// Clear the motion-coordinate scratch arrays.
	// NOTE(review): 655360 is hard-coded; presumably the declared size of
	// m_MotionX/m_MotionY -- confirm against the class declaration.
	for (int i=0; i<655360; i++) {
		m_MotionX[i] = 0;
		m_MotionY[i] = 0;
	}
}

// Destructor: tear down the IMAQ session/interface and release image buffers.
CVirtualPoleDoc::~CVirtualPoleDoc()
{
	CloseVideo();
}

// Standard MFC new-document hook; defers entirely to the base class and
// reports its success/failure.
BOOL CVirtualPoleDoc::OnNewDocument()
{
	return CDocument::OnNewDocument() ? true : false;
}


/////////////////////////////////////////////////////////////////////////////
// CVirtualPoleDoc diagnostics

#ifdef _DEBUG
// Debug-only object sanity check; defers to the base class implementation.
void CVirtualPoleDoc::AssertValid() const {
	CDocument::AssertValid();
}

// Debug-only state dump; defers to the base class implementation.
void CVirtualPoleDoc::Dump(CDumpContext& dc) const {
	CDocument::Dump(dc);
}
#endif //_DEBUG

/////////////////////////////////////////////////////////////////////////////
// CVirtualPoleDoc commands

// Tear down the acquisition: close the IMAQ session and interface, forget
// the driver-owned ring buffers, and free our own raw/analysis buffers.
void CVirtualPoleDoc::CloseVideo()
{
	// Close the session first, then the interface
	if (m_Sid != 0)
		imgClose(m_Sid, true);
	if (m_Iid != 0)
		imgClose(m_Iid, true);
	m_Sid = 0;
	m_Iid = 0;

	// The ring buffers were allocated (and are freed) by the IMAQ driver,
	// so we only drop our references here -- no delete.
	int idx = 0;
	while (idx < RingBufferSize) {
		m_pRingBuffer[idx] = NULL;
		idx++;
	}

	// These two buffers are ours (allocated with new[]).
	delete [] m_pAnalyzeBuffer;
	delete [] m_pRawBuffer;
	m_pAnalyzeBuffer = NULL;
	m_pRawBuffer	 = NULL;

	m_bDataLoaded = false;
}

// Returns true once video data has been loaded (set at the end of
// OnFileStartCaptureVideo, cleared by CloseVideo).
// NOTE(review): the method name is misspelled ("Loded"); renaming it would
// break existing callers, so the typo is only flagged here.
bool CVirtualPoleDoc::isDataLoded() {
	return m_bDataLoaded;
}

// Snap a video image
// Snap a single video frame into m_pRawBuffer.
//   ROI    - optional region of interest (NULL = full frame). An out-of-range
//            rectangle is reset to the full frame and written back to the caller.
//   pParam - pointer to the GUI's Analyze_Params structure.
// Returns false (after displaying the IMAQ error) on any driver failure.
// Fixes: m_pRawBuffer is released with delete[] (it is allocated with new[]);
// the bogus-ROI guard now validates the frame height as well as the width.
bool CVirtualPoleDoc::OnFileStartCaptureVideo(CRect *ROI, LPVOID pParam) {

	// If we already have a video stream open - close it
	if (m_bDataLoaded)
		 CloseVideo();

	/*	Initialize camera interface. By default, img0 is used. If multiple cameras
		are installed on your system, you may have to change the camera interface
		string below */
	if (m_Iid == 0) {
		/*	The call to imgInterfaceOpen for some reason always starts camera exposure
			(indicated by FlashOut). Why does it do it (RTSI line activated)? And, how
			to suppress it? It cases first frame to sometimes be brighter than subsequent
			frames
		*/
		errChk(imgInterfaceOpen ("img0", &m_Iid)); // img0
	}

	// Create IMAQ session
	if (m_Sid == 0)
		errChk(imgSessionOpen (m_Iid, &m_Sid));

	// Get acquisition parameters from GUI
	m_pParam_Struct =(Analyze_Params*)pParam;

	ExSyncReset();

	/*	Maximise the IMAQ acquisition window
		By default, the acquisition window is WIDTH x HEIGHT pixels. These values should correspond
		with the camera acquisition parameters set in NI MAX. The image size is hard-coded with
		the defines VP_ROI_MAX_WIDTH and VP_ROI_MAX_HEIGHT in MainFrm.h
	*/
	errChk(imgSetAttribute2(m_Sid, IMG_ATTR_ACQWINDOW_LEFT, 0));
	errChk(imgSetAttribute2(m_Sid, IMG_ATTR_ACQWINDOW_TOP, 0));
	errChk(imgSetAttribute2(m_Sid, IMG_ATTR_ACQWINDOW_WIDTH, VP_ROI_MAX_WIDTH));
	errChk(imgSetAttribute2(m_Sid, IMG_ATTR_ACQWINDOW_HEIGHT, VP_ROI_MAX_HEIGHT));

	if (!ROI) {
		// Maximize region of interest
		errChk(imgSetAttribute2(m_Sid, IMG_ATTR_ROWPIXELS, VP_ROI_MAX_WIDTH));
		errChk(imgSessionSetROI(m_Sid, 0, 0, VP_ROI_MAX_HEIGHT, VP_ROI_MAX_WIDTH));
		m_FrameSize.cx = VP_ROI_MAX_WIDTH;
		m_FrameSize.cy = VP_ROI_MAX_HEIGHT;
	} else {
		errChk(imgSetAttribute2(m_Sid, IMG_ATTR_ROWPIXELS, ROI->Width()));
		errChk(imgSessionSetROI(m_Sid, ROI->top, ROI->left,ROI->Height(), ROI->Width()));
		m_FrameSize = ROI->Size();
		// Check for bogus values in m_FrameSize (width AND height); fall back
		// to the full frame and report the corrected ROI back to the caller.
		if (m_FrameSize.cx < 0 || m_FrameSize.cx > VP_ROI_MAX_WIDTH
			|| m_FrameSize.cy < 0 || m_FrameSize.cy > VP_ROI_MAX_HEIGHT) {
			errChk(imgSetAttribute2(m_Sid, IMG_ATTR_ROWPIXELS, VP_ROI_MAX_WIDTH));
			errChk(imgSessionSetROI(m_Sid, 0, 0, VP_ROI_MAX_HEIGHT, VP_ROI_MAX_WIDTH));
			m_FrameSize.cx = VP_ROI_MAX_WIDTH;
			m_FrameSize.cy = VP_ROI_MAX_HEIGHT;
			ROI->left = 0;
			ROI->top = 0;
			ROI->right = VP_ROI_MAX_WIDTH;
			ROI->bottom = VP_ROI_MAX_HEIGHT;
		}
	}
	
	m_pRingBuffer[0]=NULL;

	// Prepare image snap buffer and let IMAQ allocate memory
	if (m_pRawBuffer) {
		delete [] m_pRawBuffer;	// allocated with new[] -- must use delete[]
		m_pRawBuffer = NULL;
	}
	m_pRawBuffer = new byte[m_FrameSize.cx*m_FrameSize.cy];
	
	// Start ExSync preview mode (will do nothing if camera is not in ExSync mode)
	EnableExSyncPreview(true, false, false);

	// Grab/snap frame and check for error/timeout
	errChk(imgSnap(m_Sid, (void **)&m_pRingBuffer[0]));

	// Stop ExSync preview mode
	EnableExSyncPreview(false, false, false);

	CopyMemory(m_pRawBuffer, (void *)m_pRingBuffer[0], m_FrameSize.cx*m_FrameSize.cy);

	// Manage error handling through IMAQ macro (fall-through on success)
	Error: 
	if(m_iError < 0) {
		// Stop ExSync preview mode
		EnableExSyncPreview(false, false, false);
		DisplayImaqError(m_iError);
		return false;
	}

	// Signal that data is available
	m_bDataLoaded = true;

	return true;
}

// Display IMAQ error
// TODO: Should be managed through the View class
void CVirtualPoleDoc::DisplayImaqError(int error) {
    m_Results.bError = true;
	static Int8 ErrorMessage[256];
    memset(ErrorMessage, 0x00, sizeof(ErrorMessage));
	imgShowError(error,ErrorMessage);
	AfxMessageBox(ErrorMessage, MB_ICONINFORMATION | MB_OK);
}

// Analysis thread function
// Analysis thread entry point. pParam is the CVirtualPoleDoc that spawned
// the thread. Runs the analysis loop, then ends the worker thread.
// Fix: the previous code stored AnalyzeProc()'s result in a local that was
// never read; errors are already reported inside AnalyzeProc() itself.
UINT CVirtualPoleDoc::AnalyzeThread(LPVOID pParam) {
	CVirtualPoleDoc *ThisClass = (CVirtualPoleDoc *)pParam;
	ThisClass->AnalyzeProc();
	AfxEndThread(0);
	return 0;	// unreachable: AfxEndThread() terminates the thread
}

// The real-time video analysis worker function
// Worker-thread loop: pulls frames from the IMAQ ring buffer, runs motion /
// threshold detection per pixel, optional real-time whisker tracking (linear
// regression -> angle + position on DAQmx analog outputs), optional FFT-based
// frequency triggering, virtual-pole contact triggering, missed-frame
// bookkeeping and overlay-pixel encoding. Runs until m_bStopRing is set,
// an IMAQ error occurs, or the session is closed.
bool CVirtualPoleDoc::AnalyzeProc() {
	ULONG lBuffer_Addrress = 0, lRequestedBuffer=1, lCurrentBuffer=0;
	int VPCounter = 0;
	long DotsCounter = 0;
	int col, row, Prev_Loop_Index;
	long i, j;
	double dWhiskPos = 0; // dWhiskPos can be either angle or median
	CString LogLine;
	unsigned short NumMissedFrames = 0;
	unsigned int LastSavedFrame = 1;
	long lLastAnalyzedFrame = 1;
	m_Results.dFFTWBandPassSNR = 0;
	m_bMissedFramesReported = 0;

	// Initialize variables that are used by DAQmx functions
	char		DAQmxErrBuff[2048] = {'\0'};
	TaskHandle	DAQmxTaskHandle = 0;
	TaskHandle	DAQmxTaskHandleDig = 0;
	TaskHandle	DAQmxTaskHandleReadCtr = 0;
	int			DAQmxError = 0;
	float64     fDAQmxVal[2] = {0.0};

	// Initialize FFTW plans. This may take 1 sec of execution time...
	double *dFFTW_in, *dFFTW_out;
	// NOTE(review): comment says 2^9 (=512) but the value is 1024 (=2^10).
	int nFFTWFrames = 1024; // 2^9 for speed
	double dFFTW_AbsVal[1024], dFFTW_FreqVal[1024];
	double nCorr = (double)(*m_pParam_Struct).FPS / (double)nFFTWFrames;
	for (int i = 0; i < nFFTWFrames; i++) { // Initialize all elements to zero
		dFFTW_AbsVal[i] = 0;
		dFFTW_FreqVal[i] = 0;
		m_Results.MedianArray[i] = 0;
		m_Results.AngleArray[i] = 0;
	}
	dFFTW_in = (double *)fftw_malloc(sizeof(double) * nFFTWFrames);
	dFFTW_out = (double *)fftw_malloc(sizeof(double) * nFFTWFrames);

	// Create plan (real-to-half-complex, in/out buffers fixed at plan time)
	fftw_plan pFFTW_rplan;
	pFFTW_rplan = fftw_plan_r2r_1d(nFFTWFrames, dFFTW_in, dFFTW_out, FFTW_R2HC, FFTW_ESTIMATE ); 
	// Done with FFTW initialization

	// Compute area required for triggers (for all objects)
	CSize csVPArea[10];
	int csVPThreshold[10] = {0};
	for (int i = 0; i< 10; i++) {
		csVPArea[i] = (*m_pParam_Struct).AllVP[i].Size();
		csVPThreshold[i] = (int)ceil(csVPArea[i].cx * csVPArea[i].cy
							* double(*(*m_pParam_Struct).Pole_Threshold)/100.0);
	}

	volatile bool *stop = &m_bStopRing;
	byte Analyzed_Loop_Index = 0;

	// Create a pointer to the write index
	volatile ULONG *WriteIndex = &m_lWrite_Index;

	volatile ULONG *SavedFrames = &m_iSavedFrameCounter;

	// - process first frame separately 

	// Lock the raw video for the video show thread
	EnterCriticalSection((*m_pParam_Struct).LockRawVideo);

	// Grab first frame
	errChk(imgSessionExamineBuffer(m_Sid, lRequestedBuffer, &lCurrentBuffer, &lBuffer_Addrress));
	
	// Copy 1st frame  CopyMemory(dest, src, len)
	CopyMemory(m_pRawBuffer, (void *)lBuffer_Addrress, (*m_pParam_Struct).RawBufferSize.y);

	// Unlock and reinsert buffer back in ring
	errChk(imgSessionReleaseBuffer(m_Sid));

	// Unlock raw video for the video show thread
	LeaveCriticalSection((*m_pParam_Struct).LockRawVideo);

	m_Results.lFrameNumber++;

	long nStart, nEnd;
	uInt32 iCounter = 0;
	uInt32 iCounterPrevious = 0;
	int32 iSampsWritten = 0;
	long nLen = (m_FrameSize.cx*m_FrameSize.cy); // length of frame buffer

	// Initialize variables for angle (dTheta) computation
	bool bThreshold = false;
	double dTheta = 0;
	unsigned long lIndx = 0;
	bool bOutputHigh = false;

	// Initialize variables used for angle computations
	double SUMx = 0.0;     //sum of x values
	double SUMy = 0.0;     //sum of y values
	double SUMxy = 0.0;    //sum of x * y
	double SUMxx = 0.0;    //sum of x^2
	double dT, dB, dSlope, dCurFrameWhiskPos, nRand, nXi, nYi;

	// Detect trigger IN line (for white overlay square #1)
	//m_csCameraTriggerLine = (*m_pParam_Struct).CameraTriggerLine;
	//IMG_SIGNAL_TYPE sigTriggerType;
	//uInt32 triggerNumber;
	//if (strncmp((LPCTSTR)m_csCameraTriggerLine, "RTSI", 4) == 0) {	
	//	sigTriggerType = IMG_SIGNAL_RTSI;									// RTSI
	//	triggerNumber = (uInt32)atoi(m_csCameraTriggerLine.Mid(5,1));		// Line number
	//} else if (strncmp((LPCTSTR)m_csCameraTriggerLine, "External", 8) == 0) {
	//	sigTriggerType = IMG_SIGNAL_EXTERNAL;								// External
	//	triggerNumber = (uInt32)atoi(m_csCameraTriggerLine.Mid(9,1));		// Line number
	//} else {
	//	sigTriggerType = IMG_SIGNAL_STATUS;									// Immediate
	//	triggerNumber = IMG_IMMEDIATE;										// Line number
	//}

	// DAQmx: Initialize analog output task
	if (DAQmxTaskHandle == 0) { // TODO: Decide first if analog outputs should be generated
		// Reset device. Device name is the first 4 chars of the channel string
		// (e.g. "Dev1" out of "Dev1/ao0").
		char sDevName[10];
		sDevName[0] = '\0';
		strncat_s(sDevName, (*m_pParam_Struct).TrackingOutputCh, 4);
		sDevName[4] = '\0';
		DAQmxErrChk (DAQmxResetDevice(sDevName));
		DAQmxErrChk (DAQmxCreateTask("", &DAQmxTaskHandle));
		DAQmxErrChk (DAQmxCreateAOVoltageChan(DAQmxTaskHandle,
			(*m_pParam_Struct).TrackingOutputCh,
			"", -10.0, 10.0, DAQmx_Val_Volts, ""));
		DAQmxErrChk (DAQmxStartTask(DAQmxTaskHandle));
		DAQmxErrChk (DAQmxWriteAnalogF64(DAQmxTaskHandle, 1, 1, 0, 
			DAQmx_Val_GroupByScanNumber, fDAQmxVal,	&iSampsWritten,	NULL ) );
	}

	// DAQmx: Initialize counter read task for Indicator Trigger status
	if (DAQmxTaskHandleReadCtr == 0) { // TODO: Check first if indicators are being displayed
		DAQmxErrChk (DAQmxCreateTask("", &DAQmxTaskHandleReadCtr));
		DAQmxErrChk (DAQmxCreateCICountEdgesChan(DAQmxTaskHandleReadCtr,
			(*m_pParam_Struct).IndicatorTriggerINCh, "VPIndicatorTrigger", 
			DAQmx_Val_Rising, 0, DAQmx_Val_CountUp) );
		DAQmxErrChk (DAQmxStartTask(DAQmxTaskHandleReadCtr));
	}

	// DAQmx: Initialize Event Trigger task
	if (DAQmxTaskHandleDig == 0) { // TODO: Decide first if event triggers are enabled
		DAQmxErrChk (DAQmxCreateTask("", &DAQmxTaskHandleDig));
		DAQmxErrChk (DAQmxCreateCOPulseChanTime(DAQmxTaskHandleDig,
			(*m_pParam_Struct).EventTriggerDev, "VPEeventChannel",
			DAQmx_Val_Seconds, DAQmx_Val_Low, 0.0, 0.00001, 20)); //0.0005

		// Optional: Route to RTSI pin
		if (strncmp((*m_pParam_Struct).EventTriggerCh, "RTSI", 4) == 0) {	
			char sSource[22];	// eg. '/Dev3/VPEeventChannel'
			char sDest[12];		// eg. '/Dev3/RTSI0'
			// Generate source string
			sSource[0] = '/';
			sSource[1] = '\0';
			strncat_s(sSource, (*m_pParam_Struct).EventTriggerDev, 4);
			sSource[5] = '\0';
			strcat_s(sSource, "/VPEeventChannel");
			sSource[21] = '\0';
			// Generate destination string
			sDest[0] = '/';
			sDest[1] = '\0';
			strncat_s(sDest, (*m_pParam_Struct).EventTriggerDev, 4);
			sDest[5] = '/';
			sDest[6] = '\0';
			strcat_s(sDest, (*m_pParam_Struct).EventTriggerCh);
			sDest[11] = '\0';
			DAQmxErrChk (DAQmxExportSignal(DAQmxTaskHandleDig, DAQmx_Val_CounterOutputEvent, sDest) );
		}
	}

	// Seed the random-number generator with the current time so that
	// the numbers will be different every time we run.
	srand( (unsigned)time( NULL ) );

	// Iterate over frames
    while((!*stop) && (!m_iError) && (m_Sid > 0)) {
		// Set the requested buffer num
		lRequestedBuffer = lCurrentBuffer+1;
		
		// Previous index
		Prev_Loop_Index = m_lLoop_Index;

		// Current frame index
		m_lLoop_Index = m_Results.lFrameNumber % (*m_pParam_Struct).RawBufferSize.x;
		Analyzed_Loop_Index	= m_Results.lFrameNumber % (*m_pParam_Struct).AnalyzedBufferSize.x;

		// Reset save thread and exit preview if we run out of video RAM buffer
		if ((*WriteIndex == m_lLoop_Index) && (*SavedFrames > 0)) {
			m_bResetSave = true;
			*stop = true;
			AfxMessageBox("Warning: RAM buffer is full. Acquisition was halted. You can avoid this warning by either increasing the RAM buffer size or reducing the region of interest.", MB_APPLMODAL);
		}
		
		// Lock the raw video for the video show thread
		EnterCriticalSection((*m_pParam_Struct).LockRawVideo+m_lLoop_Index);

		// Grab next frame
		// Hold the buffer whose index is m_lCumulative_Bufffer_Num. This is cumulative buffer
		// index. If the buffer has not been acquired yet this function will block until it is
		// available. If the buffer has been overwritten this function will return the last 
		// available buffer. Once the buffer is hold the buffer won't be overwritten until 
		// it is released with imgSessionReleaseBuffer.
		errChk(imgSessionExamineBuffer(m_Sid, lRequestedBuffer, &lCurrentBuffer, &lBuffer_Addrress));

		// This is the logical place for EnableExSyncPreview to be;
		// Turn off ExSync train right after the very last frame was grabbed.
		if (*stop) { EnableExSyncPreview(false, false, false); }

		// Copy the frame for viewing and save purposes
		CopyMemory((m_pRawBuffer+(*m_pParam_Struct).RawBufferSize.y*m_lLoop_Index),
			(void *)lBuffer_Addrress, (*m_pParam_Struct).RawBufferSize.y);

		// Subtract dark frame (if taken) from current frame (pixel by pixel)
		if (m_bDarkFrameExists) {
			nStart = (*m_pParam_Struct).RawBufferSize.y * m_lLoop_Index; // first pixel in frame buffer
			nEnd = nStart + (*m_pParam_Struct).RawBufferSize.y;	 // last pixel in frame buffer
			lIndx = 0;
			for (i = nStart ; i < nEnd ; i++) {
				m_pRawBuffer[i] = max(0, m_pRawBuffer[i] - m_pDarkFrameBuffer[lIndx]);
				lIndx++;
			}
		}

		// Unlock and reinsert the buffer back in ring
		errChk(imgSessionReleaseBuffer(m_Sid));

		// Unlock the raw video for the video show thread
		LeaveCriticalSection((*m_pParam_Struct).LockRawVideo + m_lLoop_Index);
	
		// Lock the analyzed video for the video show thread 
		EnterCriticalSection((*m_pParam_Struct).LockAnalyzedVideo + Analyzed_Loop_Index);

		// Analyze image one pixel at a time. This loop performs either motion or
		// threshold detection (right = width, bottom = height).
		for (row=0; row < m_FrameSize.cy; row++) {
			for (col=0; col < m_FrameSize.cx; col++) {
				lIndx = row * (m_FrameSize.cx) + col;

				// Set result to 0, and change only below if threshold is crossed
				*((m_pAnalyzeBuffer+Analyzed_Loop_Index*(*m_pParam_Struct).AnalyzedBufferSize.y) + lIndx) = 0;

				// Check that this pixel is in mask and should be analyzed
				if ( *(*m_pParam_Struct).DetectMotionInObjects 
					&& (m_pMotionMaskBuffer[max(0, lIndx-1)] != 255) ) { continue; }

				bThreshold = false;

				if ( *(*m_pParam_Struct).DetectMotion ) {
					// Motion detection (frame2 - frame1)
					if((*(*m_pParam_Struct).Motion_Threshold) <=
						abs(*((m_pRawBuffer+(*m_pParam_Struct).RawBufferSize.y * m_lLoop_Index) + lIndx ) - 
						*((m_pRawBuffer+(*m_pParam_Struct).RawBufferSize.y * Prev_Loop_Index) + lIndx)))
					{ bThreshold = true; }
				} else {
					// Threshold detection (frame2 < T)
					// We assume the feature of interest is darker than the background
					if( *((m_pRawBuffer+(*m_pParam_Struct).RawBufferSize.y * m_lLoop_Index) + lIndx )
						<= (*(*m_pParam_Struct).Motion_Threshold) )
					{ bThreshold = true; }
				}

				if (bThreshold) {
					*((m_pAnalyzeBuffer+Analyzed_Loop_Index*(*m_pParam_Struct).AnalyzedBufferSize.y) + lIndx) = 255;
					m_MotionX[DotsCounter] = col;
					m_MotionY[DotsCounter] = row;
					DotsCounter++;

					// Check if pixel is also inside the current virtual object
					// Create mask for virtual object to replace PtInRect() ?
					if (*(*m_pParam_Struct).VPIndx > -1) {
						if ( (*m_pParam_Struct).AllVP[*(*m_pParam_Struct).VPIndx].PtInRect(CPoint(col,row)) )
							VPCounter++;
					}
				}
			}
		}

		// Unlock the analyzed video for the video show thread 
		LeaveCriticalSection((*m_pParam_Struct).LockAnalyzedVideo + Analyzed_Loop_Index);

		if (!(*m_pParam_Struct).AnalyzeVideo) 		{
			// Saves numbers of missed frames, if needed
			if((lCurrentBuffer != lRequestedBuffer) && (*SavedFrames>0)) {
				if (LastSavedFrame == *SavedFrames) 				{
					NumMissedFrames = (unsigned short)(NumMissedFrames+lCurrentBuffer - lRequestedBuffer);
				} else {
					m_csMissedFramesList.Format(m_csMissedFramesList + "%u,%u\n",LastSavedFrame,NumMissedFrames);
					LastSavedFrame = *SavedFrames;
					m_iMissedNbBuf = m_iMissedNbBuf + NumMissedFrames;
					NumMissedFrames = (unsigned short)(lCurrentBuffer - lRequestedBuffer);
				}
			}
			VPCounter	= 0;
			DotsCounter	= 0;
			m_Results.lFrameNumber++;
		}

		/* ------------------------------------------------------ */
		/*   Real-time Whisker tracking and Event trigger logic   */
		/* ------------------------------------------------------ */
		if ( *(*m_pParam_Struct).EnableRealTimeTracking ) {

			// Compute angle from least-square linear regression
			if (DotsCounter > *(*m_pParam_Struct).Median_Threshold) {
				// Iterate over thresholded points and calculate various sums 
				dCurFrameWhiskPos = 0.0;
				SUMx = 0.0; SUMy = 0.0;
				SUMxy = 0.0; SUMxx = 0.0;
				// NOTE(review): `i <= DotsCounter` visits DotsCounter+1 points --
				// m_MotionX[DotsCounter] was never written this frame (stale/zero)
				// -- while the averages below divide by DotsCounter. Suspected
				// off-by-one; confirm intended point count.
				for (i = 0; i <= DotsCounter; i++) {
					// Jitter x/y coordinate by up to 1/2 pixel (smoothing)
					// NOTE(review): (rand()/RAND_MAX - 1) lies in [-1,0], not
					// +/-0.5 as the comment implies; verify intended range.
					nRand = (double)rand() / RAND_MAX - 1;
					nXi = m_MotionX[i] + nRand;
					nYi = m_MotionY[i] + nRand;

					// Compute sums for slope
					SUMx += nYi; // sum of x
					SUMy += nXi; // sum of y
					SUMxy += nYi * nXi; // sum of squared x*y
					SUMxx += nYi * nYi; // sum of squared x
					dCurFrameWhiskPos += nXi; // sum of pixel location (for position)
				}

				// Check that division is valid
				dT = DotsCounter * SUMxy - SUMx * SUMy; // ok to be zero
				dB = DotsCounter * SUMxx - SUMx * SUMx; // can't be zero
				( dB == 0 ) ? dSlope = DBL_MAX : dSlope = dT/dB;

				m_dIntersect = (SUMy / DotsCounter) - dSlope * (SUMx / DotsCounter);
				m_dSlope = dSlope;

				dWhiskPos = dCurFrameWhiskPos / DotsCounter; // average position
				dTheta = atan2(1.0, dSlope); // whisker angle
			} else {
				// Too few points: mark outputs with DBL_MIN sentinels
				dWhiskPos = dTheta = m_dSlope = m_dIntersect = DBL_MIN;
			}

			// Save current whisker angle and average position
			m_Results.MedianArray[lRequestedBuffer % 1024] = dWhiskPos;
			m_Results.AngleArray[lRequestedBuffer % 1024] = dTheta;

			// Set result of missed frames to zero
			if (lRequestedBuffer > (lLastAnalyzedFrame + 1)) {
				for (i = lLastAnalyzedFrame + 1; i < lRequestedBuffer; i++) {
					m_Results.MedianArray[i % 1024] = 0;//dWhiskPos;
					m_Results.AngleArray[i % 1024] = 0;//dTheta;
				}
			}
			lLastAnalyzedFrame = lRequestedBuffer;

			// Output whisker position and angle to analog outputs on NI-DAQmx device
			// Average position is normalized with respect to frame width, and scaled 0-5 V
			// Whisker angle is its raw value
			m_LastDAQOutput[0] = dWhiskPos;
			m_LastDAQOutput[1] = dTheta;
			fDAQmxVal[0] = (dWhiskPos / double(m_FrameSize.cx)) * 5.0;	// position, normalized
			fDAQmxVal[1] = dTheta;										// angle, raw
			DAQmxErrChk (DAQmxWriteAnalogF64(DAQmxTaskHandle, 1, 1, 0,
				DAQmx_Val_GroupByScanNumber, fDAQmxVal,	&iSampsWritten,	NULL ) );
		}

		/* ------------------------------------------------------ */
		/*                 Real-time FFT analysis                 */
		/* ------------------------------------------------------ */
		if (*(*m_pParam_Struct).ComputeFFTW && (m_Results.lFrameNumber >= unsigned int(nFFTWFrames))) {

			// NOTE(review): when i == nFFTWFrames this writes dFFTW_in[1024] and
			// reads MedianArray[1024] -- one past the end of both 1024-element
			// buffers (valid indices 0..1023). Suspected out-of-bounds; the loop
			// bound should probably be `i < nFFTWFrames`.
			for	(i = 1; i <= nFFTWFrames; i++) {
				dFFTW_in[i] = m_Results.MedianArray[i] - m_Results.MedianArray[i-1]; // velocity
			}

			fftw_execute(pFFTW_rplan); // FFT

			// Size of FFTW array
			int nFFTWLen = sizeof(m_Results.FFTWAmplitude)/sizeof(double); 

			double dAvgPower = 0;
			double dBandPassPower = 0;
			for (i = 0; i < (nFFTWFrames/4); i++) {
				m_Results.FFTWAmplitude[i] = abs(dFFTW_out[i]);
				m_Results.FFTWFrequency[i] = (double)i * nCorr;
				if (m_Results.FFTWFrequency[i] < 100) {
					if (m_Results.FFTWFrequency[i] >= *(*m_pParam_Struct).FrequencyBandpassLow 
						&& m_Results.FFTWFrequency[i] <= *(*m_pParam_Struct).FrequencyBandpassHigh) {
						// bandpass power is the max
						dBandPassPower = max(m_Results.FFTWAmplitude[i], dBandPassPower);
					} else {
						// NOTE(review): this is a recursive halving, not a true
						// arithmetic mean -- recent bins dominate. Confirm intent.
						dAvgPower = (dAvgPower + m_Results.FFTWAmplitude[i]) / 2;
					}
				}
			}
			m_Results.dFFTWBandPassSNR = min(sqrt((dBandPassPower/dAvgPower)*(dBandPassPower-dAvgPower))/10, 1000);
			if (m_Results.dFFTWBandPassSNR == 1000)
				m_Results.dFFTWBandPassSNR = 0.0;

			// Trigger output if SNR exceeds threshold
			if (m_Results.dFFTWBandPassSNR >= *(*m_pParam_Struct).FreqSNRThreshold
				&& *(*m_pParam_Struct).EnableFrequencyTrigger ) {
					// Signal trigger event through NI-DAQmx (stop/start/stop
					// restarts the finite pulse train on the counter output)
					DAQmxErrChk (DAQmxStopTask(DAQmxTaskHandleDig));
					DAQmxErrChk (DAQmxStartTask(DAQmxTaskHandleDig));
					DAQmxErrChk (DAQmxStopTask(DAQmxTaskHandleDig));
			}
		}

		/* ------------------------------------------------------ */
		/*                 Detect missing frames                  */
		/* ------------------------------------------------------ */
		if((lCurrentBuffer != lRequestedBuffer) && (*SavedFrames > 0)) {				
			if (LastSavedFrame == *SavedFrames) {
				NumMissedFrames = (unsigned short)(NumMissedFrames+lCurrentBuffer - lRequestedBuffer);
			} else {
				m_csMissedFramesList.Format(m_csMissedFramesList + "%u,%u\n",LastSavedFrame,NumMissedFrames);
				LastSavedFrame=*SavedFrames;
				m_iMissedNbBuf = m_iMissedNbBuf + NumMissedFrames;
				NumMissedFrames = (unsigned short)(lCurrentBuffer - lRequestedBuffer);
				if (!m_bMissedFramesReported) {
					// NOTE(review): message text likely means "resolution OR
					// framerate" (runtime string left untouched here).
					AfxMessageBox("Some frames are missed. Try decreasing resolution of framerate.", MB_ICONINFORMATION | MB_OK);
					m_bMissedFramesReported = 1;
				}
			}
		}

		/* ------------------------------------------------------ */
		/*                 Virtual Contact Logic                  */
		/* ------------------------------------------------------ */
		m_Results.bPoleTouch = false;
		if (*(*m_pParam_Struct).VPIndx > -1) {
			if ( csVPThreshold[*(*m_pParam_Struct).VPIndx] <= VPCounter ) {
				m_Results.bPoleTouch = true;
			}
		}

		// Output trigger turns ON when contact is registered and OFF when
		// detachment is registered (timeout is ~20 s)
		if (!m_Results.bPoleTouch && bOutputHigh) {
			DAQmxErrChk (DAQmxStopTask(DAQmxTaskHandleDig));
			bOutputHigh = false;
		} else {
			// Signal contact on parallel port pins
			if (*(*m_pParam_Struct).EnableProximityTrigger) {
				// Signal trigger event through NI-DAQmx
				//bool32 bIsTaskDone;
				//DAQmxErrChk (DAQmxIsTaskDone(DAQmxTaskHandleDig, &bIsTaskDone));
				if (m_Results.bPoleTouch && !bOutputHigh) {
					//DAQmxErrChk (DAQmxStopTask(DAQmxTaskHandleDig));
					DAQmxErrChk (DAQmxStartTask(DAQmxTaskHandleDig));
					bOutputHigh = true;
				}
			}
		}
		
		/* ------------------------------------------------------ */
		/*               Save analysis to log file                */
		/* ------------------------------------------------------ */
		if ((*m_pParam_Struct).SaveLog) {
			// write data to file
			//LogLine.Format("\n%u|%u|%u|%u|%u",m_Results.lFrameNumber,(long)dpCurrMedian.x,(long)dpCurrMedian.y,MissedNbBuf,DotsCounter);
			//m_LogFile.WriteString(LogLine);
		}
		
		/* ------------------------------------------------------ */
		/*     Compute video overlay indicators (triggers etc)    */
		/* ------------------------------------------------------ */
		if ((*m_pParam_Struct).TriggerOverlays) {
			// Strip of pixels starting at [1,1] (upper left corner)
			nStart = (*m_pParam_Struct).RawBufferSize.y * m_lLoop_Index; // first pixel in frame buffer

			// [1,1-4]	Framenumber WORD bytes 1-4 (big-endian across the 4 pixels)
			m_pRawBuffer[nStart] = (int)((m_Results.lFrameNumber >> 24) & 0xFF);
			m_pRawBuffer[nStart+1] = (int)((m_Results.lFrameNumber >> 16) & 0xFF);
			m_pRawBuffer[nStart+2] = (int)((m_Results.lFrameNumber >> 8) & 0XFF);
			m_pRawBuffer[nStart+3] = (int)((m_Results.lFrameNumber & 0XFF));

			// [1,5]	Trigger OUT	BOOL (object contact, frequency trigger)
			m_pRawBuffer[nStart+4] = 0;
			if (m_Results.bPoleTouch) { m_pRawBuffer[nStart+4] = 0xFF; }

			// [1,6]	Trigger IN	BOOL (edge count changed since last frame)
			DAQmxErrChk (DAQmxReadCounterScalarU32(DAQmxTaskHandleReadCtr, 0.01, &iCounter, NULL));
			m_pRawBuffer[nStart+5] = 0;
			if (iCounter > iCounterPrevious) { m_pRawBuffer[nStart+5] = 0xFF; } // Input is HIGH
			iCounterPrevious = iCounter;

			// [1,7]	FFT power
			m_pRawBuffer[nStart+6] = 0;
			if (*(*m_pParam_Struct).ComputeFFTW) {
				m_pRawBuffer[nStart+6] = (byte)m_Results.dFFTWBandPassSNR;
			}

			// [1,8]	Tracking, Channels #1 and #2
			m_pRawBuffer[nStart+7] = 0;
			m_pRawBuffer[nStart+8] = 0;
			if (*(*m_pParam_Struct).EnableRealTimeTracking) {
				// Scale tracking to 0-255
				m_pRawBuffer[nStart+7] = (byte)((m_Results.MedianArray[lRequestedBuffer % 1024] / double(m_FrameSize.cx)) * 255.0);
				m_pRawBuffer[nStart+8] = (byte)((m_Results.AngleArray[lRequestedBuffer % 1024] / PI) * 255.0);
			}
		}

		// When the first frame has been processed, signal to the SaveProc()
		// thread that it can start too.
		SetEvent(m_hAnalysisStarted);

		// Reset counters
		VPCounter	= 0;
		DotsCounter	= 0;
		m_Results.lFrameNumber++;
	}

	// IMAQ Error Handler (falls through here on clean exit too; only acts on error)
	Error:
	if( m_iError < 0 ) {
		// Unlock the Critical sections for the video show thread
		LeaveCriticalSection((*m_pParam_Struct).LockRawVideo + m_lLoop_Index);
		LeaveCriticalSection((*m_pParam_Struct).LockAnalyzedVideo + Analyzed_Loop_Index);

		if ((*m_pParam_Struct).SaveLog)
			m_LogFile.Close();
		
		DisplayImaqError(m_iError);
		SetEvent(m_hStopEvent);
		
		return false;
	}

	// NI-DAQmx Error Handler
	DAQmxError:
	if (DAQmxFailed(DAQmxError) || DAQmxTaskHandle!=0 || DAQmxTaskHandleReadCtr!=0) {
		if( DAQmxFailed(DAQmxError) )
			DAQmxGetExtendedErrorInfo(DAQmxErrBuff,2048);
		if( DAQmxTaskHandle!=0 ) {
			// DAQmx Stop Code
			DAQmxStopTask(DAQmxTaskHandle);
			DAQmxClearTask(DAQmxTaskHandle);
		}
		if( DAQmxTaskHandleReadCtr!=0 ) {
			// DAQmx Stop Code
			DAQmxStopTask(DAQmxTaskHandleReadCtr);
			DAQmxClearTask(DAQmxTaskHandleReadCtr);
		}
		if( DAQmxFailed(DAQmxError) ) {
			MessageBoxA(NULL, DAQmxErrBuff, "Virtual Pole NI-DAQmx Error", MB_ICONERROR|MB_OK);
		}
	}

	if ((*m_pParam_Struct).SaveLog)
		m_LogFile.Close();
	
	// Delete FFTW plans
	fftw_destroy_plan(pFFTW_rplan);
	//fftw_free(dFFTW_in);  // causes heap bug
	//fftw_free(dFFTW_out); // causes heap bug

	// Clear NI-DAQmx tasks
	// NOTE(review): DAQmxErrChk on these jumps BACKWARD to the DAQmxError
	// label above if a clear fails, re-running the handler; the tasks were
	// already stopped/cleared there, so a failure here could loop. Consider
	// plain calls without the macro.
	DAQmxErrChk (DAQmxClearTask(DAQmxTaskHandle));
	DAQmxErrChk (DAQmxClearTask(DAQmxTaskHandleDig));
	DAQmxErrChk (DAQmxClearTask(DAQmxTaskHandleReadCtr));

    return true; // Thread completed successfully
}


/*	Signal contact on parallel port pins
	Contact is signalled on CH7 and 8 of the parallel port. CH7 carries a differential
	signal, flipping sign each time contact is detected. CH8 signals touch by flipping
	briefly to the UP state.
	NOTE(review): the second write below computes (~value & 0x40), which keeps
	only the CH7 bit and clears every OTHER pin (including CH8), contradicting
	the "without touching the other channels" comments. The third write is then
	a no-op, since (x & 0x40) & 0x7F == x. If the other pins carry state,
	this clobbers them -- confirm intended hardware behavior before changing.
*/
void CVirtualPoleDoc::SignalContactOnParPort() {
	short ParPortResult = Inp32(0x378); // read current par port values, 0 - 255

	// Turn CH8 ON (without touching the other channels)
	ParPortResult = (ParPortResult | 0x80);
	Out32(0x378, ParPortResult);

	// Change status of CH7 (ON->OFF or OFF->ON)
	ParPortResult = (~ParPortResult & 0x40);
	Out32(0x378, ParPortResult);	// flip value of CH7
	
	// Turn CH8 OFF
	ParPortResult = (ParPortResult & 0x7F);
	Out32(0x378, ParPortResult);
	return;
}


// Run ring, stop, save and analyzer threads
// Run ring, stop, save and analyzer threads.
// pParam points at the GUI-supplied Analyze_Params structure.
// Returns false when analysis could not be started (no data loaded, missing
// parameter structure, or a failure while creating events/threads).
bool CVirtualPoleDoc::OnStartAnalyze(LPVOID pParam) {
	m_bStopRing = false;		// Initialize

	if (!m_bDataLoaded)			// if nothing to analyze
		return false;

	// Update analysis parameters from GUI
	m_pParam_Struct =(Analyze_Params*)pParam;

	// Abort if we didnt retrieve parameter structure
	if (!m_pParam_Struct)
		return false;

	m_Results.bError		= false;	// There are no errors yet
	m_Results.lFrameNumber	= 0;		// set the master counter to 0 
	m_lWrite_Index			= 0;		// starts to write from buffer 0
	m_iMissedNbBuf			= 0;		// No missed frames yet
	m_iSavedFrameCounter	= 0;		// We are not saving any frames yet
	m_lLoop_Index			= 0;		// Intialize loop index
	
	// Initialize IMAQ ring buffer. IMAQ will allocate memory.
	for(int i = 0; i< RingBufferSize; i++)
		m_pRingBuffer[i] = NULL;

	// Allocate memory for and initialize raw and analysis image buffers.
	// Free any previous allocations first so repeated starts do not leak
	// (the original code never freed m_pAnalyzeBuffer before re-allocating it).
	delete [] m_pRawBuffer;
	delete [] m_pAnalyzeBuffer;
	int iLen = (*m_pParam_Struct).AnalyzedBufferSize.x*(*m_pParam_Struct).AnalyzedBufferSize.y;
	m_pRawBuffer	 = new byte[(*m_pParam_Struct).RawBufferSize.x*(*m_pParam_Struct).RawBufferSize.y];
	m_pAnalyzeBuffer = new byte[iLen];

	// Intialize movement arrays
	for (int i = 0; i < (*m_pParam_Struct).AnalyzedBufferSize.y; i++) {
		m_MotionX[i] = 0;
		m_MotionY[i] = 0;
	}

	// Clear memory for amplitude array
	for(unsigned int i = 0; i< (*m_pParam_Struct).FPS; i++) {
		m_Results.MedianArray[i] = 0;
		m_Results.AngleArray[i] = 0;
	}

	// compute the factors in each direction to isolate movement only in the median line direction
	m_dpFactors.x = sin((*m_pParam_Struct).Angle);
	m_dpFactors.y = cos((*m_pParam_Struct).Angle);

	// - handling the log file 
	m_tNow = CTime::GetCurrentTime();
	CString FileName;

	// create the log file if user wants to log
	if ((*m_pParam_Struct).SaveLog) {
		FileName = CString((*m_pParam_Struct).LogPath) + "VPLog " + m_tNow.Format("%H_%M_%d_%m_%y") + ".bin";

		// Create path for FileName if it does not already exist
		CreateDirectoryFromPath(FileName);

		TRY {	// Try to open the file
			m_LogFile.Open(FileName, CFile::modeCreate | CFile::modeWrite | CFile::typeBinary);
		} CATCH(CFileException, e) {
			#ifdef _DEBUG
				// if we failed to open the log file - show some message and exit
				afxDump << "Log file could not be opened " << e->m_cause << "\n";
				return false;
			#endif
		} END_CATCH
		
		// add header to log file
		//char buf[] ="#Frame|Median X[pix]|Median Y[pix]|#Missed Frames|FPS|Pole Touched|Area";
		char buf[] ="#Frame|Median X|R/P|Area|PositiveDiff X|NegativeDiff X";
		
		m_LogFile.WriteString(buf);
	}
	
	// Create event that is signaled when we start the Save thread.
	m_hStartSave = CreateEvent(NULL, TRUE, FALSE, NULL);
	if (!m_hStartSave) {
		if ((*m_pParam_Struct).SaveLog)
			m_LogFile.Close();
		return false;
	}

	// Create event that is signaled when we start processing frames.
	// SaveProc() uses this event to prevent saving unprocessed frames.
	m_hAnalysisStarted = CreateEvent(NULL, TRUE, FALSE, NULL);
	if (!m_hAnalysisStarted) {
		// close the log file on failure, consistent with the other error paths
		if ((*m_pParam_Struct).SaveLog)
			m_LogFile.Close();
		return false;
	}

	// Starts STOP, analysis, save and ring threads  
	
	// Create the event that needs to be signaled when we wish to STOP the acquisition.
	m_hStopEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
	if (!m_hStopEvent) {
		if ((*m_pParam_Struct).SaveLog)
				m_LogFile.Close();
		return false;
	}

	// Sets the results parameters
	m_Results.bSaveStopped = true;
	m_Results.hAnalyzeStopped = CreateEvent(NULL, TRUE, FALSE, NULL);
	// BUGFIX: verify the event that was just created; the original re-checked
	// m_hStopEvent here, so a failed hAnalyzeStopped went undetected.
	if (!m_Results.hAnalyzeStopped) {
		if ((*m_pParam_Struct).SaveLog)
				m_LogFile.Close();
		return false;
	}

	// Create thread responsible for stopping acquisition
	m_hStopThread = AfxBeginThread(StopThread,this,THREAD_PRIORITY_NORMAL,0,0,NULL);
	if (!m_hStopThread) {
		if ((*m_pParam_Struct).SaveLog)
				m_LogFile.Close();
		return false;
	}

	// Start camera ExSync control
	EnableExSyncPreview(true, false, false);

	// Start ring buffer acquisition
	errChk(imgRingSetup (m_Sid, RingBufferSize, (void**)m_pRingBuffer, 0, true));

	// Start analysis thread
	m_hAnalyzeThread = AfxBeginThread(AnalyzeThread,this,THREAD_PRIORITY_TIME_CRITICAL,0,0,NULL);               
	if (!m_hAnalyzeThread) {
		if ((*m_pParam_Struct).SaveLog)
				m_LogFile.Close();
		SetEvent(m_hStopEvent);
		return false;
	}

	// Keep the CWinThread object alive after the thread exits so StopProc()
	// can wait on it; StopProc() is responsible for deleting it.
	m_hAnalyzeThread -> m_bAutoDelete = false;

	m_bResetSave = false;
		
	// Start save-to-disk thread
	m_hSaveThread = AfxBeginThread(SaveThread,this,THREAD_PRIORITY_TIME_CRITICAL,0,0,NULL);               
	if (!m_hSaveThread) {
		if ((*m_pParam_Struct).SaveLog)
			m_LogFile.Close();
		SetEvent(m_hStopEvent);
		return false;
	}

	m_hSaveThread -> m_bAutoDelete = false;
	
	// IMAQ Error handling - reached by fall-through on success, or via the
	// errChk() goto on an IMAQ failure.
	// NOTE(review): on an IMAQ error this still returns true (original
	// behavior preserved); callers learn of the failure via DisplayImaqError
	// and the stop event.
	Error:
	if(m_iError < 0) {
		DisplayImaqError(m_iError);
        if ((*m_pParam_Struct).SaveLog)
				m_LogFile.Close();
		SetEvent(m_hStopEvent);
	}
	return true;
}

// A thread that waits for the stop event to occur, then stops the acquisition
// Thread entry point: unwraps the document pointer and runs the STOP procedure.
UINT CVirtualPoleDoc::StopThread(LPVOID pParam) {
	CVirtualPoleDoc* const pDoc = static_cast<CVirtualPoleDoc*>(pParam);
	pDoc->StopProc();
	return 0;
}
	
// The STOP function
// The STOP function. Waits on m_hStopEvent, asks the worker threads to end,
// waits for (or terminates) them, then stops the IMAQ acquisition and signals
// hAnalyzeStopped so the GUI knows analysis has finished.
void CVirtualPoleDoc::StopProc() {
	DWORD dwResult;

    // Wait for the stop event to occur
    dwResult = WaitForSingleObject(m_hStopEvent, INFINITE);
    if (dwResult != WAIT_FAILED) {
        // since Stop event was triggered and fulfilled its purpose it's time to close it
		CloseHandle(m_hStopEvent);
        m_hStopEvent = NULL;
    }

	// Stop the thread. now thread should stop itself - the healthy solution
	m_bStopRing = true;

	// Wait for analysis thread to end with 5000 msec timeout
	dwResult = WaitForSingleObject(m_hAnalyzeThread -> m_hThread, 5000);

	// If the wait timed out, terminate the analyzer thread - this isn't very good because it 
	// might lock up some system resources until next reboot - but the thread is stuck and there's nothing much to do.
	// BUGFIX: TerminateThread() takes the Win32 thread HANDLE; the original
	// passed the CWinThread* object pointer instead.
	if (dwResult == WAIT_TIMEOUT)
		TerminateThread(m_hAnalyzeThread -> m_hThread, 0);
	
	// BUGFIX: delete the CWinThread object (m_bAutoDelete is false) BEFORE
	// clearing the pointer; the original NULLed the pointer first and then
	// deleted NULL, leaking the thread object.
	delete m_hAnalyzeThread;
	m_hAnalyzeThread = NULL;
	
	// If the save-to-file thread has not been triggered then just release it
	if (m_hStartSave)
		SetEvent(m_hStartSave);

	// Stop the Save thread - Wait for the save thread to end
	dwResult = WaitForSingleObject(m_hSaveThread -> m_hThread, INFINITE);

	// clear the save thread (same delete-before-NULL fix as above)
	delete m_hSaveThread;
	m_hSaveThread = NULL;

	// since Start Save event was triggered and fulfilled its purpose it's time to close it
	CloseHandle(m_hStartSave);
    m_hStartSave = NULL;
	
	 // Stop the acquisition
    imgSessionStopAcquisition(m_Sid);

	// Close all parameters
	CloseVideo();

	// Signal that we stopped analyzing
	SetEvent(m_Results.hAnalyzeStopped);

	return;
}

// Signal that image acquisition should stop.
// This function is called mainly by PoleView.cpp functions
void CVirtualPoleDoc::TriggerStop() {
	// Wakes the stop thread waiting in StopProc(), which performs the shutdown.
	SetEvent(m_hStopEvent);
}

// Accessor: returns the current frame dimensions (m_FrameSize, in pixels).
CSize CVirtualPoleDoc::GetFrameSize() {
	return m_FrameSize;
}

/*---------------------------------------------------------------------------
   Function :   kth_smallest()
   In       :   array of elements, # of elements in the array, rank k
   Out      :   one element from the array
   Aim      :   find the kth smallest element in the array
   Notice   :   use the median() macro defined below to get the median. 
 ---------------------------------------------------------------------------*/
/* Wirth/Hoare selection: partially partitions a[] in place until a[k] holds
   the element that would sit at index k in fully sorted order, then returns
   it. Note that the array IS reordered by this routine. Used via the
   WMedian() macro to obtain the median. */
elem_type CVirtualPoleDoc::kth_smallest(elem_type a[], int n, int k) {
	int lo = 0;
	int hi = n - 1;

	while (lo < hi) {
		const elem_type pivot = a[k];
		int left  = lo;
		int right = hi;

		// Hoare-style partition around the pivot value
		do {
			while (a[left] < pivot)
				left++;
			while (pivot < a[right])
				right--;
			if (left <= right) {
				elem_type tmp = a[left];
				a[left]  = a[right];
				a[right] = tmp;
				left++;
				right--;
			}
		} while (left <= right);

		// Narrow to whichever side still contains index k
		if (right < k)
			lo = left;
		if (k < left)
			hi = right;
	}
	return a[k];
}

// This function executes when user presses the 'Start Saving Video' GUI button
UINT CVirtualPoleDoc::SaveThread(LPVOID pParam) {
	// Thread entry point: delegate to the instance's SaveProc().
	CVirtualPoleDoc *ThisClass = (CVirtualPoleDoc *)pParam;
	ThisClass->SaveProc();
	AfxEndThread(0);	// ends the thread here; the return below is never reached
	return 0;
}

/* Create a directory in local filesystem from a full path
	e.g. Create 'C:\aa\bb\' from 'C:\aa\bb\cc.dat'
*/
// Strips the filename portion from csFileName and creates the remaining
// directory path (including intermediate directories). Returns true on
// success or when there is nothing to create; false when the path is invalid
// or not found (an error box is shown in those cases).
bool CVirtualPoleDoc::CreateDirectoryFromPath(CString csFileName) {
	int nLastDelim = csFileName.ReverseFind(_T('\\'));
	// No backslash at all: a bare filename has no directory component, so
	// there is nothing to create (the original passed a bogus path on).
	if (nLastDelim < 0)
		return true;
	CString csPath = csFileName.Mid(0, nLastDelim);
	int nResult = SHCreateDirectoryEx(NULL, csPath, NULL);
	// Check for errors (ERROR_SUCCESS and ERROR_ALREADY_EXISTS fall through)
	switch (nResult) {
		case ERROR_BAD_PATHNAME:
			AfxMessageBox("Could not create directory " + csPath + ": Path invalid.");
			return false;
		case ERROR_PATH_NOT_FOUND:
			AfxMessageBox("Could not create directory " + csPath + ": Path not found.");
			return false;
	}
	return true;
}

/*	This function executes the save-to-file thread. This thread is executed whenever
	preview is enabled via the GUI or TCP interface. It will run before the explicit
	command to start saving frames to disk is given. Thus, output files are initialized
	before saving starts.

	When the user initiates Save, an internal loop in this function starts to grab
	frames to disk. When Saving ends, this function will exit and Preview is by default
	turned off.
*/
void CVirtualPoleDoc::SaveProc() {
	// Aliases into shared state written by other threads; volatile so the
	// save loop below re-reads them on every pass.
	volatile bool *stop			= &m_bStopRing;
	volatile bool *reset		= &m_bResetSave;
	unsigned long* framelist	= NULL;
	uInt32 err					= 0;
	DWORD dwResult				= 0;
	volatile ULONG *RingIndex	= &m_lLoop_Index;
	bool bVideoFileExist		= false;
	CString csFilePath, csFileNameBase, csFileName, csSettings;
	CStdioFile	cfVideoFile, cfSettingsFile;
	
	m_Results.bSaveStopped = true;
	*reset = false;
	m_tNow = CTime::GetCurrentTime();

	// default video filename (VideoPath treated as a directory)
	csFileNameBase = CString((*m_pParam_Struct).VideoPath) + "VPVideo_" + m_tNow.Format("%H_%M_%S_%m%d%y");

	// Check if path includes .bin or .BIN. If it does, use specified filename
	// (i.e. VideoPath already names the output file rather than a directory)
	csFilePath = (*m_pParam_Struct).VideoPath;
	if (csFilePath.GetLength() >= 4) {
		if ((csFilePath.Find(".bin", csFilePath.GetLength()-4))>1  ||  (csFilePath.Find(".BIN", csFilePath.GetLength()-4))>1) {
			csFileNameBase = csFilePath.Left(csFilePath.GetLength()-4);
		}
	}
	csFileName = csFileNameBase + ".bin";
	CFileStatus csStatus;

	TRY {
		// Open video file
		if( cfVideoFile.GetStatus(csFileName, csStatus ) ) { // TRUE 
			// If file already exists we need to edit the filename so we do not overwrite it
			// Append filename with current time and date
			csFileNameBase = csFileNameBase + m_tNow.Format("_%H_%M_%S_%m%d%y");
			csFileName = csFileNameBase + ".bin";

			// Filemode used for overwriting existing files
			//cfVideoFile.Open(csFileName, CFile::modeWrite | CFile::typeBinary);
		}
		// Create video directory if it does not exist
		CreateDirectoryFromPath(csFileName);

		// Potential error may occur here if csFileName is already open
		//cfVideoFile.Close();
		cfVideoFile.Open(csFileName, CFile::modeCreate | CFile::modeWrite | CFile::typeBinary);

	} CATCH( CFileException, e ) {
		// NOTE(review): in release builds (_DEBUG undefined) this handler is
		// empty, so a failed open falls through and the Write() calls below
		// run against an unopened file - confirm this is intended.
		#ifdef _DEBUG
			// If we failed to open video file show error message and exit
			afxDump << "Video file could not be opened " << e->m_cause << "\n";
			SetEvent(m_hStopEvent);
			return;
		#endif
	} END_CATCH

	// Create file for general video settings
	csFileName = csFileNameBase + ".txt";

	TRY {
		// Open the video settings file
		if( cfSettingsFile.GetStatus(csFileName, csStatus ) ) {
			cfSettingsFile.Open(csFileName, CFile::modeWrite | CFile::typeText);
		} else {
			cfSettingsFile.Open(csFileName, CFile::modeCreate | CFile::modeWrite | CFile::typeText);
		}
	} CATCH( CFileException, e) {
		// NOTE(review): same release-build caveat as the video file above.
		#ifdef _DEBUG
			// If we failed to open the video settings file show an error and exit
			afxDump << "Video settings file could not be opened " << e->m_cause << "\n";
			SetEvent(m_hStopEvent);
			return;
		#endif
	} END_CATCH

	// Wait for user to Stop or Start saving frames
	dwResult = WaitForSingleObject(m_hStartSave, INFINITE);
	
	if (dwResult != WAIT_OBJECT_0) {
		// Unexpected wait result - treat as a stop request
		SetEvent(m_hStopEvent);
		*reset = true;
	}

	// Start saving from the frame the analysis loop is currently at
	m_lWrite_Index = *RingIndex;
	m_Results.bSaveStopped = false;

	// Wait for analysis thread to report is has started analysing frames.
	// Otherwise, we risk saving frames before they have been processed.
	dwResult = WaitForSingleObject(m_hAnalysisStarted, INFINITE);

	// Determine if we are to STOP or SAVE the running buffer acquisition
	if (*stop && !*reset) {
		// User pressed Stop
	} else if (!*stop && !*reset) {
		// User pressed Save
		EnableExSyncPreview(true, true, true);
	}

	// Run loop that saves frames in ring buffer to disk
	// Abort when the stop or reset pointer is set to false
	// Loop runs until RAM buffer is emptied
	// NOTE(review): this loop busy-spins (no Sleep/wait) while waiting for new
	// frames, burning a core; acceptable only if this thread is meant to poll
	// at full speed - confirm.

	bool bRAMBufferEmpty = false;
	bool bStopOnceSwitch = false; // one-time variable reset after stop signal is detected
	while( (!*stop && !*reset)		// manual stop conditions
		|| !bRAMBufferEmpty ) {		// dont stop saving until RAM buffer has been emptied

		//   RingIndex		  Current buffer index stored in memory
		//   m_lWrite_Index	  Current index saved to disk

		// Write another frame to disk if ring and write indices are not equal
		bRAMBufferEmpty = true;
		if ((*RingIndex) != m_lWrite_Index) {
			// Lock the slot so the analysis thread cannot overwrite it mid-write
			EnterCriticalSection((*m_pParam_Struct).LockRawVideo+m_lWrite_Index);

			cfVideoFile.Write((m_pRawBuffer+(m_lWrite_Index*m_FrameSize.cx*m_FrameSize.cy)), m_FrameSize.cx*m_FrameSize.cy);

			LeaveCriticalSection((*m_pParam_Struct).LockRawVideo+m_lWrite_Index);

			cfVideoFile.Flush();
			m_lWrite_Index++;
			// NOTE(review): ring length is taken from RawBufferSize.x - confirm
			// this matches the number of frame slots in the raw RAM buffer.
			m_lWrite_Index = m_lWrite_Index % (*m_pParam_Struct).RawBufferSize.x; // cycle m_lWrite_Index from 0 to RawBufferSize.x-1
				
			// Increment frame counter
			m_iSavedFrameCounter++;
			bVideoFileExist = true;

			// Check if video file has reached max length (FrameLimit of -1 means unlimited)
			if (( (*m_pParam_Struct).FrameLimit > -1 )
				&& (m_iSavedFrameCounter+m_iMissedNbBuf >= (*m_pParam_Struct).FrameLimit)) {
				*reset = true;
			}
			bRAMBufferEmpty = false;
		}

		// Stop camera ExSync ONCE after stop signal is received
		if (*stop && !bStopOnceSwitch) {
			EnableExSyncPreview(false, false, false);
			bStopOnceSwitch = true;
		}

	} // end of saving frames to disk

	// Stop ExSync pulses in case we missed above
	if (!bStopOnceSwitch) {
		EnableExSyncPreview(false, false, false);
	}

	cfSettingsFile.WriteString("No video file exists\n"); // default
	if (bVideoFileExist) {
		// Write the video size
		csSettings.Format("%d,%d\n",m_FrameSize.cy,m_FrameSize.cx);
		cfSettingsFile.WriteString(csSettings);

		// Write the frame rate (frames Per Second) and number of saved frames
		csSettings.Format("%d,%d\n",(*m_pParam_Struct).FPS,m_iSavedFrameCounter);
		cfSettingsFile.WriteString(csSettings);
			
		// Write the number of the missed frames and the total number of frames
		csSettings.Format("%d,%d\n",m_iMissedNbBuf,m_iMissedNbBuf+m_iSavedFrameCounter);
		cfSettingsFile.WriteString(csSettings);

		// If there are missed frames then write their numbers
		if (m_iMissedNbBuf > 0)
			cfSettingsFile.WriteString(m_csMissedFramesList);	
	}
	cfSettingsFile.Flush();

	// TEST
	//CloseHandle(hVideoFile);

	// Close video file
	// NOTE(review): m_hFile is assigned NULL after Close(), presumably to stop
	// the CStdioFile destructor from double-closing - verify against the MFC
	// version in use.
	cfVideoFile.Close();
	cfVideoFile.m_hFile = NULL;
	cfSettingsFile.Close();
	cfSettingsFile.m_hFile = NULL;

	// Reset everything for the next save cycle
	m_iSavedFrameCounter = 0;
	m_lWrite_Index = 0;
	ResetEvent(m_hStartSave);
	dwResult = 0;
	bVideoFileExist = false;
	framelist = NULL;
	m_Results.bSaveStopped = true;

	return;
}


void CVirtualPoleDoc::TriggerSave() {
	SetEvent(m_hStartSave);
}


/* Enable/disable the ExSync preview mode.
	When enabled (input is 'true'), a pulse train is generated on the camera control
	line that drives the camera exposure. For the duration of the pulse train (aborted
	with input 'false') frames are acquired by the camera and can be buffered by the
	frame grabber. This function must run whenever a frame needs to be grabbed for 
	preview purposes, setting ROIs etc if the camera is configured to operate in one
	of the ExSync modes (either via a serial interface or the Basler CCT+ tool). The
	parameters for the pulse train are the same as those used for data acquisition
	(i.e. obtained from the user-configurable settings in the GUI)

	Note that during preview, a pulse train is generated on the user-configured camera
	control line. This must be taken into account if other hardware is configured to
	respond to events on that line. If the bUserTrig input is false, the pulse train
	starts immediately and does not wait for any incoming triggers. If its true, the
	pulse train will wait for the configured Trig IN line to fire.

	If camera is in free-run mode, frames are already being stored in the buffer and we
	are unable to synchronize. The free-run mode should only be used when there is no
	need for synchronization (i.e. video is the only signal being recorded). Nevertheless,
	in case we need to know if the camera is on/off, we will route the
	IMG_TRIG_DRIVE_AQ_IN_PROGRESS status signal to the framegrabber's digital output channel
	(ch0). On the PCI-1429 that output is on the SMB connector.
*/
void CVirtualPoleDoc::EnableExSyncPreview(bool bEnable, bool bUseTrigIn, bool bEnableTrigOut) {
	// Determine if we are in ExSync mode. If not, abort here.
	if (strcmp((LPCTSTR)(*m_pParam_Struct).CameraMode, "ExSync, level-controlled") != 0) {
		// TODO: Add sync outputs for free-run mode
		//imgSessionTriggerDrive2(m_Sid, IMG_SIGNAL_EXTERNAL, 0, IMG_TRIG_POLAR_ACTIVEH, IMG_TRIG_DRIVE_AQ_IN_PROGRESS);
		//imgSessionTriggerDrive2(m_Sid, IMG_SIGNAL_EXTERNAL, 0, IMG_TRIG_POLAR_ACTIVEH, IMG_TRIG_DRIVE_FRAME_START);
		return;
	}

	if (bEnable) {
		// Stop any running pulses before reprogramming them
		imgPulseStop(m_plsExSyncTrain);
		imgPulseStop(m_plsExSyncHalfCopy);
		imgPulseStop(m_plsExSyncTrigOut);
		if (bEnableTrigOut) // trig out is only used when we start saving
			::Sleep(10);	// sleep for 10 msec to clearly indicate onset of saved frames

		// Detect camera control (ExSync) line (RTSI_0 -> RTSI_7 or External_0 -> External_7)
		CString csCameraControlLine = (*m_pParam_Struct).CameraControlLine;
		IMG_SIGNAL_TYPE sigOutputType;
		uInt32 outputNumber;
		if (strncmp((LPCTSTR)csCameraControlLine, "RTSI", 4) == 0) {	
			sigOutputType = IMG_SIGNAL_RTSI;											// RTSI
			outputNumber = (uInt32)atoi(csCameraControlLine.Mid(5,1));					// Line number (digit after "RTSI_")
		} else {																		
			sigOutputType = IMG_SIGNAL_EXTERNAL;										// External
			outputNumber = (uInt32)atoi(csCameraControlLine.Mid(9,1));					// Line number (digit after "External_")
		}

		// Determine camera trigger line (RTSI_0 -> RTSI_7 or External)
		IMG_SIGNAL_TYPE sigTriggerType;
		uInt32 triggerNumber;
		sigTriggerType = IMG_SIGNAL_STATUS;								// Immediate (default)
		triggerNumber = IMG_IMMEDIATE;									// Line number (default)
		if (bUseTrigIn) {
			CString csCameraTriggerLine = (*m_pParam_Struct).CameraTriggerLine;
			if (strncmp((LPCTSTR)(*m_pParam_Struct).CameraTriggerLine, "RTSI", 4) == 0) {	
				sigTriggerType = IMG_SIGNAL_RTSI;								// RTSI
				triggerNumber = (uInt32)atoi(csCameraTriggerLine.Mid(5,1));		// Line number
			} else if (strncmp((LPCTSTR)(*m_pParam_Struct).CameraTriggerLine, "External", 8) == 0) {
				sigTriggerType = IMG_SIGNAL_EXTERNAL;							// External
				triggerNumber = (uInt32)atoi(csCameraTriggerLine.Mid(9,1));		// Line number
			} else {
				sigTriggerType = IMG_SIGNAL_STATUS;								// Immediate
				triggerNumber = IMG_IMMEDIATE;									// Line number
			}
		}

		// Detect trigger output (Trig OUT) line (RTSI_0 -> RTSI_7 or External_0 -> External_7)
		// Signals ONSET of camera ExSyn pulse-train
		CString csCameraTriggerOutLine = (*m_pParam_Struct).CameraTriggerOutLine;
		IMG_SIGNAL_TYPE sigTriggerOutType;
		uInt32 triggerOutNumber;
		if (strncmp((LPCTSTR)(*m_pParam_Struct).CameraTriggerOutLine, "RTSI", 4) == 0) {	
			sigTriggerOutType = IMG_SIGNAL_RTSI;									// RTSI
			triggerOutNumber = (uInt32)atoi(csCameraTriggerOutLine.Mid(5,1));		// Line number
		} else {																		
			sigTriggerOutType = IMG_SIGNAL_EXTERNAL;								// External
			triggerOutNumber = (uInt32)atoi(csCameraTriggerOutLine.Mid(9,1));		// Line number
		}

		// Compute pulse delay and width from the GUI exposure (us) and FPS
		double dTrainPulseDelay, dTrainPulseWidth;
		dTrainPulseWidth = (*m_pParam_Struct).FrameExposure / 1000000;				// Exposure
		dTrainPulseDelay = (1 / (double)(*m_pParam_Struct).FPS) - dTrainPulseWidth;	// No-exposure
		uInt32 nPulseDelay, nPulseWidth, nTimeBase; // sec
		imgPulseRate(dTrainPulseDelay, dTrainPulseWidth, &nPulseDelay, &nPulseWidth, &nTimeBase); // clock timebase

		// Program pulse train (ExSync)
		// Note that in ExSync level-controlled mode camera exposure is controlled by
		// the FALLING edge of the camera control pulse!!
		//
		//       |---------|            |---------|
		//    ___|         |____________|         |___
		//
		//       |--Delay--|---Width----|--Delay--|
		//                 |---EXPOSE---|     
		//
		imgPulseCreate2(nTimeBase,										// timebase
			nPulseDelay, nPulseWidth,									// pulse parameters
			sigTriggerType, triggerNumber, IMG_TRIG_POLAR_ACTIVEH,		// trigger on HIGH input
			sigOutputType, outputNumber, IMG_TRIG_POLAR_ACTIVEL,		// LOW during pulse width
			PULSE_MODE_TRAIN, &m_plsExSyncTrain );
		
		// Program trigger out pulse (Trig OUT)
		//  Single pulse, triggered by Camera Control pulse but with inverted polarity.
		//	Duration of pulse is the same as camera exposure duration.
		imgPulseCreate2(nTimeBase,										// timebase
			nPulseDelay, nPulseWidth,									// pulse parameters
			sigOutputType, outputNumber, IMG_TRIG_POLAR_ACTIVEH,		// trigger on HIGH, i.e DELAY
			sigTriggerOutType, triggerOutNumber, IMG_TRIG_POLAR_ACTIVEH,// HIGH during pulse width
			PULSE_MODE_SINGLE, &m_plsExSyncTrigOut );

		// Program half-rate pulse-train copied from camera control pulse-train
		// By default this pulse-train outputs on RTSI_7, which is a reserved channel for this purpose
		// NOTE(review): the delay here is nPulseDelay*20, although the comment
		// above says "half-rate" - confirm the intended rate.
		imgPulseCreate2(nTimeBase,										// timebase
			nPulseDelay*20, nPulseWidth,								// pulse parameters
			sigOutputType, outputNumber, IMG_TRIG_POLAR_ACTIVEH,		// trigger on HIGH, i.e DELAY
			IMG_SIGNAL_RTSI, 7, IMG_TRIG_POLAR_ACTIVEL,					// HIGH during pulse width
			PULSE_MODE_SINGLE, &m_plsExSyncHalfCopy );

		// Arm and start pulse train
		// Start the dependables first, and last the camera control (ExSync) pulse-train
		if (bEnableTrigOut)
			imgPulseStart(m_plsExSyncTrigOut, m_Sid);
		imgPulseStart(m_plsExSyncHalfCopy, m_Sid);
		imgPulseStart(m_plsExSyncTrain, m_Sid);
	} else {
		// Disarm pulse and stop triggering frame acquisition in the camera
		// Stop camera control (ExSync) pulse-train first, and then the dependables
		imgPulseDispose(m_plsExSyncTrain);
		imgPulseDispose(m_plsExSyncTrigOut);
		imgPulseDispose(m_plsExSyncHalfCopy);
		ExSyncReset();
	}
	return;
}

/*	Reset camera exposure by sending a single, brief ExSync pulse. This will cause the camera
	to expose briefly but serves to terminate exposure when the pulse is sent. This function
	should be called whenever initializing the camera connecting or any other time when the
	camera should not be allowed to expose.
*/
void CVirtualPoleDoc::ExSyncReset() {

	// Determine if we are in ExSync mode. If not, abort here.
	if (strcmp((LPCTSTR)(*m_pParam_Struct).CameraMode, "ExSync, level-controlled") != 0)
		return;

	// Detect camera control (ExSync) line (RTSI_0 -> RTSI_7 or External_0 -> External_7)
	CString csCameraControlLine = (*m_pParam_Struct).CameraControlLine;
	IMG_SIGNAL_TYPE sigOutputType;
	uInt32 outputNumber;
	if (strncmp((LPCTSTR)csCameraControlLine, "RTSI", 4) == 0) {	
		sigOutputType = IMG_SIGNAL_RTSI;											// RTSI
		outputNumber = (uInt32)atoi(csCameraControlLine.Mid(5,1));					// Line number
	} else {																		
		sigOutputType = IMG_SIGNAL_EXTERNAL;										// External
		outputNumber = (uInt32)atoi(csCameraControlLine.Mid(9,1));					// Line number
	}

	// Compute pulse delay and width; 10 microsec pulse
	uInt32 nPulseDelay, nPulseWidth, nTimeBase; // sec
	imgPulseRate(0.00001, 0.00001, &nPulseDelay, &nPulseWidth, &nTimeBase);

	imgPulseCreate2(nTimeBase,										// timebase
		nPulseDelay, nPulseWidth,									// pulse parameters
		IMG_SIGNAL_STATUS, IMG_IMMEDIATE, IMG_TRIG_POLAR_ACTIVEH,	// trigger immediately
		sigOutputType, outputNumber, IMG_TRIG_POLAR_ACTIVEL,		// LOW during pulse width
		PULSE_MODE_SINGLE, &m_plsExSyncTrain );

	// Arm and start pulse
	imgPulseStart(m_plsExSyncTrain, m_Sid);

	return;
}
