#include "hdpointsunprojection.h"
#include "hdshareddata.h"
#include "testwindow.h"
#include <QMessageBox>
#include <QDataStream>

#define AVERAGING_WINDOW 5
extern TestWindow *w;

// Lazily-allocated OpenCV matrices reused across frames by UnProjectingPoints():
// one (x, y, disparity) triple per tracked marker in, one (x, y, z) triple out.
// Allocated on first use and never released (process-lifetime buffers).
CvMat* imagePoint = 0;
CvMat* worldPoint = 0;

/// Creates the unprojection worker: sets up the "WorldCoordinate" shared
/// memory segment used to publish controller positions to other processes,
/// then starts the thread (run() loops forever).
HDPointsUnprojection::HDPointsUnprojection(QObject *parent)
	: QThread(parent)
{
	sharedMemory = new QSharedMemory("WorldCoordinate");
	if (!sharedMemory->create(1024))
	{
		// create() fails if a segment with this key already exists (e.g. a
		// previous instance crashed without cleaning up). Attach to the
		// existing segment so writeToSharedMemory() still has somewhere to
		// write; otherwise data()/lock() would operate on nothing.
		sharedMemory->attach();
	}

	start();
}

HDPointsUnprojection::~HDPointsUnprojection()
{
	// NOTE(review): sharedMemory is intentionally leaked here. run() loops
	// forever and is never asked to stop, so deleting/detaching the segment
	// while the worker thread may still be inside writeToSharedMemory()
	// would be a use-after-free. A proper fix needs a stop flag + wait()
	// before releasing the segment — TODO confirm shutdown requirements.
	//sharedMemory->destroy();
}

/// Worker loop: each iteration waits until every camera thread has produced
/// a fresh set of tracked 2D marker positions, converts them to 3D world
/// coordinates (calibrated path or naive fallback), publishes the result,
/// and then releases the cameras to capture the next frame.
///
/// Synchronization handshake (order-critical):
///   1. acquire(camerasCount) on unprojectionReady — one release per camera,
///      so this blocks until ALL cameras are done with the current frame.
///   2. process the 2D points.
///   3. release() each captureReady[i] — lets every camera grab again.
void HDPointsUnprojection::run()
{
	QString strSharedData = "";

	forever
	{
		// Wait for all cameras to finish processing.
		unprojectionReady->acquire(camerasCount);

		// Process the 2D points that has been tracked.
		// The 2D point of object i in the left camera is:
		// viewers[0]->objectMarkers[i]->centroid();
		// and the corresponding 2D point of object i in the right camera is:
		// viewers[1]->objectMarkers[i]->centroid();


		if(bCalibrationFinished)
		{
			// Calibrated path: perspective unprojection via _matUnproject.
			UnProjectingPoints();
		}
		else if (viewers.count() >= 2 && viewers[0]->objectMarkers.count() > 0)			// The Naive way works unless you make the calibration.
		{
			strSharedData = "";

			for (int i=0; i < viewers[0]->objectMarkers.count(); i++)
			{
				// Naive mapping: centroid() is presumably normalized [0,1]
				// (it is scaled by the viewer's pixel size) — TODO confirm.
				// x/y come from the left camera, re-centered on the image
				// middle; x is negated to flip the axis direction.
				float x = -(viewers[0]->objectMarkers[i]->centroid().x() * viewers[0]->width() - viewers[0]->width()/2.0);
				//float x2 = -(viewers[1]->objectMarkers[i]->centroid().x() * viewers[1]->width() - viewers[1]->width()/2.0);

				float y = (viewers[0]->objectMarkers[i]->centroid().y() * viewers[0]->height() - viewers[0]->height()/2.0);
				// Depth stand-in: the right camera's horizontal position is
				// used directly as z (not a true disparity — see the
				// commented alternative below).
				float z = -(viewers[1]->objectMarkers[i]->centroid().x() * viewers[1]->width() - viewers[1]->width() / 2.0);
				//float z = (x2 - x)*5;

				viewers[0]->objectMarkers[i]->setWorldCoordinates(x, y, z);
				viewers[1]->objectMarkers[i]->setWorldCoordinates(x, y, z);

				//emit New3DCoordinateCalculated(x, y, z);

				//strSharedData += QString("ID = %1; Position = {%2, %3, %4};\n").arg(i).arg(x).arg(y).arg(z);

// 				int *ptrID = (int*)sharedMemory->data();
// 				float *ptrX = (float*)(ptrID + sizeof(*ptrID));
// 				float *ptrY = (float*)(ptrX + sizeof(*ptrX));
// 				float *ptrZ = (float*)(ptrY + sizeof(*ptrY));
// 
// 				*ptrID = i;
// 				*ptrX = x;
// 				*ptrY = y;
// 				*ptrZ = z;

				// NOTE(review): assumes controllers has at least as many
				// entries as objectMarkers — out of bounds otherwise; verify
				// against the code that populates both lists.
				controllers[i]->setPosition(viewers[0]->objectMarkers[i]->worldCoordinates());
				//emit New3DCoordinateCalculated(x, y, z);
			}

			writeToSharedMemory();
		}

		// Signal the CaptureReady for each camera.
		for (int i = 0; i < camerasCount; i++)
		{
			captureReady[i]->release();
		}
	}
}

/// Serializes the controller list (count, then each controller via its
/// QDataStream operator<<) and copies the bytes into the shared memory
/// segment for consumption by external processes.
///
/// The payload is clamped to the segment size, so anything beyond
/// sharedMemory->size() bytes is silently truncated (the reader would see a
/// partial stream) — the 1024-byte segment must be large enough for all
/// controllers.
void HDPointsUnprojection::writeToSharedMemory()
{
	QBuffer buffer;
	buffer.open(QBuffer::ReadWrite);
	QDataStream out(&buffer);
	out << controllers.count();

	for (int i = 0; i < controllers.count(); i++)
	{
		out << *(controllers[i]);
	}

	const int size = buffer.size();

	// Guard against a segment that was never created/attached, and against a
	// failed lock: in either case data() would be null and the memcpy below
	// would dereference it.
	if (!sharedMemory->isAttached())
		return;
	if (!sharedMemory->lock())
		return;

	char *to = (char*)sharedMemory->data();
	const char *from = buffer.data().constData();
	memcpy(to, from, qMin(sharedMemory->size(), size));
	sharedMemory->unlock();
}

void HDPointsUnprojection::UnProjectingPoints()
{
	if (bRectify)
	{
		if(viewers.count()>=2 && viewers[0]->objectMarkers.count()>0)
		{
			int frameWidth = cameras[0]->frame()->width;
			int frameHeight = cameras[0]->frame()->height;

			int objCount = viewers[0]->objectMarkers.count();
			QPointF zeroLocation; //Camera Zero
			QPointF oneLocation;  //Camera One
			
			if(!imagePoint)
			{
				imagePoint = cvCreateMat(objCount,1,CV_32FC3);
				worldPoint = cvCreateMat(objCount,1,CV_32FC3);
			}

			for(int objIndex=0;objIndex<objCount;objIndex++)
			{	
				zeroLocation = viewers[0]->objectMarkers[objIndex]->centroid();
				oneLocation = viewers[1]->objectMarkers[objIndex]->centroid();

				imagePoint->data.fl[objIndex*3] = (float) zeroLocation.x()*frameWidth;
				imagePoint->data.fl[objIndex*3 +1] = (float) zeroLocation.y()*frameHeight;
				imagePoint->data.fl[objIndex*3 +2] = (float) (zeroLocation.x()*frameWidth-oneLocation.x()*frameWidth);
			}

			cvPerspectiveTransform(imagePoint,worldPoint,_matUnproject);

			for(int objIndex=0;objIndex<objCount;objIndex++)
			{	
				float x = worldPoint->data.fl[objIndex*3];
				float y = worldPoint->data.fl[objIndex*3+1];
				float z = worldPoint->data.fl[objIndex*3+2];
				
// 				if(_zBuffer.count()==AVERAGING_WINDOW)
// 				{
// 					_zBuffer.removeFirst();
// 					_zBuffer.append(z);
// 					z=0;
// 
// 					for (int i=0;i<AVERAGING_WINDOW;i++)
// 					{
// 						z+=_zBuffer[i];
// 					}
// 
// 					z /= (float)AVERAGING_WINDOW;
// 				}
				
				z+=320;
				emit New3DCoordinateCalculated(x,y,z);

				//writeToSharedMemory(QString("{%1, %2, %3}").arg(x).arg(y).arg(z));

				viewers[0]->objectMarkers[objIndex]->setWorldCoordinates(x,y,z);
				viewers[1]->objectMarkers[objIndex]->setWorldCoordinates(x,y,z);

				controllers[objIndex]->setPosition(viewers[0]->objectMarkers[objIndex]->worldCoordinates());
			}

			writeToSharedMemory();
		}
	}
}