/*==============================================================================

@file 
    FindGiliberto.cpp

==============================================================================*/

#include <jni.h>
#include <android/log.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include <stdlib.h>

#ifdef USE_OPENGL_ES_1_1
#include <GLES/gl.h>
#include <GLES/glext.h>
#else
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#endif

#include <QCAR/QCAR.h>
#include <QCAR/CameraDevice.h>
#include <QCAR/Renderer.h>
#include <QCAR/VideoBackgroundConfig.h>
#include <QCAR/Trackable.h>
#include <QCAR/Tool.h>
#include <QCAR/Tracker.h>
#include <QCAR/CameraCalibration.h>


#include "SampleUtils.h"
#include "SampleMath.h"
#include "Texture.h"
#include "CubeShaders.h"
#include "Teapot.h"
#include "Object.h"
#include "Behaviors.h"
#include <time.h>

#ifdef __cplusplus
extern "C"
{
#endif

// Textures registered from the Java activity in initApplicationNative():
int textureCount                = 0;
Texture** textures              = 0;

// OpenGL ES 2.0 specific: shader program and its attribute/uniform handles,
// cached in initRendering().
#ifdef USE_OPENGL_ES_2_0
unsigned int shaderProgramID    = 0;
GLint vertexHandle              = 0;
GLint normalHandle              = 0;
GLint textureCoordHandle        = 0;
GLint mvpMatrixHandle           = 0;
#endif

// Screen dimensions (set in initApplicationNative, refreshed in updateRendering):
unsigned int screenWidth        = 480;
unsigned int screenHeight       = 854;

// Indicates whether screen is in portrait (true) or landscape (false) mode
bool isActivityInPortraitMode   = false;

// The projection matrix used for rendering virtual objects:
QCAR::Matrix44F projectionMatrix;

// The inverted projection matrix is used for the touch behavior
// (screen-point to picking-ray unprojection in projectScreenPointToPlane).
QCAR::Matrix44F inverseProjMatrix;

// Pose of the most recently processed trackable; written each renderFrame.
QCAR::Matrix44F modelViewMatrix;

// Per-finger touch state, fed by nativeTouchEvent() and consumed by
// handleTouches() for tap-based picking.
typedef struct _TouchEvent {
	bool isActive;            // finger currently down
	int actionType;           // last ActionType received for this finger
	int pointerId;
	float x;                  // most recent position
	float y;
	float lastX;              // position from the previous event
	float lastY;
	float startX;             // position at ACTION_DOWN
	float startY;
	float tapX;               // position of the last recognized tap
	float tapY;
	unsigned long startTime;  // ms timestamp of ACTION_DOWN
	unsigned long dt;         // ms elapsed since ACTION_DOWN
	float dist2;              // squared distance traveled since ACTION_DOWN
	bool didTap;              // set when a quick, localized tap is recognized
} TouchEvent;

TouchEvent touch1, touch2;

// Action codes; mirror the constants sent from the Java side.
enum ActionType {
	ACTION_DOWN,
	ACTION_MOVE,
	ACTION_UP,
	ACTION_CANCEL
};

// Tap recognition thresholds: max duration (ms) and max squared travel (px^2).
#define MAX_TAP_TIMER 200
#define MAX_TAP_DISTANCE2 400

// Timestamp of the last tap already consumed by handleTouches().
unsigned long lastTapTime;

// Unit-cube geometry used for the oriented-bounding-box picking test.
QCAR::Vec3F personBaseVertices[8];
QCAR::Vec3F personTransformedVerticesA[8];   // scratch: cube verts in target space
QCAR::Vec3F personNormals[3];

// World transform of Giliberto's picking box (see updateGilPickingTransform).
QCAR::Matrix44F gilPickingTransform;

// Constants:
static const float kObjectScale	= 0.5f;


// Scene objects
int step = 0;                 // frame counter, incremented each renderFrame
//TeaPot *Tps;
//obj3d * obj2;
const int numGroup1 = 20;
const int numGroup2 = 20;
TeaPot group1[numGroup1];     // group steered by flockLead toward gil
TeaPot group2[numGroup2];     // group steered by random wander
TeaPot gil;                   // the character the player must find and tap
ObjObject miobjeto;           // buildings model loaded from the sdcard
//ObjObject lego;
JNIEXPORT int JNICALL
Java_mx_itesm_gda_giliberto_FindGiliberto_getOpenGlEsVersionNative(JNIEnv *, jobject)
{
	// Reports to Java which OpenGL ES version this library was compiled for
	// (fixed at build time by the USE_OPENGL_ES_1_1 macro).
#ifdef USE_OPENGL_ES_1_1        
	return 1;
#else
	return 2;
#endif
}


JNIEXPORT void JNICALL
Java_mx_itesm_gda_giliberto_FindGiliberto_setActivityPortraitMode(JNIEnv *, jobject, jboolean isPortrait)
{
	// Records the activity orientation; read by configureVideoBackground()
	// to size the camera background correctly.
	isActivityInPortraitMode = isPortrait;
}

void
initPersonBaseVertices()
{
	// Fill personBaseVertices with the eight corners of the cube spanning
	// [-1,1]^3. Bit 0 of the index selects the x sign, bit 1 the y sign and
	// bit 2 the z sign, which reproduces the original explicit corner order:
	// (+,+,+), (-,+,+), (+,-,+), (-,-,+), (+,+,-), (-,+,-), (+,-,-), (-,-,-).
	for (int corner = 0; corner < 8; ++corner)
	{
		float x = (corner & 1) ? -1.0f : 1.0f;
		float y = (corner & 2) ? -1.0f : 1.0f;
		float z = (corner & 4) ? -1.0f : 1.0f;
		personBaseVertices[corner] = QCAR::Vec3F(x, y, z);
	}
}


void
initPersonNormals()
{
	// Store one unit normal per principal axis of the cube (X, Y, Z):
	// axis i gets a 1 in component i and 0 elsewhere.
	for (int axis = 0; axis < 3; ++axis)
	{
		personNormals[axis] = QCAR::Vec3F(axis == 0 ? 1.0f : 0.0f,
				axis == 1 ? 1.0f : 0.0f,
				axis == 2 ? 1.0f : 0.0f);
	}
}

void updateGilPickingTransform()
{
	// Rebuilds the world transform of Giliberto's picking bounding box from
	// his current position and heading. Called once per rendered frame so
	// the tap test in handleTouches() tracks the moving character.

	// Reset the picking transform to the identity matrix
	    gilPickingTransform = SampleMath::Matrix44FIdentity();
	    float* transformPtr = &gilPickingTransform.data[0];

	    // The following transformations happen in reverse order
	    // For picking, we want a slightly wider target to improve responsiveness
	    // We can also skip the tilting transformation, since picking only occurs when the dominoes are upright
	    SampleUtils::translatePoseMatrix(gil.getX(), gil.getY(), 0.0f, transformPtr);

	    // Heading derived from the steering velocity; note atan2(x, y) (not
	    // the usual (y, x)) measures the angle from the +Y axis.
	    float angle = -atan2(gil._vehicle.getCurrVel().x,gil._vehicle.getCurrVel().y);
		// 3.1416 is an approximation of pi (radians -> degrees); the ~7e-6
		// error is irrelevant for a picking box.
		float dangle = angle*180.0/3.1416 + gil.getGZ();

	    SampleUtils::rotatePoseMatrix(dangle, 0, 0, 1, transformPtr);
	    // Raise the box 15 units so it straddles the model, then scale the
	    // unit cube to a 6 x 8 x 15 half-extent target (deliberately wide).
	    SampleUtils::translatePoseMatrix(0.0f, 0.0f, 15.0f, transformPtr);
	    SampleUtils::scalePoseMatrix(3.0f * 2, 8.0f, 15.0f, transformPtr);
}

void setGilTapped(bool tapped)
{
	// When the picking ray hit Giliberto, switch him to texture slot 1
	// (the "found" look). A miss leaves his current texture untouched.
	if (!tapped)
		return;
	gil.setTexture(1);
}

void handleTouches();

JNIEXPORT void JNICALL
Java_mx_itesm_gda_giliberto_FindGiliberto_onQCARInitializedNative(JNIEnv *, jobject)
{
	// Called from Java once the QCAR SDK finished initializing; prepares the
	// static unit-cube geometry used by the picking code.

	// Comment in to enable tracking of up to 2 targets simultaneously and
	// split the work over multiple frames:
	// QCAR::setHint(QCAR::HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 2);
	// QCAR::setHint(QCAR::HINT_IMAGE_TARGET_MULTI_FRAME_ENABLED, 1);

	initPersonBaseVertices();
	initPersonNormals();
}


JNIEXPORT void JNICALL
Java_mx_itesm_gda_giliberto_FindGilibertoRenderer_renderFrame(JNIEnv *, jobject)
{
	// Per-frame GL callback: draws the camera background, then for every
	// tracked target updates the steering simulation, draws all objects,
	// refreshes the picking transform and handles pending taps.
	//LOG("Java_mx_itesm_gda_giliberto_GLRenderer_renderFrame");

	// Clear color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Render video background:
	QCAR::State state = QCAR::Renderer::getInstance().begin();

#ifdef USE_OPENGL_ES_1_1
	// Set GL11 flags:
	glEnableClientState(GL_VERTEX_ARRAY);
	glEnableClientState(GL_NORMAL_ARRAY);
	glEnableClientState(GL_TEXTURE_COORD_ARRAY);

	glEnable(GL_TEXTURE_2D);
	glDisable(GL_LIGHTING);

#endif

	glEnable(GL_DEPTH_TEST);
	glEnable(GL_CULL_FACE);

	// Did we find any trackables this frame?
	for(int tIdx = 0; tIdx < state.getNumActiveTrackables(); tIdx++)
	{
		// Get the trackable:
		const QCAR::Trackable* trackable = state.getActiveTrackable(tIdx);
		modelViewMatrix =
				QCAR::Tool::convertPose2GLMatrix(trackable->getPose());

		// Choose the texture based on the target name:
		// NOTE(review): textureIndex is computed but textures[0] is used on
		// the next line — probably intended textures[textureIndex]; confirm.
		// thisTexture is also unused in the ES 2.0 path below.
		int textureIndex = (!strcmp(trackable->getName(), "stones")) ? 0 : 1;
		const Texture* const thisTexture = textures[0];

#ifdef USE_OPENGL_ES_1_1
		// NOTE(review): this ES 1.1 branch references Tps, whose declaration
		// is commented out at file scope — it does not compile as-is.
		// Load projection matrix:
		glMatrixMode(GL_PROJECTION);
		glLoadMatrixf(projectionMatrix.data);

		// Load model view matrix:
		glMatrixMode(GL_MODELVIEW);
		glLoadMatrixf(modelViewMatrix.data);
		/*
        glTranslatef(0.0f, 0.0f, kObjectScale);
        glScalef(kObjectScale, kObjectScale, kObjectScale);

        // Draw object:
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &teapotTexCoords[0]);
        glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &teapotVertices[0]);
        glNormalPointer(GL_FLOAT, 0,  (const GLvoid*) &teapotNormals[0]);
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,(const GLvoid*) &teapotIndices[0]);
		 */
		//glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);

		for (int i = 0 ; i< 5; i++)
		{
			// Re-steer each teapot with ~20% probability per frame.
			if(float(rand())/RAND_MAX > 0.80)
			{
				Tps[i]._vehicle.setCurrVel( SteeringBehaviors::wander(&Tps[i]._vehicle, (double)50.0, (double)0.0 ));
			}
			Tps[i]._vehicle.update(1/20.0);

			Tps[i].draw();

		}
		++step;

		/*
        TeaPot tp ;
        tp.setColor(0.0,1.0,0.0,1.0);
        tp.setX(130.0);
        tp.setY(100.0);
        tp.draw();
		 */
#else

		// Fixed simulation step for the steering behaviors.
		float update = 1;
		// Snapshot group 1's vehicles so flockLead sees consistent neighbor
		// state while the originals are being updated in the loop below.
		Vehicle Group1Vehicles[numGroup1];
		for (int i = 0 ; i< numGroup1; i++)
		{
			Group1Vehicles[i] = group1[i]._vehicle;
		}
		// Group 1 flocks, led by gil.
		for (int i = 0 ; i< numGroup1; i++)
		{
			group1[i]._vehicle.setCurrVel( SteeringBehaviors::flockLead(&group1[i]._vehicle, &Group1Vehicles[0], &gil._vehicle, numGroup1 , 90.0, 100.0, 10.0, 10.0));
			group1[i]._vehicle.update(update);
			group1[i].draw(&modelViewMatrix,&projectionMatrix,textures);
		}

		// Group 2 wanders; each member is re-steered with ~20% probability
		// per frame so the motion looks irregular.
		for (int i = 0 ; i< numGroup2; i++)
		{
			if(float(rand())/RAND_MAX > 0.80)
			{
				group2[i]._vehicle.setCurrVel( SteeringBehaviors::wander(&group2[i]._vehicle, (double)100.0, (double)20.0 ));
			}
			group2[i]._vehicle.update(update);
			group2[i].draw(&modelViewMatrix,&projectionMatrix,textures);
		}

		// Gil wanders too, re-steered with ~10% probability per frame.
		if(float(rand())/RAND_MAX > 0.90)
		{
			gil._vehicle.setCurrVel( SteeringBehaviors::wander(&gil._vehicle, (double)100.0, (double)20.0 ));
		}
		gil._vehicle.update(update);
		gil.draw(&modelViewMatrix,&projectionMatrix,textures);

		// Static background model.
		miobjeto.draw(&modelViewMatrix,&projectionMatrix,textures);
		//lego.draw(&modelViewMatrix,&projectionMatrix,textures);

		++step;
		SampleUtils::checkGlError("FindGiliberto renderFrame");

		// Keep the picking box aligned with gil, then consume any tap.
		updateGilPickingTransform();

		handleTouches();
#endif

	}

	glDisable(GL_DEPTH_TEST);

#ifdef USE_OPENGL_ES_1_1        
	glDisable(GL_TEXTURE_2D);
	glDisableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_NORMAL_ARRAY);
	glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#else
	glDisableVertexAttribArray(vertexHandle);
	glDisableVertexAttribArray(normalHandle);
	glDisableVertexAttribArray(textureCoordHandle);
#endif

	QCAR::Renderer::getInstance().end();
}



void
configureVideoBackground()
{
	// Sizes the camera-preview background so the video's width fills the
	// screen's long edge, preserving the camera's aspect ratio.

	// Query the camera's default video mode:
	QCAR::VideoMode videoMode = QCAR::CameraDevice::getInstance()
			.getVideoMode(QCAR::CameraDevice::MODE_DEFAULT);

	// Build the background configuration, centered on screen:
	QCAR::VideoBackgroundConfig config;
	config.mEnabled = true;
	config.mSynchronous = true;
	config.mPosition.data[0] = 0.0f;
	config.mPosition.data[1] = 0.0f;

	if (isActivityInPortraitMode)
	{
		// Portrait: video is rotated, so its width maps to screen height.
		float scale = screenHeight / (float)videoMode.mWidth;
		config.mSize.data[0] = videoMode.mHeight * scale;
		config.mSize.data[1] = screenHeight;
	}
	else
	{
		// Landscape: video width maps directly to screen width.
		float scale = screenWidth / (float)videoMode.mWidth;
		config.mSize.data[0] = screenWidth;
		config.mSize.data[1] = videoMode.mHeight * scale;
	}

	// Hand the configuration to the renderer:
	QCAR::Renderer::getInstance().setVideoBackgroundConfig(config);
}


JNIEXPORT void JNICALL
Java_mx_itesm_gda_giliberto_FindGiliberto_initApplicationNative(
		JNIEnv* env, jobject obj, jint width, jint height)
{
	// One-time native setup: caches the screen size, pulls the textures
	// registered on the Java side, loads the buildings model and seeds the
	// steering state of every character.

	// Store screen dimensions
	screenWidth = width;
	screenHeight = height;

	// Handle to the activity class:
	jclass activityClass = env->GetObjectClass(obj);

	jmethodID getTextureCountMethodID = env->GetMethodID(activityClass,
			"getTextureCount", "()I");
	if (getTextureCountMethodID == 0)
	{
		LOG("Function getTextureCount() not found.");
		return;
	}

	// BUG FIX: getTextureCount() returns a Java int (signature "()I"), so it
	// must be invoked with CallIntMethod. The previous CallObjectMethod call
	// on a primitive-returning method has undefined behavior per the JNI spec.
	textureCount = (int) env->CallIntMethod(obj, getTextureCountMethodID);
	if (!textureCount)
	{
		LOG("getTextureCount() returned zero.");
		return;
	}

	textures = new Texture*[textureCount];

	jmethodID getTextureMethodID = env->GetMethodID(activityClass,
			"getTexture", "(I)Lmx/itesm/gda/giliberto/Texture;");

	if (getTextureMethodID == 0)
	{
		LOG("Function getTexture() not found.");
		return;
	}

	// Register the textures
	for (int i = 0; i < textureCount; ++i)
	{
		jobject textureObject = env->CallObjectMethod(obj, getTextureMethodID, i);
		if (textureObject == NULL)
		{
			LOG("GetTexture() returned zero pointer");
			return;
		}

		textures[i] = Texture::create(env, textureObject);
	}


	//vector<personaje*> personitas;
	/*
        LOG("carga obj");
    	obj2 = loadOBJ("./sdcard/juego/Edificios.obj");
    	LOG("%d",obj2);
    	if(obj2){
    		LOG("obj cargado");
    		LOG("%d", obj2->numVerts);
    		LOG("%f",obj2->verts[0]);
    	}
    	else{
    		LOG("obj null");
    	}
	 */

	// Load the buildings model from the sdcard and orient it to the target.
	miobjeto.loadObjObject("./sdcard/juego/Edificios.obj");
	//miobjeto.loadObjObject("./sdcard/juego/legook.obj");
	miobjeto.setGZ(-90.0);
	miobjeto.setTexture(7);
	//lego.loadObjObject("./sdcard/juego/lego1.obj");


	// Seed every character: random speed in [0.1, 0.6], random initial
	// velocity in [-25, 25]^2, and a random texture from slots 1..6.
	srand((unsigned) time(0));
	for (int i = 0 ; i< numGroup1 ; i++)
	{
		group1[i]._vehicle.setMaxSpeed( float(rand())/RAND_MAX * 0.5+0.1 );
		group1[i].setGZ(90.0);
		group1[i].setSize(0.3);
		group1[i]._vehicle.setCurrVel(Vector2D( Vector2D(float(rand())/RAND_MAX * 50 - 25, float(rand())/RAND_MAX * 50 - 25)));
		group1[i].setTexture(rand()%6+1);
	}
	for (int i = 0 ; i< numGroup2 ; i++)
	{
		group2[i]._vehicle.setMaxSpeed( float(rand())/RAND_MAX * 0.5+0.1 );
		group2[i].setGZ(90.0);
		group2[i].setSize(0.3);
		group2[i]._vehicle.setCurrVel(Vector2D( Vector2D(float(rand())/RAND_MAX * 50 - 25, float(rand())/RAND_MAX * 50 - 25)));
		group2[i].setTexture(rand()%6+1);
	}

	// Gil is faster than the crowd and wears the distinctive texture 0.
	gil._vehicle.setMaxSpeed( 0.9 );
	gil.setGZ(90.0);
	gil.setSize(0.3);
	gil.setTexture(0);
	//lego.setTexture(0);
	gil._vehicle.setCurrVel(Vector2D( Vector2D(float(rand())/RAND_MAX * 50 - 25, float(rand())/RAND_MAX * 50 - 25)));

}


JNIEXPORT void JNICALL
Java_mx_itesm_gda_giliberto_FindGiliberto_deinitApplicationNative(
		JNIEnv* env, jobject obj)
{
	LOG("Java_mx_itesm_gda_giliberto_FindGiliberto_deinitApplicationNative");

	// Nothing to release if the textures were never allocated.
	if (textures == 0)
		return;

	// Free each Texture, then the pointer array itself, and reset the
	// globals so initApplicationNative can safely run again.
	for (int i = 0; i < textureCount; ++i)
	{
		delete textures[i];
		textures[i] = NULL;
	}

	delete[] textures;
	textures = NULL;
	textureCount = 0;
	//delete [] Tps;
}


JNIEXPORT void JNICALL
Java_mx_itesm_gda_giliberto_FindGiliberto_startCamera(JNIEnv *,
		jobject)
{
	// Brings up the camera pipeline in the order QCAR requires:
	// init -> configure background -> select mode -> start -> start tracker,
	// then caches the (inverse) projection matrix used for rendering/picking.
	LOG("Java_mx_itesm_gda_giliberto_FindGiliberto_startCamera");

	// Initialize the camera:
	if (!QCAR::CameraDevice::getInstance().init())
		return;

	// Configure the video background
	configureVideoBackground();

	// Select the default mode:
	if (!QCAR::CameraDevice::getInstance().selectVideoMode(
			QCAR::CameraDevice::MODE_DEFAULT))
		return;

	// Start the camera:
	if (!QCAR::CameraDevice::getInstance().start())
		return;

	// Start the tracker:
	QCAR::Tracker::getInstance().start();

	// Cache the projection matrix (near plane 2, far plane 2000):
	const QCAR::Tracker& tracker = QCAR::Tracker::getInstance();
	const QCAR::CameraCalibration& cameraCalibration =
			tracker.getCameraCalibration();
	projectionMatrix = QCAR::Tool::getProjectionGL(cameraCalibration, 2.0f,
			2000.0f);

	// Invert the projection matrix; used by projectScreenPointToPlane()
	// to unproject screen taps into picking rays.
	inverseProjMatrix = SampleMath::Matrix44FInverse(projectionMatrix);
}


JNIEXPORT void JNICALL
Java_mx_itesm_gda_giliberto_FindGiliberto_stopCamera(JNIEnv *,
		jobject)
{
	// Tears down the camera pipeline: tracker first, then the camera device
	// (stop before deinit, mirroring the startup order in startCamera).
	LOG("Java_mx_itesm_gda_giliberto_FindGiliberto_stopCamera");

	QCAR::Tracker::getInstance().stop();

	QCAR::CameraDevice::getInstance().stop();
	QCAR::CameraDevice::getInstance().deinit();
}


JNIEXPORT void JNICALL
Java_mx_itesm_gda_giliberto_FindGilibertoRenderer_initRendering(
		JNIEnv* env, jobject obj)
{
	// GL-context setup: uploads all registered textures and (for ES 2.0)
	// builds the shader program, caching its attribute/uniform handles.
	LOG("Java_mx_itesm_gda_giliberto_FindGilibertoRenderer_initRendering");

	// Define clear color; alpha 0 when QCAR composites over other views.
	glClearColor(0.0f, 0.0f, 0.0f, QCAR::requiresAlpha() ? 0.0f : 1.0f);

	// Now generate the OpenGL texture objects and add settings
	for (int i = 0; i < textureCount; ++i)
	{
		glGenTextures(1, &(textures[i]->mTextureID));
		glBindTexture(GL_TEXTURE_2D, textures[i]->mTextureID);
		glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
		glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, textures[i]->mWidth,
				textures[i]->mHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
				(GLvoid*)  textures[i]->mData);
	}
#ifndef USE_OPENGL_ES_1_1

	shaderProgramID     = SampleUtils::createProgramFromBuffer(cubeMeshVertexShader,
			cubeFragmentShader);

	vertexHandle        = glGetAttribLocation(shaderProgramID,
			"vertexPosition");
	normalHandle        = glGetAttribLocation(shaderProgramID,
			"vertexNormal");
	textureCoordHandle  = glGetAttribLocation(shaderProgramID,
			"vertexTexCoord");
	mvpMatrixHandle     = glGetUniformLocation(shaderProgramID,
			"modelViewProjectionMatrix");

	// Hand the shader program to every drawable object.
	// FIX: moved inside the ES 2.0 guard — shaderProgramID is only declared
	// under USE_OPENGL_ES_2_0, so an ES 1.1 build previously failed to
	// compile here. (Also removed the unused local `int number = 5`.)
	for( int i=0 ; i<numGroup1  ; i++ )
		group1[i].setShaderProgramID( shaderProgramID );
	for( int i=0 ; i<numGroup2  ; i++ )
		group2[i].setShaderProgramID( shaderProgramID );
	miobjeto.setShaderProgramID(shaderProgramID);
	//lego.setShaderProgramID(shaderProgramID);
	gil.setShaderProgramID( shaderProgramID );
#endif
}


JNIEXPORT void JNICALL
Java_mx_itesm_gda_giliberto_FindGilibertoRenderer_updateRendering(
		JNIEnv* env, jobject obj, jint width, jint height)
{
	// Called when the GL surface changes size (e.g. rotation): record the
	// new dimensions and rebuild the video-background configuration so the
	// camera preview keeps filling the screen.
	LOG("Java_mx_itesm_gda_giliberto_FindGilibertoRenderer_updateRendering");

	screenWidth  = width;
	screenHeight = height;

	configureVideoBackground();
}

// ----------------------------------------------------------------------------
// Touch projection
// ----------------------------------------------------------------------------

/*
void
setPersonitaSeleccionada(Personaje* person)
{
    personitaSeleccionada = person;
}
 */

bool
linePlaneIntersection(QCAR::Vec3F lineStart, QCAR::Vec3F lineEnd,
		QCAR::Vec3F pointOnPlane, QCAR::Vec3F planeNormal,
		QCAR::Vec3F &intersection)
{
	// Intersects the infinite line through lineStart/lineEnd with the plane
	// defined by pointOnPlane and planeNormal.
	// Returns true and writes `intersection` on success; returns false
	// (leaving `intersection` untouched) when the line is parallel to the
	// plane.
	QCAR::Vec3F lineDir = SampleMath::Vec3FSub(lineEnd, lineStart);
	lineDir = SampleMath::Vec3FNormalize(lineDir);

	QCAR::Vec3F planeDir = SampleMath::Vec3FSub(pointOnPlane, lineStart);

	float n = SampleMath::Vec3FDot(planeNormal, planeDir);
	float d = SampleMath::Vec3FDot(planeNormal, lineDir);

	if (fabs(d) < 0.00001) {
		// Line is (near-)parallel to plane: no unique intersection
		return false;
	}

	// Signed distance along lineDir from lineStart to the plane.
	float dist = n / d;

	QCAR::Vec3F offset = SampleMath::Vec3FScale(lineDir, dist);
	intersection = SampleMath::Vec3FAdd(lineStart, offset);

	// BUG FIX: the function previously fell off the end without returning a
	// value on this path — undefined behavior for a non-void function.
	return true;
}

void
projectScreenPointToPlane(QCAR::Vec2F point, QCAR::Vec3F planeCenter, QCAR::Vec3F planeNormal,
		QCAR::Vec3F &intersection, QCAR::Vec3F &lineStart, QCAR::Vec3F &lineEnd)
{
	// Unprojects a screen-space point into target space: outputs the picking
	// segment from the near plane (lineStart) to the far plane (lineEnd) and
	// its intersection with the given plane. Relies on the globals
	// inverseProjMatrix and modelViewMatrix set elsewhere in this file.

	// Window Coordinates to Normalized Device Coordinates
	QCAR::VideoBackgroundConfig config = QCAR::Renderer::getInstance().getVideoBackgroundConfig();

	float halfScreenWidth = screenWidth / 2.0f;
	float halfScreenHeight = screenHeight / 2.0f;

	float halfViewportWidth = config.mSize.data[0] / 2.0f;
	float halfViewportHeight = config.mSize.data[1] / 2.0f;

	// y is negated: window coordinates grow downward, NDC grows upward.
	float x = (point.data[0] - halfScreenWidth) / halfViewportWidth;
	float y = (point.data[1] - halfScreenHeight) / halfViewportHeight * -1;

	QCAR::Vec4F ndcNear(x, y, -1, 1);
	QCAR::Vec4F ndcFar(x, y, 1, 1);

	// Normalized Device Coordinates to Eye Coordinates
	QCAR::Vec4F pointOnNearPlane = SampleMath::Vec4FTransform(ndcNear, inverseProjMatrix);
	QCAR::Vec4F pointOnFarPlane = SampleMath::Vec4FTransform(ndcFar, inverseProjMatrix);
	// Perspective divide by w. NOTE(review): Vec3FDiv is applied to Vec4F
	// values here — presumably it divides the stored components in place;
	// confirm against SampleMath.
	pointOnNearPlane = SampleMath::Vec3FDiv(pointOnNearPlane, pointOnNearPlane.data[3]);
	pointOnFarPlane = SampleMath::Vec3FDiv(pointOnFarPlane, pointOnFarPlane.data[3]);

	// Eye Coordinates to Object Coordinates
	QCAR::Matrix44F inverseModelViewMatrix = SampleMath::Matrix44FInverse(modelViewMatrix);

	QCAR::Vec4F nearWorld = SampleMath::Vec4FTransform(pointOnNearPlane, inverseModelViewMatrix);
	QCAR::Vec4F farWorld = SampleMath::Vec4FTransform(pointOnFarPlane, inverseModelViewMatrix);

	lineStart = QCAR::Vec3F(nearWorld.data[0], nearWorld.data[1], nearWorld.data[2]);
	lineEnd = QCAR::Vec3F(farWorld.data[0], farWorld.data[1], farWorld.data[2]);
	linePlaneIntersection(lineStart, lineEnd, planeCenter, planeNormal, intersection);
}

bool
isSeparatingAxisLine(QCAR::Vec3F axis, QCAR::Vec3F pointA, QCAR::Vec3F pointB)
{
	// Separating-axis test between the globally stored transformed box
	// vertices (personTransformedVerticesA) and the segment pointA-pointB:
	// returns true when their projections onto `axis` do not overlap.

	// A (near-)zero axis carries no information; treat it as non-separating.
	float lenSq = axis.data[0] * axis.data[0]
			+ axis.data[1] * axis.data[1]
			+ axis.data[2] * axis.data[2];
	if (lenSq < 0.00001)
		return false;

	// Projection interval of the box vertices.
	float boxMin = SampleMath::Vec3FDot(personTransformedVerticesA[0], axis);
	float boxMax = boxMin;
	for (int v = 1; v < 8; v++) {
		float proj = SampleMath::Vec3FDot(personTransformedVerticesA[v], axis);
		if (proj < boxMin)
			boxMin = proj;
		else if (proj > boxMax)
			boxMax = proj;
	}

	// Projection interval of the line segment's two endpoints.
	float projA = SampleMath::Vec3FDot(pointA, axis);
	float projB = SampleMath::Vec3FDot(pointB, axis);
	float lineMin = (projA < projB) ? projA : projB;
	float lineMax = (projA < projB) ? projB : projA;

	// The axis separates exactly when the two intervals are disjoint.
	return (boxMax < lineMin) || (boxMin > lineMax);
}

bool
checkIntersectionLine(QCAR::Matrix44F transformA, QCAR::Vec3F pointA, QCAR::Vec3F pointB)
{
	// Separating-axis test between the oriented bounding box described by
	// `transformA` (applied to the unit cube in personBaseVertices) and the
	// segment pointA-pointB. Returns true when they intersect.
	// Side effect: fills the global personTransformedVerticesA, which
	// isSeparatingAxisLine() reads.

	transformA = SampleMath::Matrix44FTranspose(transformA);
	QCAR::Vec3F lineDir = SampleMath::Vec3FSub(pointB, pointA);

	// Candidate axes: the box's three face normals...
	QCAR::Vec3F normalA1 = SampleMath::Vec3FTransformNormal(personNormals[0], transformA);
	QCAR::Vec3F normalA2 = SampleMath::Vec3FTransformNormal(personNormals[1], transformA);
	QCAR::Vec3F normalA3 = SampleMath::Vec3FTransformNormal(personNormals[2], transformA);

	// ...and their cross products with the line direction.
	QCAR::Vec3F crossA1 = SampleMath::Vec3FCross(normalA1, lineDir);
	QCAR::Vec3F crossA2 = SampleMath::Vec3FCross(normalA2, lineDir);
	QCAR::Vec3F crossA3 = SampleMath::Vec3FCross(normalA3, lineDir);

	// Transform the cube corners into the same space before testing axes.
	for (int i = 0; i < 8; i++) {
		personTransformedVerticesA[i] = SampleMath::Vec3FTransform(personBaseVertices[i], transformA);
	}

	// Any single separating axis proves there is no intersection.
	if (isSeparatingAxisLine(normalA1, pointA, pointB)) return false;
	if (isSeparatingAxisLine(normalA2, pointA, pointB)) return false;
	if (isSeparatingAxisLine(normalA3, pointA, pointB)) return false;

	if (isSeparatingAxisLine(crossA1, pointA, pointB)) return false;
	if (isSeparatingAxisLine(crossA2, pointA, pointB)) return false;
	if (isSeparatingAxisLine(crossA3, pointA, pointB)) return false;

	return true;
}

unsigned long
getCurrentTimeMS() {
	// Millisecond timestamp for touch/tap timing. Uses the monotonic clock:
	// unlike the previous gettimeofday() (whose header was never included
	// here), CLOCK_MONOTONIC cannot jump backwards when the user or NTP
	// adjusts the wall clock, which would corrupt tap-duration measurement.
	// Callers only compare timestamps and compute deltas, so the change of
	// epoch is harmless.
	struct timespec ts;
	clock_gettime(CLOCK_MONOTONIC, &ts);
	unsigned long ms = (unsigned long) ts.tv_sec * 1000UL;
	ms += (unsigned long) (ts.tv_nsec / 1000000L);
	return ms;
}

void
handleTouches()
{
	// Consumes the most recent unhandled tap on finger 0: casts a picking
	// ray through the tap point and marks Giliberto when the ray hits his
	// bounding box. Called from renderFrame once per tracked frame.

	// If there is a new tap that we haven't handled yet:
	if (touch1.didTap && touch1.startTime > lastTapTime) {

		// Find the start and end points in world space for the tap.
		// These lie on the near and far plane and form the picking segment.
		QCAR::Vec3F intersection, lineStart, lineEnd;
		projectScreenPointToPlane(QCAR::Vec2F(touch1.tapX, touch1.tapY),
				QCAR::Vec3F(0, 0, 0), QCAR::Vec3F(0, 0, 1),
				intersection, lineStart, lineEnd);

		// (Removed the unused local `float dist;`.)
		bool intersect = checkIntersectionLine(gilPickingTransform, lineStart, lineEnd);
		setGilTapped(intersect);

		// Store the timestamp for this tap so we know we've handled it
		lastTapTime = touch1.startTime;
	}
}

JNIEXPORT void JNICALL
Java_mx_itesm_gda_giliberto_FindGiliberto_nativeTouchEvent(JNIEnv* , jobject, jint actionType, jint pointerId, jfloat x, jfloat y)
{
	// Touch state machine, driven from the Java activity. Tracks up to two
	// fingers (touch1/touch2) and recognizes taps: a touch that ends within
	// MAX_TAP_TIMER ms having moved less than sqrt(MAX_TAP_DISTANCE2) px.
	TouchEvent* touchEvent;

	// Determine which finger this event represents
	if (pointerId == 0) {
		touchEvent = &touch1;
	} else if (pointerId == 1) {
		touchEvent = &touch2;
	} else {
		// Only the first two fingers participate; ignore the rest.
		return;
	}

	if (actionType == ACTION_DOWN) {
		// On touch down, reset the following:
		touchEvent->lastX = x;
		touchEvent->lastY = y;
		touchEvent->startX = x;
		touchEvent->startY = y;
		touchEvent->startTime = getCurrentTimeMS();
		touchEvent->didTap = false;
	} else {
		// Store the last event's position (NOTE: the distance check below
		// therefore uses the previous position, not this event's x/y).
		touchEvent->lastX = touchEvent->x;
		touchEvent->lastY = touchEvent->y;
	}

	// Store the lifetime of the touch, used for tap recognition
	unsigned long time = getCurrentTimeMS();
	touchEvent->dt = time - touchEvent->startTime;

	// Store the distance squared from the initial point, for tap recognition
	float dx = touchEvent->lastX - touchEvent->startX;
	float dy = touchEvent->lastY - touchEvent->startY;
	touchEvent->dist2 = dx * dx + dy * dy;

	if (actionType == ACTION_UP) {
		// On touch up, this touch is no longer active
		touchEvent->isActive = false;

		// Determine if this touch up ends a tap gesture
		// The tap must be quick and localized
		if (touchEvent->dt < MAX_TAP_TIMER && touchEvent->dist2 < MAX_TAP_DISTANCE2) {
			touchEvent->didTap = true;
			// The tap is anchored at the DOWN position; handleTouches()
			// uses tapX/tapY to cast the picking ray.
			touchEvent->tapX = touchEvent->startX;
			touchEvent->tapY = touchEvent->startY;
		}
	} else {
		// On touch down or move, this touch is active
		touchEvent->isActive = true;
	}

	// Set the touch information for this event
	touchEvent->actionType = actionType;
	touchEvent->pointerId = pointerId;
	touchEvent->x = x;
	touchEvent->y = y;
}

#ifdef __cplusplus
}
#endif
