/*==============================================================================
            Copyright (c) 2013 Moritz Hader, Marcel Schröder
            All Rights Reserved.
            
	@file  HeimfallARNative.cpp

	@brief Defines the native function calls used by the HeimdallRenderer
   
   This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.

==============================================================================*/




//Include the libraries that are necessary to run native applications
#include <jni.h>

#include <android/log.h> //required for the __android_log_print calls used throughout this file
#include <stdio.h>

//Include the special math-library to for 3D matrix and vector operations
#include <egl/HeimdallMath.h>

//Include the Engine and EGL specific Libraries
#include <egl/ShaderManagementUnit.h>
#include <egl/HeimdallGLUtilities.h>
#include <egl/MatrixManagementUnit.h>
#include <engine/HeimdallNative3DObject.h>
#include <engine/HeimdallObjectContainer.h>


//Include the OpenGL_ES-libraries for rendering and shader-setup
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>


//Include the QCAR-Libraries
#include <QCAR/QCAR.h>
#include <QCAR/CameraDevice.h>
#include <QCAR/Renderer.h>
#include <QCAR/VideoBackgroundConfig.h>
#include <QCAR/Trackable.h>
#include <QCAR/Tool.h>
#include <QCAR/Tracker.h>
#include <QCAR/TrackerManager.h>
#include <QCAR/ImageTracker.h>
#include <QCAR/CameraCalibration.h>
#include <QCAR/DataSet.h>

extern "C"{


	//Matrix definitions (4x4, OpenGL column-major layout; the commented
	//initializers are sample values kept for reference/debugging)
	H44DMatrix projection; //= {1.9672f,0.0f,0.0f,0.0f,0.0f, -2.622933f, 0.0f,0.0f,0.0f,0.0f,1.002002f, 1.0f,0.0f,0.0f,-4.004004f,0.0f};
	H44DMatrix model; //= {2.919269f,0.398720f,-0.464637f,0.0f,0.607453f,-2.182344f,1.966819f,0.0f,-0.076596f,-2.019504f,-2.217146f,0.0f,29.380148f,45.854958f,764.186279f,1.0f};
	H44DMatrix mvp; //NOTE(review): written nowhere in this file - possibly reserved for a combined model-view-projection; verify before removing

	//Owns the compiled shader programs; loaded once in renderInit
	ShaderManagementUnit shaderMU; //Should I make it a pointer or within the heap???

	MatrixManagementUnit matrixMU;

	//Local Object-Node: linked list of native mirrors of the Java 3D objects
	HeimdallObjectContainer localContainer;

	//QCAR Data: the active trackable data set, created in LoadQCARTrackables
	QCAR::DataSet* localTrackable = NULL; //Explicitly mark it as empty

	//Screen metrics, filled by LoadScreenDimensions before the camera is set up
	int deviceWidth, deviceHeight;

	//Near/far clipping planes used to build the projection matrix
	float nearPlane = 2.0f;
	float farPlane = 2000.0f;


	/**
	 * One-time setup of the GL state and the shader programs used by the
	 * renderer. Usually called once, but Android may re-create the View and
	 * call it again, so some memory-handling must be implemented here.
	 */
	JNIEXPORT void JNICALL
	Java_de_ibm_dhbw_heimdall_egl_HeimdallRenderer_renderInit(JNIEnv *env, jobject obj){

		__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "Entering Java_de_ibm_dhbw_heimdall_egl_HeimdallRenderer_renderInit in native");

		//Static part - this must always be done before rendering is possible:
		//clear to black and fully transparent so the camera background shows through
		glClearColor(0.0f, 0.0f, 0.0f, 0.0f);

		//Compile and register the shader programs.

		//Flat (uniform color) shader - only needs the vertex attribute
		shaderMU.LoadShader(HD_SHADER_UNIFORM_COLOR,
				HD_VERTEXSHADER_UNIFORM_COLOR, HD_FRAGMENTSHADER_UNIFORM_COLOR,
				HD_ATTRIBUTES_VERTEX,"HVertex",
				HD_ATTRIBUTES_ENDLIST);

		//Primitive diffuse shader (currently unused)
		/*shaderMU.LoadShader(HD_SHADER_PRIMITIVE_SHADING,
				HD_VERTEXSHADER_PRIMITIVE_SHADING, HD_FRAGMENTSHADER_PRIMITIVE_SHADING,
				HD_ATTRIBUTES_VERTEX, "HVertex",
				HD_ATTRIBUTES_COLOR, "HColor",
				HD_ATTRIBUTES_ENDLIST);*/

		//Diffuse lighting shader - needs vertices and normals
		shaderMU.LoadShader(HD_SHADER_POINT_LIGHT,
				HD_VERTEXSHADER_POINT_LIGHT, HD_FRAGMENTSHADER_POINT_LIGHT,
				HD_ATTRIBUTES_VERTEX, "HVertex",
				HD_ATTRIBUTES_NORMAL, "HNormal",
				HD_ATTRIBUTES_ENDLIST);
	}

	/**
	 * Called from the onDraw method of the GLSurfaceView.Renderer: renders the
	 * camera background, draws all registered 3D objects on the detected
	 * markers and reports the IDs of the markers seen in this frame.
	 *
	 * Change 18.04.2013: Method now returns an jintArray that contains the marker-IDs
	 * detected during the last iteration. This will prevent the frequent calls to the
	 * java-sided method.
	 *
	 * @return a Java int-array with one entry per active trackable, or NULL if
	 *         the array could not be allocated (an OutOfMemoryError is then
	 *         already pending on the Java side).
	 */
	JNIEXPORT jintArray JNICALL
	Java_de_ibm_dhbw_heimdall_egl_HeimdallRenderer_renderUpdate(JNIEnv *env, jobject obj){

		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

		glEnable(GL_DEPTH_TEST);
		glEnable(GL_CULL_FACE);

		//Tell the framework that we are now going to render something
		QCAR::State currentState = QCAR::Renderer::getInstance().begin();

		//Render the camera-stream (current frame) to the background. WARNING: Bug found, not identified yet
		QCAR::Renderer::getInstance().drawVideoBackground();

		//Get the amount of markers that were detected
		int markerCount = currentState.getNumActiveTrackables();

		//Create an array with the current marker-IDs as well as a handle to its
		//elements. Both JNI calls return NULL on allocation failure, in which
		//case we must not touch the buffer - skip the draw loop entirely.
		jintArray markerIDs = env->NewIntArray(markerCount);
		jint *elemPointer = (markerIDs != NULL) ? env->GetIntArrayElements(markerIDs, NULL) : NULL;

		if(elemPointer != NULL){

			for(int index = 0; index < markerCount; index++){

				//Store the marker ID (Still static) //TODO: report the real trackable id
				elemPointer[index] = 1;

				//Get the current trackable; defensively skip slots the framework does not fill
				const QCAR::Trackable* foundTrackable = currentState.getActiveTrackable(index);
				if(foundTrackable == NULL){
					continue;
				}

				//Obtain the model-matrix (pose of the marker in camera space)
				QCAR::Matrix44F qcarMatrix = QCAR::Tool::convertPose2GLMatrix(foundTrackable->getPose());
				memcpy(model, qcarMatrix.data, sizeof(H44DMatrix));

				//Walk the object list and draw everything attached to this marker
				HeimdallNative3DObject* buffer = localContainer.getFirstElement();
				while(buffer != NULL){

					if(buffer->getMarkerID() == 1){ //TODO: markerID
						//Finalize any unfinalized object (this is the case, when a new object was attached during the last update)
						buffer->setupObject();

						//Draw the Object
						buffer->draw(&shaderMU, model, projection);
					}

					//Load the next element
					buffer = (HeimdallNative3DObject*)buffer->getNextElement();
				}
			}
		}

		glDisable(GL_DEPTH_TEST);

		//Tell the QCAR-Framework that we are done rendering and that the render-bucket may be finalized
		QCAR::Renderer::getInstance().end();

		//Copy the buffer back into the Java array and free it. Mode 0 means
		//"copy back and release"; the previous code passed NULL (a pointer
		//constant) where a jint mode is expected.
		if(elemPointer != NULL){
			env->ReleaseIntArrayElements(markerIDs, elemPointer, 0);
		}

		return markerIDs;
	}

	/**
	 * Releases all native resources on Activity destruction: purges the
	 * native 3D-object list, stops the QCAR tracker, deactivates the
	 * trackable data set and shuts down the camera.
	 */
	JNIEXPORT void JNICALL
	Java_de_ibm_dhbw_heimdall_HeimdallAR_CleanUpEnvironment(JNIEnv *env, jobject obj){

    	__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "OnDestroy: Cleaning up the environment.");

    	//Free every natively mirrored 3D object
    	localContainer.purgeList();

    	//Cleanup QCAR
    	QCAR::TrackerManager& tm = QCAR::TrackerManager::getInstance();
    	QCAR::ImageTracker* tracker = static_cast<QCAR::ImageTracker*>(tm.getTracker(QCAR::Tracker::IMAGE_TRACKER));

    	//The tracker may be NULL if initialization never happened (e.g. startup
    	//failed) - the previous code dereferenced it unconditionally
    	if(tracker != NULL){

    		//Stop the tracker
    		tracker->stop();

    		//Deactivate the trackable, if it is still the active data set
    		if(tracker->getActiveDataSet() == localTrackable){
    			tracker->deactivateDataSet(localTrackable);
    		}
    	}

    	//Stop the camera
		QCAR::CameraDevice::getInstance().stop();
		QCAR::CameraDevice::getInstance().deinit();
	}


/************************************************************************************************
 * Methods that mirror ("natify") the Java-side 3D objects into native code
 ************************************************************************************************/


	/**
	 * Native method that takes the calling Heimdall3DObject and copies its
	 * Java-side values into a newly created native mirror object.
	 * Ownership of the native object passes to localContainer, which deletes
	 * it later (see Detach3DObjectFromNative / CleanUpEnvironment).
	 */
	JNIEXPORT void JNICALL
		Java_de_ibm_dhbw_heimdall_engine_Heimdall3DObject_Copy3DObjectToNative(JNIEnv *env, jobject obj){

		__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "Natifying Heimdall3DObject");

		//Create the native twin and register it with the container
		HeimdallNative3DObject* hdno = new HeimdallNative3DObject();
		localContainer.addObject(hdno);

		//Populate the native object from the Java-side fields.
		//(The previous trailing "hdno = NULL;" was a dead assignment to a
		//local about to go out of scope and has been removed.)
		hdno->loadJavaObject(env, obj);
	}

	/**
	 * Native method that removes the Heimdall3DObject with the given ID from the
	 * local container. Delegates entirely to HeimdallObjectContainer::deleteObject;
	 * behavior for an unknown id is defined there, not here.
	 */
	JNIEXPORT void JNICALL
		Java_de_ibm_dhbw_heimdall_engine_Heimdall3DObject_Detach3DObjectFromNative(JNIEnv *env, jobject obj, int id){

		localContainer.deleteObject(id);
	}

	/**
	 * Pulls the current transform value of the Java object with the given id
	 * into its native mirror. The type selects which value is refreshed:
	 * 1 = Position, 2 = Scale, 3 = Rotation. Any other type is ignored, as is
	 * an id of -1 or an id with no native counterpart.
	 */
	JNIEXPORT void JNICALL
		Java_de_ibm_dhbw_heimdall_engine_Heimdall3DObject_CopyTransformNative(JNIEnv *env, jobject obj, int id, int type){

		//Look up the native twin of the calling Heimdall3DObject
		HeimdallNative3DObject* nativeObj = localContainer.getObject(id);

		//Guard clause: nothing to update for invalid or unregistered ids
		if(id == -1 || nativeObj == NULL){
			return;
		}

		//Handle to the Heimdall3DObject-Class
		jclass javaClass = env->GetObjectClass(obj);

		switch(type){
		case 1: //Position
			nativeObj->updatePosition(env, obj, javaClass);
			break;

		case 2: //Scale
			nativeObj->updateScale(env, obj, javaClass);
			break;

		case 3: //Rotation
			nativeObj->updateRotation(env, obj, javaClass);
			break;
		}
	}


	/*********************************************************************************
	 * Initialization routines used for the Qualcomm AR-Framework
	 *********************************************************************************/

	/**
	 * Stores the device screen size in the module-level deviceWidth and
	 * deviceHeight fields; these are read later by SetupCameraInstance when
	 * configuring the video background.
	 */
	JNIEXPORT void JNICALL
		Java_de_ibm_dhbw_heimdall_HeimdallAR_LoadScreenDimensions(JNIEnv *env, jobject obj, int width, int height){

		//Remember the metrics for the camera/background setup
		deviceWidth = width;
		deviceHeight = height;

		__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "Loading screen-dimensions %d x %d", width, height);
	}


	/**
	 * This method is called during the setup of the Framework. It tells the QCAR-Framework where
	 * to find the trackable-definitions and issues the loading of it.
	 * As soon as we want to switch to multitarget and ImageMarkers, we will need to change this
	 * method.
	 *
	 * Sequence (order matters): init the TrackerManager for image tracking,
	 * fetch the concrete ImageTracker, create + load + activate the data set.
	 * On any failure a message is logged and the method returns early, leaving
	 * localTrackable in whatever state was reached.
	 */
	JNIEXPORT void JNICALL
		Java_de_ibm_dhbw_heimdall_HeimdallAR_LoadQCARTrackables(JNIEnv *env, jobject obj){

		__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "Loading the Heimdall trackables");

		//Call the TrackerManager-Factory of the QCAR-Framework to load an instance of the TrackerManager
		QCAR::TrackerManager& tm = QCAR::TrackerManager::getInstance();

			//Call the QCAR-Framework to initialize the TM with the desired Trackable-Type
			if(tm.initTracker(QCAR::Tracker::IMAGE_TRACKER) == NULL){
				__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Error. The TrackerManager could not be initialized. Check the QCAR-Initializationsettings");
				return;
			}


		//Now that the TM is up and running, we can try to get a concrete instance of the tracker (ImageTracker)
		 QCAR::ImageTracker* tracker = static_cast<QCAR::ImageTracker*>(tm.getTracker(QCAR::Tracker::IMAGE_TRACKER));

			 if(tracker == NULL){
				 __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Error. The ImageTracker could not be loaded. Maybe the initialization did not work.");
				 return;
			 }

		//Next, we use the tracker-instance to load the trackable-definition(s) from the local assets-folder
		 localTrackable = tracker->createDataSet();

			 if(localTrackable == NULL){
				 __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Error. The Trackable-DataSet could not be created.");
				 return;
			 }

			 //STORAGE_APPRESOURCE: the .xml/.dat pair is read from the apk's assets directory
			 if(localTrackable->load("StonesAndChips.xml",QCAR::DataSet::STORAGE_APPRESOURCE) == false){
				 __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Error. The Trackable-DataSet could not be loaded. Check if the file really resides within the assets-directory of your apkg.");
				 return;
			 }

		//Fine, now that the dataset is loaded, we'll only need to activate it
		//(only active data sets are considered during tracking)

			 if(tracker->activateDataSet(localTrackable) == false){
				 __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Error. There was an error while activating the QCAR-DataSet (trackable).");
				 return;
			 }

		//Allow tracking up to 4 instances of image targets at the same time
	    QCAR::setHint(QCAR::HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);

		__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "Done loading and activating the Heimdall trackables");
	}

	/**
	 * This method loads all the necessary data to configure and run a
	 * camera-stream in the background of the render-scene: camera init,
	 * video-background sizing (aspect-ratio aware), video-mode selection,
	 * camera start and finally tracker start.
	 */
	JNIEXPORT void JNICALL
		Java_de_ibm_dhbw_heimdall_HeimdallAR_SetupCameraInstance(JNIEnv *env, jobject obj){

		__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "Setting up the Camera Instance");

		//First of all, call the QCAR-Framework to initialize the camera-stream
		if(QCAR::CameraDevice::getInstance().init() == false){
			__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Error. QCAR was unable to successfully start the camera-stream.");
			return;
		}

		//Now that the camera is initialized, we'll have to setup the specific metrics
		QCAR::VideoBackgroundConfig vConfig;
		vConfig.mPosition.data[0] = deviceWidth/2; //Offset to x of the Viewport
		vConfig.mPosition.data[1] = deviceHeight/2; //Offset to y of the Viewport
		vConfig.mSynchronous = false; //Do not block rendering waiting for new camera frames
		vConfig.mEnabled = true; //Enable video-streaming

		QCAR::CameraDevice& cam = QCAR::CameraDevice::getInstance();
		QCAR::VideoMode vmode = cam.getVideoMode(QCAR::CameraDevice::MODE_OPTIMIZE_QUALITY); //Pick the highest possible resolution

		//Configure the video-stream-size: scale the camera frame so its height
		//fills the screen while preserving the camera aspect ratio.
		//BUGFIX: the previous code set mSize to the raw screen dimensions,
		//which made the mismatch-correction branch below unconditionally false
		//(dead code) and could distort the background image.
		vConfig.mSize.data[0] = (int)(vmode.mWidth * (deviceHeight / (float)vmode.mHeight));
		vConfig.mSize.data[1] = deviceHeight;

		//If matching the height leaves the width short of the screen, match
		//the width instead and scale the height accordingly.
		//NOTE(review): this assumes the render surface is wider than it is
		//tall (landscape) - confirm against the Activity's orientation.
		if(vConfig.mSize.data[0] < deviceWidth)
		{
			__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Correcting rendering background size to handle missmatch between screen and video aspect ratios.");
			vConfig.mSize.data[0] = deviceWidth;
			vConfig.mSize.data[1] = (int)(deviceWidth * (vmode.mHeight / (float)vmode.mWidth));
		}

		//Propagate the configuration to the QCAR-Framework
		QCAR::Renderer::getInstance().setVideoBackgroundConfig(vConfig);

		//Tell the system, that we want to use the highest quality-mode
		QCAR::CameraDevice::getInstance().selectVideoMode(QCAR::CameraDevice::MODE_OPTIMIZE_QUALITY);

		//FINALLY, start the camera and enable the tracking-algorithms
		if (QCAR::CameraDevice::getInstance().start() == false){
			__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Error. Could not start the camera. Check if your device supports a back-camera and that it is enabled.");
			return;
		}

		//Start the tracker only once the camera delivers frames. The tracker
		//may be NULL if LoadQCARTrackables failed earlier - guard against it.
		QCAR::TrackerManager& tm = QCAR::TrackerManager::getInstance();
		QCAR::Tracker* tracker = tm.getTracker(QCAR::Tracker::IMAGE_TRACKER);
		if(tracker != NULL){
			tracker->start();
		}

		__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "Camera successfully initialized and started. Tracking engine is up and running");
	}

	/**
	 * Builds the device-specific perspective projection matrix from the
	 * intrinsic camera calibration reported by QCAR (sensor size, focal
	 * length, principal point) together with the global near/far planes.
	 * The result is written into the global `projection` matrix so that
	 * rendered geometry lines up with the camera image.
	 */
	JNIEXPORT void JNICALL
		Java_de_ibm_dhbw_heimdall_HeimdallAR_PrepareDeviceProjection(JNIEnv* env, jobject obj){

		__android_log_print(ANDROID_LOG_INFO, LOG_TAG,"Preparing the device-specific projection-matrix");

		const QCAR::CameraCalibration& calib = QCAR::CameraDevice::getInstance().getCameraCalibration();

		//Intrinsic camera parameters
		QCAR::Vec2F sensorSize = calib.getSize();
		QCAR::Vec2F focal = calib.getFocalLength();
		QCAR::Vec2F principal = calib.getPrincipalPoint();

		//Offset of the principal point from the image center
		float offsetX = principal.data[0] - sensorSize.data[0] / 2;
		float offsetY = principal.data[1] - sensorSize.data[1] / 2;

		//Scale terms derived from the focal length (y negated for GL's y-up convention)
		float scaleX =  2.0f * focal.data[0] / sensorSize.data[0];
		float scaleY = -2.0f * focal.data[1] / sensorSize.data[1];

		//Normalized principal-point shift
		float shiftX = offsetX / sensorSize.data[0];
		float shiftY = -1.0f * offsetY / sensorSize.data[1];

		//Standard depth-mapping terms of a perspective projection
		float depthScale = (farPlane + nearPlane) / (farPlane - nearPlane);
		float depthBias = (-nearPlane*2.0f*farPlane)/(farPlane-nearPlane);

		//Assemble the matrix, four entries per row of the GL layout
		projection[0]  = scaleX; projection[1]  = 0.0f;   projection[2]  = 0.0f;       projection[3]  = 0.0f;
		projection[4]  = 0.0f;   projection[5]  = scaleY; projection[6]  = 0.0f;       projection[7]  = 0.0f;
		projection[8]  = shiftX; projection[9]  = shiftY; projection[10] = depthScale; projection[11] = 1.0f;
		projection[12] = 0.0f;   projection[13] = 0.0f;   projection[14] = depthBias;  projection[15] = 0.0f;
	}

}
