/*==============================================================================
            Copyright (c) 2010-2011 QUALCOMM Incorporated.
            All Rights Reserved.
            Qualcomm Confidential and Proprietary
            
@file 
    AR_Edificacio.cpp

@brief
    JNI bridge for the EdificacioAR augmented-reality app: camera/tracker
    control, channel and model loading, and OpenGL ES rendering of 3D models
    over detected image targets.

==============================================================================*/


#include <jni.h>
#include <android/log.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include <sstream>
#include <string.h>
#include <stdlib.h>
#include <iostream>

#ifdef USE_OPENGL_ES_1_1
#include <GLES/gl.h>
#include <GLES/glext.h>
#else
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#endif

#include <QCAR/QCAR.h>
#include <QCAR/CameraDevice.h>
#include <QCAR/Renderer.h>
#include <QCAR/VideoBackgroundConfig.h>
#include <QCAR/Trackable.h>
#include <QCAR/Tool.h>
#include <QCAR/Tracker.h>
#include <QCAR/CameraCalibration.h>

#include "SampleUtils.h"
#include "Texture.h"
#include "Geometry/MeshGroup.h"
#include "Geometry/Model3D.h"
#include "Domain/Channel.h"
#include "Domain/Layer.h"
#include "Domain/Model.h"
#include "Geometry/Cube.h"
#include "ShaderManager.h"

using namespace std;


#ifdef __cplusplus
extern "C"
{
#endif

// Textures (legacy — texture loading now happens per mesh group in initRendering):
//int textureCount                = 0;
//Texture** textures              = 0;

// Models loaded through loadModels(). NOTE: the loadModels() call site in
// initApplicationNative is currently commented out, so these stay 0/null.
int modelCount					= 0;
Model3D** models				= 0;

// Channel selected on the Java side; populated by loadChannel(). Owns the
// layers/models that renderFrame and the GUI callbacks operate on.
Channel* channel					= 0;

// OpenGL ES 2.0 specific:

// Cached JNI handles from initApplicationNative.
// NOTE: a JNIEnv* is only valid on the thread it was obtained on; javaObj and
// javaClass are global references and stay valid across calls.
JNIEnv* javaEnv;
jobject javaObj;
jclass javaClass;

// Screen dimensions (pixels), set in initApplicationNative / updateRendering:
unsigned int screenWidth        = 0;
unsigned int screenHeight       = 0;

// Indicates whether screen is in portrait (true) or landscape (false) mode;
// drives the video-background aspect computation in configureVideoBackground.
bool isActivityInPortraitMode   = false;

// The projection matrix used for rendering virtual objects:
QCAR::Matrix44F projectionMatrix;

// Constants: base scale applied to every model on top of its per-model scale.
static const float kObjectScale = 30.f;


// ID of the last detected trackable, used to know to which trackable should we apply the model changes
int lastDetectedTrackable = -1;

JNIEXPORT int JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_getOpenGlEsVersionNative(JNIEnv *, jobject)
{
    // Report which OpenGL ES generation this native library was compiled for,
    // so the Java side can create a matching EGL context.
#ifdef USE_OPENGL_ES_1_1
    const int glEsMajorVersion = 1;
#else
    const int glEsMajorVersion = 2;
#endif
    return glEsMajorVersion;
}


JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_setActivityPortraitMode(JNIEnv *, jobject, jboolean isPortrait)
{
    // Record the activity orientation; configureVideoBackground() reads this
    // flag to decide how to fit the camera preview to the screen.
    isActivityInPortraitMode = (isPortrait != JNI_FALSE);
}


JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_onQCARInitializedNative(JNIEnv *, jobject)
{
    // Called from Java once QCAR has finished initializing.
    // Allow up to 5 image targets to be tracked in the same frame, and let
    // QCAR split the detection work over multiple camera frames to keep the
    // per-frame cost down.
    QCAR::setHint(QCAR::HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 5);
    QCAR::setHint(QCAR::HINT_IMAGE_TARGET_MULTI_FRAME_ENABLED, 1);
}

/*-------- JAVA methods called from Native code ----------*/

void
updateLastDetectedTrackable(int newDetected) {

	//LOG("EdificacioAR::updateLastDetectedTrackable");
	//LOG("EdificacioAR - old trackable: %i , new trackable: %i", lastDetectedTrackable, newDetected);
	lastDetectedTrackable = newDetected;
	channel->selectedLayer = newDetected;

}

/*--------------------------------------------------------*/



// Draw the visible (non-occluder) mesh groups of a model with alpha blending.
// Called after renderOccluderGroups so occluders already populated the depth
// buffer; re-enables depth and color writes that the occluder pass disabled.
// Assumes texture objects were created in initRendering and shaders in
// ShaderManager are ready.
void
renderVisibleGroups(Model3D* model, QCAR::Matrix44F modelViewProjection)
{
	//LOG("EdificacioAR::renderVisibleGroups - start rendering visible groups");

	// Restore full depth/color writes (the occluder pass masks color out).
	glDepthMask(GL_TRUE);
	glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);

	// Standard premultiplied-alpha-free transparency blending.
	glEnable (GL_BLEND);
    glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

	//LOG("EdificacioAR::renderVisibleGroups - mesh group count: %i", model->meshGroupCount);
	for (int g=0; g < model->meshGroupCount; g++)
		{
			//LOG("EdificacioAR::renderModel - rendering Group %i", g);

			MeshGroup* group = model->groups[g];
			Texture* texture = group->texture;

			ShaderManager* sm = ShaderManager::getInstance();

			// Pick the shader matching the group: textured or flat color.
			if (group->hasTexture) sm->useShader(sm->textureShaderID, model, modelViewProjection);
			else sm->useShader(sm->colorShaderID, model, modelViewProjection);

			//LOG("EdificacioAR::renderModel - groupIndices = %i", group->indicesCount);

			if (group->hasTexture) {
				glActiveTexture(GL_TEXTURE0);
				glBindTexture(GL_TEXTURE_2D, texture->mTextureID);
			}
			//glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
			//				   (GLfloat*)&modelViewProjection.data[0] );
			// Indices are 32-bit (GL_UNSIGNED_INT) and read straight from the
			// group's index array (client-side, no VBO).
			glDrawElements(GL_TRIANGLES, group->indicesCount, GL_UNSIGNED_INT,
						   (const GLvoid*) &(group->indices[0]));
			SampleUtils::checkGlError("EdificacioAR renderModel - Error rendering group");

			// Unload the shader
			if (group->hasTexture) sm->disableShader(sm->textureShaderID);
			else sm->disableShader(sm->colorShaderID);
		}

    glDisable (GL_BLEND);
}

// Render a model's occluder mesh groups into the depth buffer only:
// color writes are disabled, so the occluders are invisible but still hide
// virtual geometry behind them (the core AR occlusion trick).
// NOTE: this leaves glColorMask disabled on return; the caller must run
// renderVisibleGroups afterwards (it re-enables color writes), as renderFrame
// does.
void
renderOccluderGroups(Model3D* model, QCAR::Matrix44F modelViewProjection)
{
	//LOG("EdificacioAR::renderOccluderGroups - start rendering occluder groups");

	// Occluders only need the plain color shader — no fragment output is kept.
	ShaderManager* sm = ShaderManager::getInstance();
	sm->useShader(sm->colorShaderID, model, modelViewProjection);

	// Write depth, but mask out all color channels.
	glDepthMask(GL_TRUE);
	glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_FALSE);

	//LOG("EdificacioAR::renderVisibleGroups - occluder count: %i", model->occluderCount);

	for (int i=0; i<model->occluderCount; i++)
	{
		MeshGroup* occluder = model->occluderGroups[i];

		glDrawElements(GL_TRIANGLES, occluder->indicesCount, GL_UNSIGNED_INT,
					   (const GLvoid*) &(occluder->indices[0]));
		SampleUtils::checkGlError("EdificacioAR renderOccluderGroups - Error rendering occluder");
	}
}

/// Find the channel layer whose marker id matches the given trackable id.
/// @param id  trackable/marker id (e.g. from Trackable::getId()).
/// @return matching Layer, or NULL when no channel is loaded or no layer
///         matches. If marker ids were ever duplicated, the last match wins
///         (same behavior as the original full scan).
Layer*
getLayerFromId(int id)
{
	// Guard: several JNI entry points call this before loadChannel() may
	// have run; the original dereferenced a null 'channel' in that case.
	if (channel == NULL)
		return NULL;

	Layer* match = NULL;
	for (int i = 0; i < channel->layerCount; i++)
	{
		if (id == channel->layers[i]->markerId)
			match = channel->layers[i];
	}

	return match;
}


// Per-frame GL callback (runs on the GL thread): draws the camera video
// background, then renders the selected model of each tracked layer on top of
// its marker, occluders first so real-world geometry hides virtual geometry.
JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityRenderer_renderFrame(JNIEnv *, jobject)
{
    //LOG("EdificacioAR::renderFrame - kObjectScale = %f", kObjectScale);

    // Clear color and depth buffer 
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Render video background and obtain this frame's tracking state:
    QCAR::State state = QCAR::Renderer::getInstance().begin();
        
#ifdef USE_OPENGL_ES_1_1
    // Set GL11 flags:
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);

    glEnable(GL_TEXTURE_2D);
    glDisable(GL_LIGHTING);
        
#endif

    glEnable(GL_DEPTH_TEST);
    //glEnable(GL_CULL_FACE);
    glDisable(GL_CULL_FACE);//TODO

    // Did we find any trackables this frame?
    for(int tIdx = 0; tIdx < state.getNumActiveTrackables(); tIdx++)
    {
    	//LOG("EdificacioAR::renderFrame: Trackable found");

        // Get the trackable and convert its pose into a GL model-view matrix:
        const QCAR::Trackable* trackable = state.getActiveTrackable(tIdx);
        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(trackable->getPose());



#ifdef USE_OPENGL_ES_1_1
        // NOTE(review): this ES 1.1 path references 'thisTexture' and 'model',
        // which are not declared anywhere in this file — it will not compile
        // if USE_OPENGL_ES_1_1 is enabled. Needs porting to the layer/model
        // data structures used by the ES 2.0 path below.
        //TODO: Update rendering for OpenGL ES 1.1! with the models
        // Load projection matrix:
        glMatrixMode(GL_PROJECTION);
        glLoadMatrixf(projectionMatrix.data);

        // Load model view matrix:
        glMatrixMode(GL_MODELVIEW);
        glLoadMatrixf(modelViewMatrix.data);
        glTranslatef(0.f, 0.f, kObjectScale);
        glScalef(kObjectScale, kObjectScale, kObjectScale);

        // Draw object:
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) model->texCoords);
        glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) model->vertices);
        glNormalPointer(GL_FLOAT, 0,  (const GLvoid*) model->vertexNormals);
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_INT,
                       (const GLvoid*) model->indices);
#else

        // Render the selected models over the tracked trackables
        int layerId = trackable->getId();
        Layer* layer = getLayerFromId(layerId);
        // We render the models only in case the tracked marker is included on the channel
        if (layer != NULL)
        {
            // Update the last detected trackable: either this marker was just
            // DETECTED (newly seen) or it is the only active one on screen.
    		//LOG("EdificacioAR::renderFrame - trackable %i DETECTED? %s" , trackable->getId(),
    		//		(trackable->getStatus() == QCAR::Trackable::DETECTED) ? "true" : "false");
    		if (((lastDetectedTrackable != layerId)
    				&& (trackable->getStatus() == QCAR::Trackable::DETECTED))
    				|| (state.getNumActiveTrackables() == 1))
    			updateLastDetectedTrackable(layerId);

			Model* model = layer->models[layer->selectedModel];

			// Build MVP = projection * (pose * user translation * user scale).
			QCAR::Matrix44F modelViewProjection;

			SampleUtils::translatePoseMatrix(model->translateX/* + offsetTranslateX*/, model->translateY/* + offsetTranslateY*/, 0.0f,
											 &modelViewMatrix.data[0]);
			//SampleUtils::rotatePoseMatrix(90, 1.0, 0.0, 0.0, &modelViewMatrix.data[0]);
			SampleUtils::scalePoseMatrix(model->scale*kObjectScale, model->scale*kObjectScale, model->scale*kObjectScale,
										 &modelViewMatrix.data[0]);
			SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
										&modelViewMatrix.data[0] ,
										&modelViewProjection.data[0]);

			//It's important to render the occluders first, in order to have the desired AR effect
			renderOccluderGroups(model->model3D, modelViewProjection);
			renderVisibleGroups(model->model3D, modelViewProjection);
        }

#endif

    }

    glDisable(GL_DEPTH_TEST);

#ifdef USE_OPENGL_ES_1_1        
    glDisable(GL_TEXTURE_2D);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_NORMAL_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#else
    /*glDisableVertexAttribArray(vertexHandle);
    glDisableVertexAttribArray(normalHandle);
    glDisableVertexAttribArray(textureCoordHandle);*/
#endif

    QCAR::Renderer::getInstance().end();
}


// Configure how the live camera image is mapped onto the screen.
// The preview is scaled so the camera frame's width fills the screen's long
// axis, preserving the camera aspect ratio; reads the globals screenWidth,
// screenHeight and isActivityInPortraitMode.
void
configureVideoBackground()
{
    // Query the camera's default video mode (frame width/height).
    QCAR::CameraDevice& cameraDevice = QCAR::CameraDevice::getInstance();
    QCAR::VideoMode videoMode =
        cameraDevice.getVideoMode(QCAR::CameraDevice::MODE_DEFAULT);

    // Centered, synchronous video background.
    QCAR::VideoBackgroundConfig config;
    config.mEnabled = true;
    config.mSynchronous = true;
    config.mPosition.data[0] = 0.0f;
    config.mPosition.data[1] = 0.0f;

    if (isActivityInPortraitMode)
    {
        // Portrait: the camera frame is rotated 90°, so its width maps to the
        // screen height; scale the frame height to match.
        const float scale = screenHeight / (float) videoMode.mWidth;
        config.mSize.data[0] = videoMode.mHeight * scale;
        config.mSize.data[1] = screenHeight;
    }
    else
    {
        // Landscape: frame width maps directly to screen width.
        const float scale = screenWidth / (float) videoMode.mWidth;
        config.mSize.data[0] = screenWidth;
        config.mSize.data[1] = videoMode.mHeight * scale;
    }

    QCAR::Renderer::getInstance().setVideoBackgroundConfig(config);
}

// Fetch the Channel selected on the Java side (Activity.getSelectedChannel())
// and build its native counterpart into the global 'channel'.
// Logs and returns without touching 'channel' if the method or the Java
// object cannot be obtained.
void loadChannel(JNIEnv* env, jobject activityObject, jclass activityClass)
{
    const jmethodID getChannelId = env->GetMethodID(
        activityClass, "getSelectedChannel", "()Lcom/Domain/Channel;");
    if (getChannelId == 0)
    {
        LOG("NDK - EdificacioAR::loadChannel - Function getSelectedChannel() not found.");
        return;
    }

    jobject javaChannel = env->CallObjectMethod(activityObject, getChannelId);
    if (javaChannel == NULL)
    {
        LOG("NDK - EdificacioAR::loadChannel - GetSelectedChannel() returned zero pointer");
        return;
    }

    // Mirror the Java object into the native Channel structure.
    channel = Channel::create(env, javaChannel);
}


// Load all Model3D objects exposed by the Java activity (getModelCount /
// getModel) into the global 'models' array.
// NOTE: currently unused — the call in initApplicationNative is commented out.
// Fixes vs. original: the pointer array is value-initialized so slots are
// NULL even if we bail out mid-loop, and each per-model local reference is
// released so the JNI local reference table cannot overflow on large channels.
void loadModels(JNIEnv* env, jobject activityObject, jclass activityClass)
{
    jmethodID getModelCountMethodID = env->GetMethodID(activityClass,
                                                    "getModelCount", "()I");
    if (getModelCountMethodID == 0)
    {
        LOG("NDK - EdificacioAR::loadModels - Function getModelCount() not found.");
        return;
    }

    modelCount = env->CallIntMethod(activityObject, getModelCountMethodID);
    if (!modelCount)
    {
        LOG("NDK - EdificacioAR::loadModels - getModelCount() returned zero.");
        return;
    }

    // Value-initialize: every slot starts as NULL, keeping cleanup safe if an
    // early return below leaves the array partially filled.
    models = new Model3D*[modelCount]();

    jmethodID getModelMethodID = env->GetMethodID(activityClass,
        "getModel", "(I)Lcom/Utils3D/Model3D;");

    if (getModelMethodID == 0)
    {
        LOG("NDK - EdificacioAR::loadModels - Function getModel() not found.");
        return;
    }

    // Register the models
    for (int i = 0; i < modelCount; ++i)
    {
        jobject modelObject = env->CallObjectMethod(activityObject, getModelMethodID, i);
        if (modelObject == NULL)
        {
            LOG("NDK - EdificacioAR::loadModels - GetModel() returned zero pointer");
            return;
        }

        models[i] = Model3D::create(env, modelObject);

        // Release the local reference; the native copy no longer needs it.
        env->DeleteLocalRef(modelObject);
    }
}


// One-time native initialisation called from Java: caches JNI handles,
// stores the screen size and loads the selected channel from the activity.
// Fix vs. original: GetObjectClass was called twice for the same object,
// leaking an extra local reference — the first handle is reused instead.
JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_initApplicationNative(
                            JNIEnv* env, jobject obj, jint width, jint height)
{
    // Store the java environment for later use
    // NOTE: a JNIEnv* is only valid on the thread that produced it.
    javaEnv = env;

    // Global references keep the activity object and its class valid beyond
    // the scope of this call (released in deinitApplicationNative).
    javaObj = env->NewGlobalRef(obj);

    jclass objClass = env->GetObjectClass(obj);
    javaClass = (jclass) env->NewGlobalRef(objClass);

    // Store screen dimensions
    screenWidth = width;
    screenHeight = height;

    //loadTextures(env, obj, objClass);
    //loadModels(env, obj, objClass);
    loadChannel(env, obj, objClass);
}



// Release native resources created by initApplicationNative / loadChannel /
// loadModels. Fixes vs. original: each Model3D is deleted (delete[] alone
// leaked them), globals are reset so a repeated call cannot double-delete,
// and the global JNI references taken at init are dropped so the Java
// activity/class can be garbage collected.
JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_deinitApplicationNative(
                                                        JNIEnv* env, jobject obj)
{
    // Free the models first, then the pointer array itself.
    // (Relies on unset slots being NULL; delete on NULL is a no-op. With
    // loadModels currently disabled, 'models' is always 0 and this is skipped.)
    if (models != 0)
    {
        for (int i = 0; i < modelCount; ++i)
            delete models[i];
        delete[] models;
        models = 0;
        modelCount = 0;
    }

    delete channel;
    channel = 0;

    // Drop the global references created in initApplicationNative.
    if (javaObj != NULL)
    {
        env->DeleteGlobalRef(javaObj);
        javaObj = NULL;
    }
    if (javaClass != NULL)
    {
        env->DeleteGlobalRef(javaClass);
        javaClass = NULL;
    }
}

// Set the scale of the currently selected model. The base scale compensates
// for the physical marker width configured by the user; the gesture factor is
// added on top. No-op when no channel/layer is selected.
// Fixes vs. original: guards a null 'channel' (crash if called before
// loadChannel) and a zero layer width (division by zero).
JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_setScaleFactor(
                            JNIEnv* env, jobject obj, jfloat scalingFactor)
{
	if (channel == NULL) return;

	Layer* layer = getLayerFromId(channel->selectedLayer);
	if (layer == NULL) return;

	// Avoid dividing by zero on a mis-configured layer.
	// (Assumes layer->width is normally > 0 — TODO confirm against Layer.)
	if (layer->width == 0) return;

	// We must scale the model considering the size of the marker defined by the user
	float markerScale = Model::TRACKABLE_WIDTH / layer->width;
	Model* model = layer->models[layer->selectedModel];
	model->scale = markerScale + scalingFactor;
}

// Accumulate a user pan gesture into the selected model's translation.
// The /10 damping preserves the original gesture-to-world scaling.
// Fix vs. original: guards a null 'channel' (crash if called before
// loadChannel has run).
JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_setTranslation(
                            JNIEnv* env, jobject obj, jfloat transX, jfloat transY)
{
	if (channel == NULL) return;

	Layer* layer = getLayerFromId(channel->selectedLayer);
	if (layer == NULL) return;

	Model* model = layer->models[layer->selectedModel];
	model->translateX += transX/10;
	model->translateY += transY/10;
}

// Cycle forward through the models of the currently selected layer (wraps
// around). Fixes vs. original: guards a null 'channel' and an empty layer —
// '% layer->modelCount' was a modulo-by-zero when modelCount is 0.
JNIEXPORT void JNICALL
Java_com_AndroidUI_GUIOverlayManager_nativeNextModel(JNIEnv*, jobject)
{
    if (channel == NULL) return;

    Layer* layer = getLayerFromId(channel->selectedLayer);
    if (layer == NULL) return;

    if (layer->modelCount <= 0) return; // nothing to cycle through

    layer->selectedModel = (layer->selectedModel + 1) % layer->modelCount;
}

// Cycle backward through the models of the currently selected layer (wraps
// to the last model). Fixes vs. original: guards a null 'channel' and an
// empty layer (selectedModel would otherwise be set to -1 forever).
JNIEXPORT void JNICALL
Java_com_AndroidUI_GUIOverlayManager_nativePreviousModel(JNIEnv*, jobject)
{
    if (channel == NULL) return;

    Layer* layer = getLayerFromId(channel->selectedLayer);
    if (layer == NULL) return;

    if (layer->modelCount <= 0) return; // nothing to cycle through

    layer->selectedModel -= 1;
    if (layer->selectedModel < 0) layer->selectedModel = layer->modelCount - 1;
}


// Start the AR pipeline: initialize the camera, fit the video background to
// the screen, select and start the default video mode, start the tracker, and
// cache the projection matrix for rendering.
// The QCAR call order here is significant; each step bails out on failure.
JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_startCamera(JNIEnv *,
                                                                         jobject)
{
    // Initialize the camera:
    if (!QCAR::CameraDevice::getInstance().init())
        return;

    // Configure the video background (uses current screen size/orientation):
    configureVideoBackground();

    // Select the default mode:
    if (!QCAR::CameraDevice::getInstance().selectVideoMode(
                                QCAR::CameraDevice::MODE_DEFAULT))
        return;

    // Start the camera:
    if (!QCAR::CameraDevice::getInstance().start())
        return;

    // Uncomment to enable flash
    //if(QCAR::CameraDevice::getInstance().setFlashTorchMode(true))
    //	LOG("IMAGE TARGETS : enabled torch");

    // Uncomment to enable infinity focus mode, or any other supported focus mode
    // See CameraDevice.h for supported focus modes
    //if(QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_INFINITY))
    //	LOG("IMAGE TARGETS : enabled infinity focus");

    // Start the tracker:
    QCAR::Tracker::getInstance().start();
 
    // Cache the projection matrix used by renderFrame.
    // Near/far clipping planes: 2 .. 2000 (scene units).
    const QCAR::Tracker& tracker = QCAR::Tracker::getInstance();
    const QCAR::CameraCalibration& cameraCalibration =
                                    tracker.getCameraCalibration();
    projectionMatrix = QCAR::Tool::getProjectionGL(cameraCalibration, 2.0f,
                                            2000.0f);
}


// Stop the AR pipeline in reverse order of startCamera: tracker first, then
// stop and deinitialize the camera device.
JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_stopCamera(JNIEnv *,
                                                                   jobject)
{
    QCAR::Tracker::getInstance().stop();

    QCAR::CameraDevice::getInstance().stop();
    QCAR::CameraDevice::getInstance().deinit();
}

// Switch the camera torch on or off.
// Returns JNI_TRUE when the camera accepted the requested mode.
JNIEXPORT jboolean JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_toggleFlash(JNIEnv*, jobject, jboolean flash)
{
    const bool enableTorch = (flash == JNI_TRUE);
    const bool succeeded =
        QCAR::CameraDevice::getInstance().setFlashTorchMode(enableTorch);
    return succeeded ? JNI_TRUE : JNI_FALSE;
}

// Trigger a single autofocus cycle on the camera.
// Returns JNI_TRUE when the request was accepted.
JNIEXPORT jboolean JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_autofocus(JNIEnv*, jobject)
{
    if (QCAR::CameraDevice::getInstance().startAutoFocus())
        return JNI_TRUE;
    return JNI_FALSE;
}

// Select a camera focus mode (see QCAR CameraDevice.h for the mode values).
// Returns JNI_TRUE when the camera accepted the mode.
JNIEXPORT jboolean JNICALL
Java_com_EdificacioAR_AugmentedRealityActivity_setFocusMode(JNIEnv*, jobject, jint mode)
{
    const bool accepted = QCAR::CameraDevice::getInstance().setFocusMode(mode);
    return accepted ? JNI_TRUE : JNI_FALSE;
}


// GL-thread initialisation: sets the clear color, creates one OpenGL texture
// object per textured mesh group of every model in the channel, and (ES 2.0)
// builds the shader programs.
// Fixes vs. original: a NULL Texture* was logged and then dereferenced anyway
// (guaranteed crash); it is now skipped. Also guards against being called
// before loadChannel() has populated 'channel'.
JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityRenderer_initRendering(
                                                    JNIEnv* env, jobject obj)
{
    // Define clear color; alpha 0 when QCAR composites over the video feed.
    glClearColor(0.0f, 0.0f, 0.0f, QCAR::requiresAlpha() ? 0.0f : 1.0f);

    // Generate the OpenGL texture objects for all textured mesh groups.
    if (channel != NULL)
    {
        for (int lc = 0; lc < channel->layerCount; lc++)
        {
            Layer* layer = channel->layers[lc];
            for (int mc = 0; mc < layer->modelCount; mc++)
            {
                Model3D* model = layer->models[mc]->model3D;

                for (int gc = 0; gc < model->meshGroupCount; gc++)
                {
                    MeshGroup* group = model->groups[gc];
                    if (!group->hasTexture)
                        continue;

                    Texture* tex = group->texture;
                    if (tex == NULL)
                    {
                        // Skip: the original dereferenced this null pointer
                        // right after logging it.
                        LOG("EdificacioAR::initRendering - texture is NULL!");
                        continue;
                    }

                    GLuint texId;
                    glGenTextures(1, &texId);
                    tex->mTextureID = texId;

                    glBindTexture(GL_TEXTURE_2D, texId);
                    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
                    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
                    // Upload the RGBA pixel data decoded on the Java side.
                    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tex->mWidth,
                                 tex->mHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
                                 (GLvoid*) tex->mData);
                }
            }
        }
    }

#ifndef USE_OPENGL_ES_1_1

    // Build and load the color/texture shader programs (ES 2.0 path only).
    ShaderManager* sm = ShaderManager::getInstance();
    sm->createShaders();
    sm->loadShaders();

#endif
}

// Called when the GL surface changes size: record the new screen dimensions
// and refit the camera video background to them.
JNIEXPORT void JNICALL
Java_com_EdificacioAR_AugmentedRealityRenderer_updateRendering(
                        JNIEnv* env, jobject obj, jint width, jint height)
{
    screenWidth  = width;
    screenHeight = height;

    configureVideoBackground();
}


#ifdef __cplusplus
}
#endif
