/**
 * <AiRmob-Framework; A simple augmented reality framework for android >
    Copyright (C) <2012>  <AiRmob Team>

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

package airmob.framework;





import rendercomponents.AiRmobRenderer;
import rendercomponents.Identifier;
import rendercomponents.Renderer;
import trackercomponents.AiRmobTracker;
import airmob.minimal.R;
import android.app.Activity;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.location.LocationManager;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
import android.widget.TextView;

/**
 * This class extends the Android-class 'Activity'.
 * It handles core-functionalities of the AiRmob-Framework.
 * In an instance of this class a renderer and a tracker have to be set.
 * Any other options have default values and can be set optionally.
 * @author nlichtenberg
 *
 */
public class AiRmobActivity extends Activity implements SurfaceHolder.Callback {

	/**
	 * Listener notified when an object has been picked in the rendered scene.
	 * May be null until {@link #setOnPickListener(onPickListener)} is called.
	 */
	private onPickListener mOnPickListener;

	/** Handler bound to the thread this class is loaded on (normally the UI thread). */
	public final static Handler messageHandler = new Handler();

	/** Wrapper around the device camera; created in {@link #onCreate(Bundle)}. */
	protected AiRmobCamera mCam;

	/**
	 * True if the activity is in front. Used as condition for looping threads.
	 * If the activity is not in front, threads using this condition will run out.
	 */
	protected boolean activityIsInFront = false;

	/** If true, {@link #surfaceCreated(SurfaceHolder)} opens the front-facing camera. */
	protected boolean frontFaceCamera = false;

	/** Toggle state for {@link #toggleFreeze()}: true while the preview is frozen. */
	boolean toggled = false;

	/**
	 * SurfaceView that holds the camerapicture
	 */
	protected SurfaceView mCamSV;
	/**
	 * SurfaceHolder that listens to cameracallbacks
	 */
	protected SurfaceHolder mCamSH;

	/**
	 * Holds an ID as an integer to set camera resolution.
	 * Passed to {@link AiRmobCamera#startVideoStream}; the mapping of ID to an
	 * actual resolution is defined there — TODO confirm valid range.
	 */
	protected int CAMERA_RES_ID = 20;

	/**
	 * GLSurfaceView that handles the openGL content
	 */
	protected AiRmobGLSurfaceView mGLSurfaceV;

	/**
	 * Instance of the active tracker. Must be set (via {@link #setTracker})
	 * before {@link #onCreate(Bundle)} runs, since onCreate hands it to the pipeline.
	 */
	protected AiRmobTracker mTracker = null;

	/**
	 * Instance of the active renderer. Must be set (via {@link #setRenderer})
	 * before {@link #onCreate(Bundle)} runs, since onCreate wires it into the GL view.
	 */
	protected AiRmobRenderer mRenderer = null;

	/** handles the way the trackingpipeline works
	 * 
	 */
	public Pipelinehandler pipeline;

	/** locationManager used by GPSAlertCreator
	 * 
	 */
	public LocationManager locationManager;

	/**
	 * Registers the renderer used by the framework. Call before super.onCreate()
	 * work happens, i.e. before this activity's onCreate executes.
	 * @param renderer the renderer instance to use
	 */
	protected void setRenderer(AiRmobRenderer renderer) {
		mRenderer = renderer;
	}

	/**
	 * Registers the tracker used by the framework.
	 * @param tracker the tracker instance to use
	 */
	protected void setTracker(AiRmobTracker tracker) {
		mTracker = tracker;
	}

	/** Called when the activity is first created. */
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		AiRmobFileManager.mContext = getApplicationContext();
		setContentView(R.layout.main);
		// Run fullscreen and keep the screen from dimming while tracking.
		getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
				WindowManager.LayoutParams.FLAG_FULLSCREEN);
		getWindow().setFlags(LayoutParams.FLAG_KEEP_SCREEN_ON, LayoutParams.FLAG_KEEP_SCREEN_ON);

		// set up SurfaceHolder for the camera
		mCamSV = (SurfaceView) findViewById(R.id.camera_view);
		mCamSH = mCamSV.getHolder();
		mCamSH.addCallback(this);
		// NOTE(review): setType is deprecated since API 11 (ignored there) but is
		// still required on older devices for the camera preview surface.
		mCamSH.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

		// overlay for the renderer: the GL view comes from the layout and is
		// wired to the renderer registered via setRenderer().
		mGLSurfaceV = (AiRmobGLSurfaceView) findViewById(R.id.GLview);
		mGLSurfaceV.setRenderer(mRenderer);
		mGLSurfaceV.mRenderer = mRenderer;

		// init Runnables: these let the renderer post events back onto this activity.
		mRenderer.runnablePick = new PickRunnable();
		mRenderer.runnableRendererCreated = new RendererCreatedRunnable();
		mRenderer.runnableRendererChanged = new RendererChangedRunnable();

		// init pipeline with default pipelinemode
		// -> oneShotPreviewCallback and rendering for each tracked frame
		// Pipelinehandler must know the GLSurface and the Tracker
		pipeline = new Pipelinehandler(mGLSurfaceV, mTracker);
		setPipelineMode(Pipelinehandler.MODE_VIDEOBG_RENDER_ON_TRACKED);
		mCam = new AiRmobCamera(this);
	}


	///////////////////////////
	// Surface Methods
	/////////////////////////////////

	/**
	 * Opens the camera (front or back, depending on {@link #frontFaceCamera})
	 * as soon as the preview surface exists.
	 */
	@Override
	public void surfaceCreated(SurfaceHolder holder) {
		// init camera and open it
		if (frontFaceCamera)
			mCam.openFrontFacingCamera();
		else
			mCam.openSimpleCamera();
	}


	/**
	 * Displayformat will be set and cameraparameters are sent to renderer/tracker.
	 * Tracker will be initialized and onAiRmobInitialized will be called.
	 * The framework is now ready to work.
	 */
	@Override
	public void surfaceChanged(SurfaceHolder holder, int format, int width,
			int height) {

		//start the videostream and hand necessary data to renderer and tracker
		mCam.startVideoStream(holder, width, height, CAMERA_RES_ID);
		// send parameters to the tracker and renderer
		mTracker.cameraAngleHorizontal = mCam.getCameraHorizontalAngle();
		mTracker.cameraAngleVertical = mCam.getCameraVerticalAngle();
		mRenderer.cameraAngleHorizontal = mCam.getCameraHorizontalAngle();
		mRenderer.cameraAngleVertical = mCam.getCameraVerticalAngle();
		mTracker.mHeight = mCam.height;
		mTracker.mWidth = mCam.width;
		mTracker.init();
		onAiRmobInitialized();
	}

	/**
	 * This method is called as soon as the framework is fully initialized. From this point on tracking is possible. By default, tracking will be started immediately.
	 * Override this method to not start tracking.
	 */
	public void onAiRmobInitialized() {
		mCam.startTracking();
		mGLSurfaceV.setZOrderOnTop(true);
	}

	/**
	 * The application is shut down. Videostream will be automatically stopped.
	 */
	@Override
	public void surfaceDestroyed(SurfaceHolder holder) {
		mCam.stopVideoStream();
	}

	/**
	 * This method initializes an instance of Locationmanager. This instance is used by GPSMarkerCreator.
	 */
	public void activateLocationManager() {
		// init the locationmanager so it can be used by GPSMarkerCreator
		locationManager = (LocationManager) getSystemService(LOCATION_SERVICE);
	}

	/**
	 * This method receives a GPSMarker ID if a location specified by an active GPSMarker was entered or left.
	 * Default implementation does nothing; override to react to region changes.
	 * @param markerID name of the GPSMarker that has called this method
	 * @param entering true if entering the GPSMarker's region, false if leaving
	 */
	public void onReceiveMarker(String markerID, boolean entering) {
	}

	/**
	 * Sets the pipelinemode
	 * @param mode selected mode
	 */
	public void setPipelineMode(int mode) {
		pipeline.setPipelineMode(mode);
	}

	/**
	 * Defines the buffersize used by the pipeline. If a new camerapicture arrives while the previous one is still being processed it can be processed in parallel by another thread.
	 * 
	 * @param bufferSize defines the maximum of parallel processed images; default = 2.
	 */
	public void setPipelineBufferSize(int bufferSize) {
		pipeline.setBufferSize(bufferSize);
	}


	// Runnablestuff

	/**
	 * An instance gets called if a pickaction has been performed by the renderer. It then calls the AiRmobActivity's onPick method supplying the result.
	 * @author nlichtenberg
	 *
	 */
	public class PickRunnable implements Runnable {
		/** ID of the picked object; set by the renderer before posting this runnable. */
		public Object id;
		@Override
		public void run() {
			onPick(id);
		}
	}

	/**
	 * An instance of this runnable gets called as soon as the renderer has created its surface. 
	 * The AiRmobActivity's onRendererCreated method will then be called.
	 * @author nlichtenberg
	 *
	 */
	public class RendererCreatedRunnable implements Runnable {
		@Override
		public void run() {
			onRendererCreated();
		}
	}

	/**
	 * An instance of this runnable gets called as soon as the renderer has changed its surface. 
	 * The AiRmobActivity's onRendererChanged method will then be called.
	 * @author nlichtenberg
	 *
	 */
	public class RendererChangedRunnable implements Runnable {
		@Override
		public void run() {
			onRendererChanged();
		}
	}


	/**
	 * this Method is called as soon as the renderer has created its surface.
	 * call super.onRendererCreated() if you override this.
	 */
	protected void onRendererCreated() {
		if (mRenderer.pickingActivated())
			((Renderer) mRenderer).runnablePick = new PickRunnable();
	}

	/**
	 * this Method is called as soon as the renderer has changed its surface.
	 */
	protected void onRendererChanged() {
	}

	/**
	 * This method will be called on picking an object found in the currently rendered scene.
	 * Safe to call when no listener is registered — the pick is silently dropped then.
	 * @param id id of the picked object to perform actions on it
	 */
	public void onPick(Object id) {
		// Guard against NPE: picking can fire before setOnPickListener() was called.
		if (mOnPickListener != null) {
			mOnPickListener.onPick(id);
		}
	}

	/**
	 * Registers the listener that receives pick results from {@link #onPick(Object)}.
	 * @param in the listener to notify; replaces any previously set listener
	 */
	public void setOnPickListener(onPickListener in) {
		mOnPickListener = in;
	}

	/**
	 * This method will freeze the camerapreview if it's running or start the preview if it is in 'freeze' state.
	 * Interaction with 3D-objects will still be possible.
	 */
	public void toggleFreeze() {
		if (!toggled) {
			toggled = true;
			// NOTE(review): the renderer-side freeze is only applied below API 11;
			// presumably newer devices freeze via the pipeline mode alone — confirm.
			if (Build.VERSION.SDK_INT < 11)
				mRenderer.freezeCamera(true);
			setPipelineMode(Pipelinehandler.MODE_FREEZE_ON_NEXT_FRAME);
		} else {
			toggled = false;
			mRenderer.freezeCamera(false);
			setPipelineMode(Pipelinehandler.MODE_BEFORE_FREEZE);
			pipeline.mCam.startPreview();
			mCam.startTracking();
		}
	}

	/**
	 * 
	 * @param name the name the object was registered under
	 * @return  The object that was registered with the given name in an instance of AiRmobRenderer.
	 */
	public Object getObjectbyName(String name) {
		return mRenderer.getObjectbyName(name);
	}

	/** 
	 * @return All registered objects in an array.
	 */
	public Object[] getAllObjects() {
		return mRenderer.getAllObjects();
	}


	/**
	 * Project input coordinates to marker plane (the plane where world z = 0).
	 * Casts a ray from the screen tap through the inverse projection and view
	 * matrices and intersects it with z = 0.
	 * @param displayX screen x coordinate in pixels
	 * @param displayY screen y coordinate in pixels
	 * @return Returns a float[2] including x,y world coordinates.
	 * x = float[0]
	 * y = float[1]
	 */
	public float[] pickMarkerPlane(int displayX, int displayY) {

		// NOTE(review): Display.getWidth()/getHeight() are deprecated since API 13;
		// kept for compatibility with the framework's minimum API level.
		float width = (float) getWindowManager().getDefaultDisplay().getWidth();
		float height = (float) getWindowManager().getDefaultDisplay().getHeight();

		float halfScreenWidth = width / 2.0f;
		float halfScreenHeight = height / 2.0f;

		float halfViewportWidth = mGLSurfaceV.getWidth() / 2.0f;
		float halfViewportHeight = mGLSurfaceV.getHeight() / 2.0f;

		// Convert screen pixels to normalized device coordinates (y flipped).
		float x = (displayX - halfScreenWidth) / halfViewportWidth;
		float y = (displayY - halfScreenHeight) / halfViewportHeight * -1;

		float[] invProjMatrix = new float[16];
		Matrix.invertM(invProjMatrix, 0, ((Renderer) mRenderer).mProjectionMatrix, 0);
		float[] invCameraMatrix = new float[16];
		Matrix.invertM(invCameraMatrix, 0, ((Renderer) mRenderer).mViewMatrix, 0);

		// Ray endpoints on the near (z=-1) and far (z=1) clip planes.
		float[] startVector = new float[]{x, y, -1.0f, 1.0f};
		float[] endVector = new float[]{x, y, 1.0f, 1.0f};
		float[] dirVector = new float[]{0, 0, 0, 1.0f};
		float[] goalVector = new float[]{0, 0};

		//unproject the start and end vectors by using inverse projection matrix
		Matrix.multiplyMV(startVector, 0, invProjMatrix, 0, startVector, 0);
		Matrix.multiplyMV(endVector, 0, invProjMatrix, 0, endVector, 0);

		// bring the homogeneous coordinate back to 1
		startVector[0] /= startVector[3];
		startVector[1] /= startVector[3];
		startVector[2] /= startVector[3];
		startVector[3] /= startVector[3];
		endVector[0] /= endVector[3];
		endVector[1] /= endVector[3];
		endVector[2] /= endVector[3];
		endVector[3] /= endVector[3];

		//unproject the start and end vectors by using inverse camera matrix
		Matrix.multiplyMV(startVector, 0, invCameraMatrix, 0, startVector, 0);
		Matrix.multiplyMV(endVector, 0, invCameraMatrix, 0, endVector, 0);

		dirVector[0] = endVector[0] - startVector[0];
		dirVector[1] = endVector[1] - startVector[1];
		dirVector[2] = endVector[2] - startVector[2];

		// normalize dirVector
		float length = Matrix.length(dirVector[0], dirVector[1], dirVector[2]);
		dirVector[0] = dirVector[0] / length;
		dirVector[1] = dirVector[1] / length;
		dirVector[2] = dirVector[2] / length;

		// find intersection where z = 0;
		float t = startVector[2] / (-dirVector[2]);

		goalVector[0] = startVector[0] + t * dirVector[0];
		goalVector[1] = startVector[1] + t * dirVector[1];

		return goalVector;
	}


	/**
	 * Use the result to rotate an object. The object's local x-axis will then point to the position of the input coordinates.
	 * @param tapX x coordinate of a tap on the screen
	 * @param tapY y coordinate of a tap on the screen
	 * @param objectX x coordinate of the object to be rotated
	 *        (NOTE(review): currently unused by the computation — the angle is
	 *        measured from the world origin, not the object's position; verify intent)
	 * @param objectY y coordinate of the object to be rotated
	 * @return The angle between the world x-axis and the vector of the object's center to the tap position.
	 */
	public double getRotationToTapPosition(int tapX, int tapY, float objectX, float objectY) {
		double rotationAngle = 0;

		// project the tap into marker plane
		float[] tapProj = pickMarkerPlane(tapX, tapY);
		float worldTapX = tapProj[0];
		float worldTapY = tapProj[1];

		// cosine of the angle between the world x-axis and the tap vector
		double cos = (worldTapX) / Matrix.length(worldTapX, worldTapY, 0);

		rotationAngle = Math.acos(cos);
		rotationAngle *= 180 / Math.PI;

		// acos only covers [0,180]; mirror for taps below the object
		if (objectY > worldTapY)
			rotationAngle = 360 - rotationAngle;
		return rotationAngle;
	}

	///////////////////////////
	// Lifecycle Methods
	////////////////////////////////////////


	@Override
	public void onResume() {
		super.onResume();
		// allow framework threads gated on this flag to keep running
		activityIsInFront = true;
	}

	@Override
	public void onDestroy() {
		super.onDestroy();
	}

	@Override
	public void onPause() {
		super.onPause();
		// framework threads gated on this flag will run out
		activityIsInFront = false;
	}

}