/*==============================================================================
 Copyright (c) 2012-2014 Qualcomm Connected Experiences, Inc.
 All Rights Reserved.
 ==============================================================================*/

package com.qualcomm.vuforia.samples.VideoPlayback.app.VideoPlayback;

import java.util.Vector;

import android.app.Activity;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.graphics.Color;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.GestureDetector;
import android.view.GestureDetector.OnDoubleTapListener;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.Switch;
import android.widget.Toast;

import com.arpost.pages.AboutInfo;
import com.arpost.pages.TakePhoto;
import com.qualcomm.vuforia.CameraDevice;
import com.qualcomm.vuforia.DataSet;
import com.qualcomm.vuforia.HINT;
import com.qualcomm.vuforia.ImageTracker;
import com.qualcomm.vuforia.State;
import com.qualcomm.vuforia.Trackable;
import com.qualcomm.vuforia.Tracker;
import com.qualcomm.vuforia.TrackerManager;
import com.qualcomm.vuforia.Vuforia;
import com.qualcomm.vuforia.samples.SampleApplication.SampleApplicationControl;
import com.qualcomm.vuforia.samples.SampleApplication.SampleApplicationException;
import com.qualcomm.vuforia.samples.SampleApplication.SampleApplicationSession;
import com.qualcomm.vuforia.samples.SampleApplication.utils.LoadingDialogHandler;
import com.qualcomm.vuforia.samples.SampleApplication.utils.SampleApplicationGLView;
import com.qualcomm.vuforia.samples.SampleApplication.utils.Texture;
import com.qualcomm.vuforia.samples.VideoPlayback.R;
import com.qualcomm.vuforia.samples.VideoPlayback.app.VideoPlayback.VideoPlayerHelper.MEDIA_STATE;
import com.qualcomm.vuforia.samples.VideoPlayback.app.VideoPlayback.VideoPlayerHelper.MEDIA_TYPE;
import com.qualcomm.vuforia.samples.VideoPlayback.ui.SampleAppMenu.SampleAppMenu;
import com.qualcomm.vuforia.samples.VideoPlayback.ui.SampleAppMenu.SampleAppMenuGroup;
import com.qualcomm.vuforia.samples.VideoPlayback.ui.SampleAppMenu.SampleAppMenuInterface;

// The AR activity for the VideoPlayback sample.
public class VideoPlayback extends Activity implements SampleApplicationControl, SampleAppMenuInterface {
	private static final String LOGTAG = "VideoPlayback";
	private static final String MYTAG = "MyVideoPlayback";

	SampleApplicationSession vuforiaAppSession;

	Activity mActivity;

	// Helpers to detect events such as double tapping:
	private GestureDetector mGestureDetector = null;
	private SimpleOnGestureListener mSimpleListener = null;

	// Movie for the Targets:
	public static final int NUM_TARGETS = 2;
	public static final int STONES = 0;
	public static final int CHIPS = 1;
	private static VideoPlayerHelper[] mVideoPlayerHelper = null;
	public static int[] mSeekPosition = null;
	private boolean[] mWasPlaying = null;
	private String[] mMovieName = null;
	private boolean shouldPlayNext = true;

	// A boolean to indicate whether we come from full screen:
	private boolean mReturningFromFullScreen = false;

	// Our OpenGL view:
	private SampleApplicationGLView mGlView;

	// Our renderer:
	private VideoPlaybackRenderer mRenderer;

	// The textures we will use for rendering:
	private Vector<Texture> mTextures;

	DataSet dataSetStonesAndChips = null;

	private RelativeLayout mUILayout;

	private boolean mFlash = false;
	private boolean mContAutofocus = false;
	private boolean mExtendedTracking = false;
	private static boolean mPlayFullscreenVideo = false;

	private View mFlashOptionView;

	private SampleAppMenu mSampleAppMenu;

	// AR Post
	private Button mSkipVideoButton;
	private Button mAboutInfoButton;
	
	private ImageView mARBox;
	private boolean hasARBox = true;

	private LoadingDialogHandler loadingDialogHandler = new LoadingDialogHandler(this);

	boolean mIsDroidDevice = false;
	boolean mIsInitialized = false;
	
	boolean mFirstRun = true;
	
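	// ARPost: presumably called by the fullscreen player when the user comes
	// back, so that on-texture playback of the second target's movie resumes
	// from the given position.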
	public static void backFromFullscreen(int second) {
		mSeekPosition[1] = second;
		Log.e(LOGTAG, "back from fullscreen " + second);
		mPlayFullscreenVideo = false;
		mVideoPlayerHelper[1].play(mPlayFullscreenVideo, mSeekPosition[1]);
	}
	
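	// ARPost: show/hide the targeting-box overlay. View changes are posted to
	// the UI thread because these helpers may be called from the GL (renderer)
	// thread.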
	public void removeARBox() {
		mARBox.post(new Runnable() {
			@Override
			public void run() {
				mUILayout.removeView(mARBox);
			}
		});
		hasARBox = false;
	}
	
	public void addARBox() {
		if (!hasARBox) {
			mUILayout.post(new Runnable() {
				@Override
				public void run() {
					if (mARBox.getParent() == null)
						mUILayout.addView(mARBox);
				}
			});
		}
		hasARBox = true;
	}
	
	// Called when the activity first starts or the user navigates back
	// to an activity.
	@Override
	protected void onCreate(Bundle savedInstanceState) {
		Log.d(LOGTAG, "onCreate");
		super.onCreate(savedInstanceState);
		
		mFirstRun = true;

		vuforiaAppSession = new SampleApplicationSession(this);

		mActivity = this;

		startLoadingAnimation();

		vuforiaAppSession.initAR(this, ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);

		// Load any sample specific textures:
		mTextures = new Vector<Texture>();
		loadTextures();

		// Create the gesture detector that will handle the single and
		// double taps:
		mSimpleListener = new SimpleOnGestureListener();
		mGestureDetector = new GestureDetector(getApplicationContext(), mSimpleListener);

		mVideoPlayerHelper = new VideoPlayerHelper[NUM_TARGETS];
		mSeekPosition = new int[NUM_TARGETS];
		mWasPlaying = new boolean[NUM_TARGETS];
		mMovieName = new String[NUM_TARGETS];

		// Create the video player helper that handles the playback of the movie
		// for the targets:
		for (int i = 0; i < NUM_TARGETS; i++) {
			mVideoPlayerHelper[i] = new VideoPlayerHelper();
			mVideoPlayerHelper[i].init();
			mVideoPlayerHelper[i].setActivity(this);
		}

		//mMovieName[STONES] = "VideoPlayback/VuforiaSizzleReel_1.m4v";
		//mMovieName[CHIPS] = "VideoPlayback/VuforiaSizzleReel_2.m4v";
		mMovieName[STONES] = "VideoPlayback/TurtleRabbitH.mp4";
		mMovieName[CHIPS] = "VideoPlayback/TurtleRabbitH3.mp4";
		shouldPlayNext = true;

		// Set the double tap listener:
		mGestureDetector.setOnDoubleTapListener(new OnDoubleTapListener() {
			public boolean onDoubleTap(MotionEvent e) {
				if (shouldPlayNext) {
					shouldPlayNext = false;
					exchangeMovieName();
					Log.i(MYTAG, "First video finished, playing: " + mMovieName[1]);
					//mVideoPlayerHelper[0].play(mPlayFullscreenVideo, mSeekPosition[0]);
					mVideoPlayerHelper[1].stop();
					mVideoPlayerHelper[1].unload();
					mPlayFullscreenVideo = true;
					mVideoPlayerHelper[1].load_full(mMovieName[1], MEDIA_TYPE.ON_TEXTURE_FULLSCREEN, true, 0);
				} else {
					playInFullscreen();
				}
				return false;
			}

			public boolean onDoubleTapEvent(MotionEvent e) {
				// We do not react to this event
				return false;
			}

			// Handle the single tap
			public boolean onSingleTapConfirmed(MotionEvent e) {
				boolean isSingleTapHandled = false;
				// Check whether the tap landed on one of the targets:
				for (int i = 0; i < NUM_TARGETS; i++) {
					// Verify that the tap happened inside the target
					if (mRenderer != null && mRenderer.isTapOnScreenInsideTarget(i, e.getX(), e.getY())) {
						// Check if it is playable on texture
						if (mVideoPlayerHelper[i].isPlayableOnTexture()) {
							// We can play only if the movie was paused, ready,
							// stopped or has reached the end:
							if ((mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.PAUSED) || (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.READY)
									|| (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.STOPPED) || (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.REACHED_END)) {
								// Pause all other media
								pauseAll(i);

								// If it has reached the end then rewind
								if ((mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.REACHED_END))
									mSeekPosition[i] = 0;

								mVideoPlayerHelper[i].play(mPlayFullscreenVideo, mSeekPosition[i]);
								mSeekPosition[i] = VideoPlayerHelper.CURRENT_POSITION;
							} else if (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.PLAYING) {
								// If it is playing then we pause it
								mVideoPlayerHelper[i].pause();
							}
						} else if (mVideoPlayerHelper[i].isPlayableFullscreen()) {
							// The movie is not playable on texture (either
							// that was not requested or it is not supported),
							// so request fullscreen playback instead.
							mVideoPlayerHelper[i].play(true, VideoPlayerHelper.CURRENT_POSITION);
						}

						isSingleTapHandled = true;

						// Even though multiple videos can be loaded, only one
						// can be playing at any point in time. This break
						// prevents overlapping videos from triggering
						// playback simultaneously.
						break;
					}
				}

				return isSingleTapHandled;
			}
		});
	}
	
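	// ARPost: presumably called from the renderer when a target is first
	// detected; starts playback only while mFirstRun is set.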
	public void letsPlayVideo(int i) {
		if (mFirstRun) {
			mVideoPlayerHelper[i].play(mPlayFullscreenVideo, mSeekPosition[i]);
			mSeekPosition[i] = VideoPlayerHelper.CURRENT_POSITION;
		}
		mFirstRun = false;
	}

	//////////////////////////////////////////////////////////////
	// ARPost: to change video once the starting video ends
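	// Presumably invoked by VideoPlayerHelper once on-texture playback of the
	// current movie reaches the end.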
	public void changeMovie() {
		if (shouldPlayNext) {
			// The start video has just ended
			shouldPlayNext = false;
			exchangeMovieName();
			Log.e(MYTAG, "First video finished, playing: " + mMovieName[1]);
			//mVideoPlayerHelper[0].play(mPlayFullscreenVideo, mSeekPosition[0]);
			mVideoPlayerHelper[1].stop();
			mVideoPlayerHelper[1].unload();
			mVideoPlayerHelper[1].load(mMovieName[1], MEDIA_TYPE.ON_TEXTURE_FULLSCREEN, true, 0);
			//mVideoPlayerHelper[1].play(mPlayFullscreenVideo, mSeekPosition[1]);
		} else {
			// The second video has just ended 
			// Start next activity
			shouldPlayNext = true;
			exchangeMovieName();
			Log.e(MYTAG, "Second video finished, loading: " + mMovieName[1]);
			mVideoPlayerHelper[1].stop();
			mVideoPlayerHelper[1].unload();
			mVideoPlayerHelper[1].load(mMovieName[1], MEDIA_TYPE.ON_TEXTURE_FULLSCREEN, false, 0);
			
			Intent intent = new Intent(VideoPlayback.this, TakePhoto.class);
			startActivity(intent);
			
			mFirstRun = true;
		}
	}
	
	private void exchangeMovieName() {
		String tmp = mMovieName[0];
		mMovieName[0] = mMovieName[1];
		mMovieName[1] = tmp;
	}
	
	/////////////////////////////////////////////////////////////

	// We want to load specific textures from the APK, which we will later
	// use for rendering.
	private void loadTextures() {
		mTextures.add(Texture.loadTextureFromApk("VideoPlayback/VuforiaSizzleReel_1.png", getAssets()));
		mTextures.add(Texture.loadTextureFromApk("VideoPlayback/VuforiaSizzleReel_2.png", getAssets()));
		mTextures.add(Texture.loadTextureFromApk("VideoPlayback/icon_play.png", getAssets()));
		mTextures.add(Texture.loadTextureFromApk("VideoPlayback/icon_busy.png", getAssets()));
		mTextures.add(Texture.loadTextureFromApk("VideoPlayback/icon_error.png", getAssets()));
	}
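	// Note: Texture.loadTextureFromApk() typically returns null when an asset
	// cannot be read, so a missing file would add a null entry to mTextures.
	// A defensive variant (illustrative sketch only, not used by this sample)
	// could skip such assets:
	private void loadTextureIfPresent(String assetPath) {
		Texture texture = Texture.loadTextureFromApk(assetPath, getAssets());
		if (texture != null)
			mTextures.add(texture);
		else
			Log.e(LOGTAG, "Could not load texture asset: " + assetPath);
	}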

	// Called when the activity will start interacting with the user.
	@Override
	protected void onResume() {
		Log.d(LOGTAG, "onResume");
		super.onResume();
		
		mFirstRun = true;

		// This is needed for some Droid devices to force portrait
		if (mIsDroidDevice) {
			setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
			setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
		}

		try {
			vuforiaAppSession.resumeAR();
			if (mIsInitialized && mContAutofocus) {
				CameraDevice.getInstance().setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO);
			}
		} catch (SampleApplicationException e) {
			Log.e(LOGTAG, e.getString());
		}

		// Resume the GL view:
		if (mGlView != null) {
			mGlView.setVisibility(View.VISIBLE);
			mGlView.onResume();
		}

		// Reload all the movies
		if (mRenderer != null) {
			for (int i = 0; i < NUM_TARGETS; i++) {
				if (!mReturningFromFullScreen) {
					mRenderer.requestLoad(i, mMovieName[i], mSeekPosition[i], false);
				} else {
					mRenderer.requestLoad(i, mMovieName[i], mSeekPosition[i], mWasPlaying[i]);
				}
			}
		}

		mReturningFromFullScreen = false;
	}

	// Called when returning from the full screen player
	@Override
	protected void onActivityResult(int requestCode, int resultCode, Intent data) {
		super.onActivityResult(requestCode, resultCode, data);
		Log.e(LOGTAG, "request code " + requestCode + " result code " + resultCode);
		
		if (requestCode == 1) {
			
			mActivity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);

			if (resultCode == RESULT_OK) {
				// The following values are used to indicate the position in
				// which the video was being played and whether it was being
				// played or not:
				String movieBeingPlayed = data.getStringExtra("movieName");
				mReturningFromFullScreen = true;

				// Find the movie that was being played full screen
				for (int i = 0; i < NUM_TARGETS; i++) {
					if (movieBeingPlayed.compareTo(mMovieName[i]) == 0) {
						mSeekPosition[i] = data.getIntExtra("currentSeekPosition", 0);
						mWasPlaying[i] = data.getBooleanExtra("playing", false);
					}
				}
			}
		}
	}
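	// For reference, the fullscreen player is expected to report its state back
	// roughly like this before finishing (a sketch; the variable names on the
	// player side are assumptions):
	//
	//   Intent result = new Intent();
	//   result.putExtra("movieName", movieName);
	//   result.putExtra("currentSeekPosition", currentSeekPosition);
	//   result.putExtra("playing", wasPlaying);
	//   setResult(RESULT_OK, result);
	//   finish();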

	@Override
	public void onConfigurationChanged(Configuration config) {
		Log.d(LOGTAG, "onConfigurationChanged");
		super.onConfigurationChanged(config);

		vuforiaAppSession.onConfigurationChanged();
	}

	// Called when the system is about to start resuming a previous activity.
	@Override
	protected void onPause() {
		Log.d(LOGTAG, "onPause");
		super.onPause();

		if (mGlView != null) {
			mGlView.setVisibility(View.INVISIBLE);
			mGlView.onPause();
		}

		// Store the playback state of the movies and unload them:
		for (int i = 0; i < NUM_TARGETS; i++) {
			// If the activity is paused we need to store the position at
			// which each movie was playing:
			if (mVideoPlayerHelper[i].isPlayableOnTexture()) {
				mSeekPosition[i] = mVideoPlayerHelper[i].getCurrentPosition();
				mWasPlaying[i] = mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.PLAYING;
			}

			// We also need to release the resources used by the helper, though
			// we don't need to destroy it:
			if (mVideoPlayerHelper[i] != null)
				mVideoPlayerHelper[i].unload();
		}

		mReturningFromFullScreen = false;

		// Turn off the flash
		if (mFlashOptionView != null && mFlash) {
			// OnCheckedChangeListener is called upon changing the checked state
			if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
				((Switch) mFlashOptionView).setChecked(false);
			} else {
				((CheckBox) mFlashOptionView).setChecked(false);
			}
		}

		try {
			vuforiaAppSession.pauseAR();
		} catch (SampleApplicationException e) {
			Log.e(LOGTAG, e.getString());
		}
	}

	// The final call you receive before your activity is destroyed.
	@Override
	protected void onDestroy() {
		Log.d(LOGTAG, "onDestroy");
		super.onDestroy();

		for (int i = 0; i < NUM_TARGETS; i++) {
			// If the activity is destroyed we need to release all resources:
			if (mVideoPlayerHelper[i] != null)
				mVideoPlayerHelper[i].deinit();
			mVideoPlayerHelper[i] = null;
		}

		try {
			vuforiaAppSession.stopAR();
		} catch (SampleApplicationException e) {
			Log.e(LOGTAG, e.getString());
		}

		// Unload the textures:
		mTextures.clear();
		mTextures = null;

		System.gc();
	}

	// Pause all movies except one
	// if the value of 'except' is -1 then
	// do a blanket pause
	private void pauseAll(int except) {
		// And pause all the playing videos:
		for (int i = 0; i < NUM_TARGETS; i++) {
			// We can make one exception to the pause all calls:
			if (i != except) {
				// Check if the video is playable on texture
				if (mVideoPlayerHelper[i].isPlayableOnTexture()) {
					// If it is playing then we pause it
					mVideoPlayerHelper[i].pause();
				}
			}
		}
	}

	// Pause all media before letting the system handle the back press.
	@Override
	public void onBackPressed() {
		pauseAll(-1);
		super.onBackPressed();
	}

	private void startLoadingAnimation() {
		LayoutInflater inflater = LayoutInflater.from(this);
		mUILayout = (RelativeLayout) inflater.inflate(R.layout.camera_overlay, null, false);

		mUILayout.setVisibility(View.VISIBLE);
		mUILayout.setBackgroundColor(Color.BLACK);

		// Gets a reference to the loading dialog
		loadingDialogHandler.mLoadingDialogContainer = mUILayout.findViewById(R.id.loading_indicator);

		// Shows the loading indicator at start
		loadingDialogHandler.sendEmptyMessage(LoadingDialogHandler.SHOW_LOADING_DIALOG);

		// Adds the inflated layout to the view
		addContentView(mUILayout, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));

		// Wire up the skip-video button:
		mSkipVideoButton = (Button) mUILayout.findViewById(R.id.button_videoplayback_next);
		mSkipVideoButton.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				Intent intent = new Intent(VideoPlayback.this, TakePhoto.class);
				startActivity(intent);
				finish(); // leave the AR activity once TakePhoto has been launched
			}
		});
		mAboutInfoButton = (Button) mUILayout.findViewById(R.id.button_videoplayback_about_info);
		mAboutInfoButton.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				Intent intent = new Intent(VideoPlayback.this, AboutInfo.class);
				startActivity(intent);
			}
		});
		
		mARBox = (ImageView) mUILayout.findViewById(R.id.camera_box);
		hasARBox = true;
	}

	// Initializes AR application components.
	private void initApplicationAR() {
		// Create OpenGL ES view:
		int depthSize = 16;
		int stencilSize = 0;
		boolean translucent = Vuforia.requiresAlpha();

		mGlView = new SampleApplicationGLView(this);
		mGlView.init(translucent, depthSize, stencilSize);

		mRenderer = new VideoPlaybackRenderer(this, vuforiaAppSession);
		mRenderer.setTextures(mTextures);

		// The renderer owns the OpenGL context, so loading into textures must
		// happen after the surface has been created. This means we can't load
		// the movies from this (UI) thread; instead we ask the GL thread to
		// load them once the surface exists.
		for (int i = 0; i < NUM_TARGETS; i++) {
			mRenderer.setVideoPlayerHelper(i, mVideoPlayerHelper[i]);
			mRenderer.requestLoad(i, mMovieName[i], 0, false);
		}

		mGlView.setRenderer(mRenderer);

		for (int i = 0; i < NUM_TARGETS; i++) {
			float[] temp = { 0f, 0f };
			mRenderer.targetPositiveDimensions[i].setData(temp);
			mRenderer.videoPlaybackTextureID[i] = -1;
		}

	}

	// We do not handle the touch event here; we just forward it to the
	// gesture detector.
	@Override
	public boolean onTouchEvent(MotionEvent event) {
		boolean result = mGestureDetector.onTouchEvent(event);

		// Process the Gestures
		/*
		if (!result && mSampleAppMenu != null)
			result = mSampleAppMenu.processEvent(event);
			*/

		return result;
	}

	@Override
	public boolean doInitTrackers() {
		// Indicate if the trackers were initialized correctly
		boolean result = true;

		// Initialize the image tracker:
		TrackerManager trackerManager = TrackerManager.getInstance();
		Tracker tracker = trackerManager.initTracker(ImageTracker.getClassType());
		if (tracker == null) {
			Log.d(LOGTAG, "Failed to initialize ImageTracker.");
			result = false;
		}

		return result;
	}

	@Override
	public boolean doLoadTrackersData() {
		// Get the image tracker:
		TrackerManager trackerManager = TrackerManager.getInstance();
		ImageTracker imageTracker = (ImageTracker) trackerManager.getTracker(ImageTracker.getClassType());
		if (imageTracker == null) {
			Log.d(LOGTAG, "Failed to load tracking data set because the ImageTracker has not been initialized.");
			return false;
		}

		// Create the data sets:
		dataSetStonesAndChips = imageTracker.createDataSet();
		if (dataSetStonesAndChips == null) {
			Log.d(LOGTAG, "Failed to create a new tracking data set.");
			return false;
		}

		// Load the data sets:
		if (!dataSetStonesAndChips.load("StonesAndChips.xml", 1/*DataSet.STORAGE_TYPE.STORAGE_APPRESOURCE*/)) {
			Log.d(LOGTAG, "Failed to load data set.");
			return false;
		}

		// Activate the data set:
		if (!imageTracker.activateDataSet(dataSetStonesAndChips)) {
			Log.d(LOGTAG, "Failed to activate data set.");
			return false;
		}

		Log.d(LOGTAG, "Successfully loaded and activated data set.");
		return true;
	}

	@Override
	public boolean doStartTrackers() {
		// Indicate if the trackers were started correctly
		boolean result = true;

		Tracker imageTracker = TrackerManager.getInstance().getTracker(ImageTracker.getClassType());
		if (imageTracker != null) {
			imageTracker.start();
			Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 2);
		} else
			result = false;

		return result;
	}

	@Override
	public boolean doStopTrackers() {
		// Indicate if the trackers were stopped correctly
		boolean result = true;

		Tracker imageTracker = TrackerManager.getInstance().getTracker(ImageTracker.getClassType());
		if (imageTracker != null)
			imageTracker.stop();
		else
			result = false;

		return result;
	}

	@Override
	public boolean doUnloadTrackersData() {
		// Indicate if the trackers were unloaded correctly
		boolean result = true;

		// Get the image tracker:
		TrackerManager trackerManager = TrackerManager.getInstance();
		ImageTracker imageTracker = (ImageTracker) trackerManager.getTracker(ImageTracker.getClassType());
		if (imageTracker == null) {
			Log.d(LOGTAG, "Failed to destroy the tracking data set because the ImageTracker has not been initialized.");
			return false;
		}

		if (dataSetStonesAndChips != null) {
			if (imageTracker.getActiveDataSet() == dataSetStonesAndChips && !imageTracker.deactivateDataSet(dataSetStonesAndChips)) {
				Log.d(LOGTAG, "Failed to destroy the tracking data set StonesAndChips because the data set could not be deactivated.");
				result = false;
			} else if (!imageTracker.destroyDataSet(dataSetStonesAndChips)) {
				Log.d(LOGTAG, "Failed to destroy the tracking data set StonesAndChips.");
				result = false;
			}

			dataSetStonesAndChips = null;
		}

		return result;
	}

	@Override
	public boolean doDeinitTrackers() {
		// Indicate if the trackers were deinitialized correctly
		boolean result = true;

		// Deinit the image tracker:
		TrackerManager trackerManager = TrackerManager.getInstance();
		trackerManager.deinitTracker(ImageTracker.getClassType());

		return result;
	}

	@Override
	public void onInitARDone(SampleApplicationException exception) {

		if (exception == null) {
			initApplicationAR();

			mRenderer.mIsActive = true;

			// Now add the GL surface view. It is important
			// that the OpenGL ES surface view gets added
			// BEFORE the camera is started and video
			// background is configured.
			addContentView(mGlView, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));

			// Sets the UILayout to be drawn in front of the camera
			mUILayout.bringToFront();

			// Hides the Loading Dialog
			loadingDialogHandler.sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);

			// Sets the layout background to transparent
			mUILayout.setBackgroundColor(Color.TRANSPARENT);

			try {
				vuforiaAppSession.startAR(CameraDevice.CAMERA.CAMERA_DEFAULT);
			} catch (SampleApplicationException e) {
				Log.e(LOGTAG, e.getString());
			}

			boolean result = CameraDevice.getInstance().setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO);

			if (result)
				mContAutofocus = true;
			else
				Log.e(LOGTAG, "Unable to enable continuous autofocus");

			/*
			mSampleAppMenu = new SampleAppMenu(this, this, "Video Playback", mGlView, mUILayout, null);
			setSampleAppMenuSettings();
			*/

			mIsInitialized = true;

		} else {
			Log.e(LOGTAG, exception.getString());
			finish();
		}

	}

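	// Called by the sample framework's Vuforia update callback once per camera
	// frame; this sample does not use it.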
	@Override
	public void onQCARUpdate(State state) {
		//Log.i(MYTAG, mVideoPlayerHelper[1].getCurrentPosition() + "");
		//Log.i(MYTAG, mVideoPlayerHelper[1].getLength() + "");
	}

	final private static int CMD_BACK = -1;
	final private static int CMD_EXTENDED_TRACKING = 1;
	final private static int CMD_AUTOFOCUS = 2;
	final private static int CMD_FLASH = 3;
	final private static int CMD_FULLSCREEN_VIDEO = 4;
	final private static int CMD_CAMERA_FRONT = 5;
	final private static int CMD_CAMERA_REAR = 6;

	// This method sets the menu's settings
	private void setSampleAppMenuSettings() {
		SampleAppMenuGroup group;

		group = mSampleAppMenu.addGroup("", false);
		group.addTextItem(getString(R.string.menu_back), CMD_BACK);

		group = mSampleAppMenu.addGroup("", true);
		group.addSelectionItem(getString(R.string.menu_extended_tracking), CMD_EXTENDED_TRACKING, false);
		group.addSelectionItem(getString(R.string.menu_contAutofocus), CMD_AUTOFOCUS, mContAutofocus);
		mFlashOptionView = group.addSelectionItem(getString(R.string.menu_flash), CMD_FLASH, false);
		if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
			group.addSelectionItem(getString(R.string.menu_playFullscreenVideo), CMD_FULLSCREEN_VIDEO, mPlayFullscreenVideo);
		}

		CameraInfo ci = new CameraInfo();
		boolean deviceHasFrontCamera = false;
		boolean deviceHasBackCamera = false;
		for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
			Camera.getCameraInfo(i, ci);
			if (ci.facing == CameraInfo.CAMERA_FACING_FRONT)
				deviceHasFrontCamera = true;
			else if (ci.facing == CameraInfo.CAMERA_FACING_BACK)
				deviceHasBackCamera = true;
		}

		if (deviceHasBackCamera && deviceHasFrontCamera) {
			group = mSampleAppMenu.addGroup(getString(R.string.menu_camera), true);
			group.addRadioItem(getString(R.string.menu_camera_front), CMD_CAMERA_FRONT, false);
			group.addRadioItem(getString(R.string.menu_camera_back), CMD_CAMERA_REAR, true);
		}

		mSampleAppMenu.attachMenu();
	}

	@Override
	public boolean menuProcess(int command) {

		boolean result = true;

		switch (command) {
		case CMD_BACK:
			finish();
			break;

		case CMD_FLASH:
			result = CameraDevice.getInstance().setFlashTorchMode(!mFlash);

			if (result) {
				mFlash = !mFlash;
			} else {
				showToast(getString(mFlash ? R.string.menu_flash_error_off : R.string.menu_flash_error_on));
				Log.e(LOGTAG, getString(mFlash ? R.string.menu_flash_error_off : R.string.menu_flash_error_on));
			}
			break;

		case CMD_AUTOFOCUS:

			if (mContAutofocus) {
				result = CameraDevice.getInstance().setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_NORMAL);

				if (result) {
					mContAutofocus = false;
				} else {
					showToast(getString(R.string.menu_contAutofocus_error_off));
					Log.e(LOGTAG, getString(R.string.menu_contAutofocus_error_off));
				}
			} else {
				result = CameraDevice.getInstance().setFocusMode(CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO);

				if (result) {
					mContAutofocus = true;
				} else {
					showToast(getString(R.string.menu_contAutofocus_error_on));
					Log.e(LOGTAG, getString(R.string.menu_contAutofocus_error_on));
				}
			}

			break;

		case CMD_FULLSCREEN_VIDEO:
			mPlayFullscreenVideo = !mPlayFullscreenVideo;

			for (int i = 0; i < mVideoPlayerHelper.length; i++) {
				if (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.PLAYING) {
					// If it is playing then we pause it
					mVideoPlayerHelper[i].pause();

					mVideoPlayerHelper[i].play(true, mSeekPosition[i]);
				}
			}
			break;

		case CMD_CAMERA_FRONT:
		case CMD_CAMERA_REAR:

			// Turn off the flash
			if (mFlashOptionView != null && mFlash) {
				// OnCheckedChangeListener is called upon changing the checked
				// state
				if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
					((Switch) mFlashOptionView).setChecked(false);
				} else {
					((CheckBox) mFlashOptionView).setChecked(false);
				}
			}

			doStopTrackers();
			CameraDevice.getInstance().stop();
			CameraDevice.getInstance().deinit();
			try {
				vuforiaAppSession.startAR(command == CMD_CAMERA_FRONT ? CameraDevice.CAMERA.CAMERA_FRONT : CameraDevice.CAMERA.CAMERA_BACK);
			} catch (SampleApplicationException e) {
				showToast(e.getString());
				Log.e(LOGTAG, e.getString());
				result = false;
			}
			doStartTrackers();
			break;

		case CMD_EXTENDED_TRACKING:
			for (int tIdx = 0; tIdx < dataSetStonesAndChips.getNumTrackables(); tIdx++) {
				Trackable trackable = dataSetStonesAndChips.getTrackable(tIdx);

				if (!mExtendedTracking) {
					if (!trackable.startExtendedTracking()) {
						Log.e(LOGTAG, "Failed to start extended tracking target");
						result = false;
					} else {
						Log.d(LOGTAG, "Successfully started extended tracking target");
					}
				} else {
					if (!trackable.stopExtendedTracking()) {
						Log.e(LOGTAG, "Failed to stop extended tracking target");
						result = false;
					} else {
						Log.d(LOGTAG, "Successfully stopped extended tracking target");
					}
				}
			}

			if (result)
				mExtendedTracking = !mExtendedTracking;

			break;

		}

		return result;
	}
	
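	// ARPost: hand the second target's movie over to fullscreen playback,
	// resuming from its current position.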
	private void playInFullscreen() {
		mPlayFullscreenVideo = true;
		mSeekPosition[1] = mVideoPlayerHelper[1].getCurrentPosition();
		mVideoPlayerHelper[1].play(mPlayFullscreenVideo, mSeekPosition[1]);
	}

	private void showToast(String text) {
		Toast.makeText(this, text, Toast.LENGTH_SHORT).show();
	}
}
