package com.toyota.toyotaownerspoc.barcode;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.ChecksumException;
import com.google.zxing.DecodeHintType;
import com.google.zxing.FormatException;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.NotFoundException;
import com.google.zxing.Reader;
import com.google.zxing.Result;
import com.google.zxing.common.HybridBinarizer;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.PreviewCallback;
import android.hardware.SensorManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.Toast;
import com.toyota.toyotaownerspoc.R;
import com.toyota.toyotaownerspoc.service.Service;
import com.toyota.toyotaownerspoc.takeAPicture.CameraUtility;
import com.toyota.toyotaownerspoc.util.ImageUtility;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import android.view.OrientationEventListener;
import utility.FileUtility;
import utility.ScreenUtility;
import android.content.Context;
import android.content.Intent;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.hardware.Camera.Size;
import android.view.SurfaceHolder;

public class ScanVinFromBarcodeActivity extends Activity {
	// camera object that is used globally in this activity and also passed
	// reference to PreviewSurface inner class
	// camera object that is used globally in this activity and also passed
	// by reference to the CameraPreview inner class
	private Camera globalCamera;
	// id returned by CameraUtility.findFrontFacingCamera(); negative = none
	private int cameraId = 0;
	// global flag whether a camera has been detected
	private boolean isThereACamera = false;
	// root layout for this activity; the CameraPreview gets added to it
	private RelativeLayout RelativeLayoutBarcodeScanner = null;
	// CameraPreview extends SurfaceView and displays the live camera preview
	private CameraPreview newCameraPreview = null;
	// SurfaceView looked up from the inflated xml layout
	private SurfaceView surfaceViewBarcodeScanner = null;
	// true while Camera.startPreview() is active
	private boolean cameraPreviewing = false;
	// this continueToAutomaticallyDecode flag is initially set to TRUE, but
	// will be set to FALSE on the first successful decode OR when a crucial
	// method in the code process fails or throws an exception
	private volatile boolean continueToAutomaticallyDecode = true;
	// global flag used to indicate picture taking & decoding is in progress
	private volatile boolean takingPictureAndDecodeInprogress = false;
	// Bitmap options (down-sampling, quality-over-speed) used when decoding
	// the camera JPEG bytes; built in initializeGlobalCamera()
	BitmapFactory.Options options = null;
	// 90-degree rotation matrix; used for samsung galaxy s devices only
	private Matrix rotationMatrix90CounterClockWise = null;
	// Reader is a class from zxing used to decode barcodes
	Reader reader = null;
	// DecodeHintType hashtable provides hints (TRY_HARDER, PURE_BARCODE,
	// CODE_39 check digit) to the zxing Reader
	Hashtable<DecodeHintType, Object> decodeHints = null;
	// set to true when the preview surface is touched (manual focus trigger)
	private boolean onTouchEvent = true;
	// device-rotation listener; its callback body is currently commented out
	private OrientationEventListener orientationEventListener = null;
	// 1 means the screen is PORTRAIT and 2 means screen is LANDSCAPE
	private int latestScreenOrientation = 1;
	// NOTE(review): never assigned in this chunk — confirm it is written
	// elsewhere in the class before relying on it
	private String globalVIN = null;
	// handler created in onCreate() on the UI thread
	private Handler handler = null;
	// context taken from the root layout in onCreate()
	private Context globalContext = null; 


	/** @return whether the automatic decode loop should keep running */
	public boolean isContinueToAutomaticallyDecode() {
		return this.continueToAutomaticallyDecode;
	}

	/**
	 * Enables or disables the automatic decode loop; callers set this to
	 * false after the first successful decode or on a fatal error.
	 */
	public void setContinueToAutomaticallyDecode(
			boolean continueToAutomaticallyDecode) {
		this.continueToAutomaticallyDecode = continueToAutomaticallyDecode;
	}

	/** @return true while a capture-and-decode cycle is still running */
	public boolean isTakingPictureAndDecodeInprogress() {
		return this.takingPictureAndDecodeInprogress;
	}

	/**
	 * Marks a capture-and-decode cycle as started (true) or finished
	 * (false) so overlapping captures can be avoided.
	 */
	public void setTakingPictureAndDecodeInprogress(
			boolean takingPictureAndDecodeInprogress) {
		this.takingPictureAndDecodeInprogress = takingPictureAndDecodeInprogress;
	}

	/*
	 * This method , finds FEATURE_CAMERA, opens the camera, set parameters ,
	 * add CameraPreview to layout, set camera surface holder, start preview
	 */
	/*
	 * Finds FEATURE_CAMERA, opens the camera, applies focus-mode and
	 * picture-format parameters, prepares the decode helpers (BitmapFactory
	 * options, rotation matrix, zxing reader + hints), adds the
	 * CameraPreview to the layout, hooks up the surface holder and starts
	 * the preview.
	 *
	 * BUGFIX: the original called globalCamera.getParameters()
	 * .setFocusMode(...) and .setPictureFormat(...) without ever calling
	 * Camera.setParameters(...). Camera.getParameters() returns a copy, so
	 * those settings were silently discarded. One Parameters instance is now
	 * mutated and applied once.
	 */
	@SuppressLint("InlinedApi")
	private void initializeGlobalCamera() {

		try {
			if (!getPackageManager().hasSystemFeature(
					PackageManager.FEATURE_CAMERA)) {
				Toast.makeText(this, "No camera on this device",
						Toast.LENGTH_LONG).show();
			} else { // check for front camera, and get the ID
				cameraId = CameraUtility.findFrontFacingCamera();
				if (cameraId < 0) {
					Toast.makeText(this, "No front facing camera found.",
							Toast.LENGTH_LONG).show();
				} else {
					Log.d("ClassScanViewBarcodeActivity",
							"camera was found , ID: " + cameraId);
					// camera was found, set global camera flag to true
					isThereACamera = true;
					// OPEN (lazily, via the cached accessor)
					globalCamera = getGlobalCamera(cameraId);

					// mutate ONE Parameters copy and apply it once below
					Camera.Parameters parameters = globalCamera
							.getParameters();

					// prefer continuous autofocus; fall back to single-shot
					boolean continuousSupported = CameraUtility
							.isFocusModeSupported(
									globalCamera,
									Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
					Log.d("ClassScanViewBarcodeActivity initializeGlobalCamera() is FOCUS_MODE_CONTINUOUS_PICTURE supported? ",
							String.valueOf(continuousSupported));
					if (continuousSupported) {
						parameters
								.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
					} else {
						boolean autoSupported = CameraUtility
								.isFocusModeSupported(globalCamera,
										Camera.Parameters.FOCUS_MODE_AUTO);
						Log.d("ClassScanViewBarcodeActivity initializeGlobalCamera() is FOCUS_MODE_AUTO supported? ",
								String.valueOf(autoSupported));
						if (autoSupported) {
							parameters
									.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
						} else {
							Toast.makeText(
									this,
									"Autofocus is not supported, barcode scanning might not work.",
									Toast.LENGTH_LONG).show();
						}
					}
					// set picture format to JPEG, make sure it is JPEG
					parameters.setPictureFormat(ImageFormat.JPEG);
					// apply the accumulated settings to the hardware
					globalCamera.setParameters(parameters);

					optimizeCameraForBarcode(globalCamera);

					/*
					 * Early setup of the variables used in jpegCallback, so
					 * the callback itself stays fast.
					 */
					options = new BitmapFactory.Options();
					// down-sample the captured image to avoid OutOfMemory
					options.inSampleSize = 4;
					// image quality over speed helps early barcode detection
					options.inPreferQualityOverSpeed = true;
					// Samsung Galaxy S only: rotation applied before decode
					rotationMatrix90CounterClockWise = new Matrix();
					rotationMatrix90CounterClockWise.postRotate(90);

					// hints consumed by the zxing decode() call
					decodeHints = new Hashtable<DecodeHintType, Object>();
					decodeHints.put(DecodeHintType.TRY_HARDER, Boolean.TRUE);
					decodeHints.put(DecodeHintType.PURE_BARCODE, Boolean.TRUE);
					decodeHints.put(DecodeHintType.ASSUME_CODE_39_CHECK_DIGIT,
							Boolean.TRUE);
					reader = new MultiFormatReader();

					CameraUtility.turnOnFlashlight(globalCamera, this);

					// preview surface; a touch triggers a manual autofocus
					newCameraPreview = new CameraPreview(this, globalCamera) {

						@Override
						public boolean onTouchEvent(MotionEvent event) {
							Log.d("ClassScanViewBarcodeActivity",
									" onTouchEvent(MotionEvent event) ");
							onTouchEvent = true;
							globalCamera
									.autoFocus(autoFocusCallbackForAutomaticScan);
							return super.onTouchEvent(event);
						}
					};

					// pass CameraPreview to the layout
					RelativeLayoutBarcodeScanner.addView(newCameraPreview);

					// give the SurfaceView's holder to the camera object
					globalCamera.setPreviewDisplay(surfaceViewBarcodeScanner
							.getHolder());

					// PREVIEW
					if (cameraPreviewing != true) {
						globalCamera.startPreview();
					}

					Log.d("ClassScanViewBarcodeActivity",
							"camera opened & previewing");
				}
			}// end else, check for front camera
		}// end try
		catch (Exception exc) {
			// in case of exception release resources & cleanup
			if (globalCamera != null) {
				globalCamera.stopPreview();
				cameraPreviewing = false;
				globalCamera.setPreviewCallback(null);
				globalCamera.release();
				globalCamera = null;
				options = null;
				rotationMatrix90CounterClockWise = null;
				reader = null;
			}
			// getMessage() may be null; String.valueOf avoids a nested NPE
			Log.d("ClassScanViewBarcodeActivity initializeGlobalCamera() exception:",
					String.valueOf(exc.getMessage()));
			exc.printStackTrace();
		}// end catch

	}// end initializeGlobalCamera()

	// onCreate: inflates the layout, looks up the preview views, opens and
	// configures the camera, and prepares the orientation listener and the
	// UI-thread handler
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_barcode_vin_scanner);
		Log.d("ClassScanViewBarcodeActivity", "onCreate ");

		// container layout and the surface used for previewing camera frames
		RelativeLayoutBarcodeScanner = (RelativeLayout) findViewById(R.id.LayoutForPreview);
		surfaceViewBarcodeScanner = (SurfaceView) findViewById(R.id.surfaceViewBarcodeScanner);

		// opens the camera, applies parameters and starts the preview
		initializeGlobalCamera();
		
		
// cache the layout's context for later use (guard against a failed lookup)
if (RelativeLayoutBarcodeScanner!=null)
{
	globalContext = RelativeLayoutBarcodeScanner.getContext();
}
		
		

		// instantiate orientationEventListener; its callback body is fully
		// commented out, so it currently reacts to nothing
		orientationEventListener = new OrientationEventListener(this,
				SensorManager.SENSOR_DELAY_NORMAL) {

			@Override
			public void onOrientationChanged(int arg0) {

				/*
				 * latestScreenOrientation = ScreenUtility
				 * .getScreenOrientation(
				 * RelativeLayoutBarcodeScanner.getContext());
				 * 
				 * Log.d("ClassScanViewBarcodeActivity",
				 * "latestScreenOrientation: " + latestScreenOrientation);
				 * 
				 * if (orientationEventListener.canDetectOrientation()) {
				 * orientationEventListener.enable();
				 * Log.d("ClassScanViewBarcodeActivity",
				 * "enabled orientationEventListener: " +
				 * String.valueOf(orientationEventListener
				 * .canDetectOrientation())); } else {
				 * Log.d("ClassScanViewBarcodeActivity",
				 * "enabled orientationEventListener: " +
				 * String.valueOf(orientationEventListener
				 * .canDetectOrientation())); }
				 */
			}

		};

		// handler bound to the UI thread for posting work from callbacks

		handler = new Handler();
		


	}// end onCreate
	
	/**
	 * Releases the camera and stops previewing. Previously this exact
	 * cleanup was duplicated inline in onBackPressed, onStop and onPause.
	 */
	private void releaseCameraAndStopPreview() {
		if (globalCamera != null) {
			globalCamera.stopPreview();
			cameraPreviewing = false;
			globalCamera.setPreviewCallback(null);
			globalCamera.release();
			globalCamera = null;
		}
	}

	@Override
	public void onBackPressed() {
		// release the camera before navigating away
		releaseCameraAndStopPreview();
		setContinueToAutomaticallyDecode(false);
		// NOTE(review): navigates to Service without calling finish() or
		// super.onBackPressed(), so this activity stays on the back stack —
		// confirm that is intentional
		Intent i = new Intent(this.getApplicationContext(), Service.class);
		startActivity(i);
	}

	@Override
	protected void onResume() {
		Log.d("ClassScanViewBarcodeActivity, onResume() globalCamera:",
				String.valueOf(globalCamera));
		// re-open and re-configure the camera released in onPause/onStop
		initializeGlobalCamera();
		super.onResume();
	}

	@Override
	protected void onStop() {
		releaseCameraAndStopPreview();
		super.onStop();
	}

	@Override
	protected void onPause() {
		if (globalCamera != null) {
			releaseCameraAndStopPreview();
			// also drop the decode helpers; initializeGlobalCamera()
			// rebuilds them on resume
			options = null;
			rotationMatrix90CounterClockWise = null;
			reader = null;
		}
		super.onPause();
	}// end onPause()

	/**
	 * Records the latest screen orientation (1 = portrait, 2 = landscape)
	 * on rotation. Added the missing @Override and a null guard for the
	 * layout, which may be null if onCreate failed to find it.
	 */
	@Override
	public void onConfigurationChanged(Configuration newConfig) {
		super.onConfigurationChanged(newConfig);

		if (RelativeLayoutBarcodeScanner != null) {
			latestScreenOrientation = ScreenUtility
					.getScreenOrientation(RelativeLayoutBarcodeScanner
							.getContext());

			Log.d("ClassScanViewBarcodeActivity", "latestScreenOrientation: "
					+ latestScreenOrientation);
		}
	}

	/*
	 * Implement PictureCallback's onPictureTaken(byte[] imgData, Camera camera)
	 * method. 1) create down-sampled the image via options.inSampleSize , in
	 * order to make image resolution smaller & prevent MemoryOutOfBounds
	 * exception. 2) Samsung galaxy S only , rotate to correct orientation ,and
	 * capture only the image within the guidance rectangle 3) calculate
	 * relative x,y & width & height , because SurfaceView coordinates are
	 * different than the coordinates of the image 4) crop & only take
	 * image/data that is within the green bounding rectangle in order to
	 * reduce/crop amount of extra information for zxing to process. 5) scale
	 * image to help zxing computationally by reducing image size 6) save image
	 * to sd card for testing & sanity check to see if actual image was taken 7)
	 * decode the VIN & set the value of GUI text component
	 */
	PictureCallback jpegCallback = new PictureCallback() {

		/**
		 * Hands the captured JPEG bytes to a background decode task
		 * (assumption 3: rotate 90 degrees, original colors) on the shared
		 * thread pool so decoding never blocks the UI thread.
		 */
		@SuppressLint("NewApi")
		public void onPictureTaken(byte[] imgData, Camera camera) {

			if (imgData == null || camera == null) {
				Log.d("ClassScanViewBarcodeActivity", "onPictureTaken()");
				return;
			}

			asyncDecodeTask decodeTask = new asyncDecodeTask();
			decodeTask.setImgDataFromCamera(imgData);
			decodeTask.setCamera(camera);
			decodeTask.setAssumption(3);
			decodeTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
		}
	};// jpegCallback implementation

	/**
	 * Converts raw JPEG camera bytes into a cropped, rotated, grayscale
	 * bitmap, then attempts to decode a VIN barcode from it with zxing.
	 *
	 * Steps: 1) down-sample via options.inSampleSize to avoid OutOfMemory,
	 * 2) rotate 90 degrees (Samsung Galaxy S orientation quirk), 3) convert
	 * to grayscale (inverting colors for assumptions 1 and 2), 4) crop
	 * roughly to the on-screen guidance rectangle, 5) rotate per assumption,
	 * 6) decode and save the intermediate image to the SD card for testing.
	 *
	 * BUGFIXES vs. the original: the return is no longer inside finally
	 * (which swallowed Errors and needed @SuppressWarnings), and
	 * startPreview() in finally is now null-guarded — the camera can be
	 * released by onPause/onStop while a decode is still running.
	 *
	 * @param imgData    JPEG bytes delivered by Camera.PictureCallback
	 * @param camera     the camera that took the picture (not used directly)
	 * @param assumption 1 = rotate 270 + invert, 2 = rotate 90 + invert,
	 *                   3 = rotate 90 only; any other value skips transform
	 * @return the decoded VIN text, or null when nothing could be decoded
	 */
	public String cameraBytesToBitmapAndDecode(byte[] imgData, Camera camera,
			int assumption) {
		Bitmap bmpOfTheImageFromCamera = null;
		Bitmap croppedBitmap = null;
		BinaryBitmap binaryBitmap = null;
		byte[] bytesImage = null;
		String nameOfTheFileForTesting = null;
		String VIN = null;
		try {
			Log.d("ClassScanViewBarcodeActivity",
					"cameraBytesToBitmapAndDecode()");

			// 1) decode bytes down-sampled to prevent OutOfMemory
			bmpOfTheImageFromCamera = BitmapFactory.decodeByteArray(imgData, 0,
					imgData.length, options);

			if (bmpOfTheImageFromCamera != null) {

				// Samsung Galaxy S only: correct the sensor rotation
				bmpOfTheImageFromCamera = Bitmap.createBitmap(
						bmpOfTheImageFromCamera, 0, 0,
						bmpOfTheImageFromCamera.getWidth(),
						bmpOfTheImageFromCamera.getHeight(),
						rotationMatrix90CounterClockWise, true);

				// crop window approximating the on-screen guidance rectangle
				int startCropX = (int) (bmpOfTheImageFromCamera.getWidth() / 3.5);
				int startCropY = (int) Math.floor(bmpOfTheImageFromCamera
						.getHeight() / 20);
				int endCropX = bmpOfTheImageFromCamera.getWidth()
						- (startCropX * 2);
				int endCropY = bmpOfTheImageFromCamera.getHeight()
						- (startCropY * 2);

				// The three assumptions differ only in rotation angle,
				// whether colors are inverted, and the debug file suffix;
				// the original duplicated the whole pipeline per branch.
				boolean knownAssumption = true;
				int rotationDegrees = 0;
				boolean invertColors = false;
				if (assumption == 1) {
					rotationDegrees = 270;
					invertColors = true;
					nameOfTheFileForTesting = "_270_grey";
				} else if (assumption == 2) {
					rotationDegrees = 90;
					invertColors = true;
					nameOfTheFileForTesting = "_90_inverted";
				} else if (assumption == 3) {
					rotationDegrees = 90;
					invertColors = false;
					nameOfTheFileForTesting = "_90_original_color";
				} else {
					// unknown assumption: skip the transform entirely so
					// croppedBitmap stays null, matching original behavior
					knownAssumption = false;
				}

				if (knownAssumption) {
					// grayscale helps zxing's binarizer
					bmpOfTheImageFromCamera = BitmapUtility
							.toGrayscale(bmpOfTheImageFromCamera);
					if (invertColors) {
						// white barcode on black background
						bmpOfTheImageFromCamera = BitmapUtility
								.invert(bmpOfTheImageFromCamera);
					}
					// crop, then rotate
					croppedBitmap = Bitmap.createBitmap(
							bmpOfTheImageFromCamera, startCropX, startCropY,
							endCropX, endCropY);
					Matrix rotate = new Matrix();
					rotate.postRotate(rotationDegrees);
					croppedBitmap = Bitmap.createBitmap(croppedBitmap, 0, 0,
							croppedBitmap.getWidth(),
							croppedBitmap.getHeight(), rotate, true);
				}

				// convert Bitmap to BinaryBitmap for zxing
				binaryBitmap = cameraBytesToBinaryBitmap(croppedBitmap);
				// kept for saving debug images to the SD card
				bytesImage = ImageUtility.bitmapToByteArray(croppedBitmap);

				if (binaryBitmap != null && reader != null
						&& decodeHints != null) {

					// decode the VIN
					VIN = decodeBitmapToString(binaryBitmap, reader,
							decodeHints);

					Log.d("***ClassScanViewBarcodeActivity ,cameraBytesToBitmapAndDecode: VIN ",
							String.valueOf(VIN));

					if (VIN != null) {
						// first successful decode: stop automatic scanning;
						// further decodes only via manual touch
						setContinueToAutomaticallyDecode(false);
						nameOfTheFileForTesting = nameOfTheFileForTesting
								.concat("success");
					} else {
						nameOfTheFileForTesting = nameOfTheFileForTesting
								.concat("fail");
					}
					// save the processed frame for testing / sanity checks
					CameraUtility.savePictureInSDcardPicturesDirectory(
							bytesImage, nameOfTheFileForTesting);
				} else {
					Log.d("ClassScanViewBarcodeActivity ,cameraBytesToBitmapAndDecode(): bitmap=",
							String.valueOf(binaryBitmap));
					Log.d("ClassScanViewBarcodeActivity ,cameraBytesToBitmapAndDecode(): reader=",
							String.valueOf(reader));
					Log.d("ClassScanViewBarcodeActivity ,cameraBytesToBitmapAndDecode(): decodeHints=",
							String.valueOf(decodeHints));
				}
			} else {
				Log.d("ClassScanViewBarcodeActivity , cameraBytesToBitmapAndDecode(): bmpOfTheImageFromCamera = ",
						String.valueOf(bmpOfTheImageFromCamera));
			}
		} catch (Exception exc) {
			exc.printStackTrace();
			// getMessage() may be null; String.valueOf avoids a nested NPE
			Log.d("ClassScanViewBarcodeActivity , cameraBytesToBitmapAndDecode: exception = ",
					String.valueOf(exc.getMessage()));
			setContinueToAutomaticallyDecode(false);
		} finally {
			// cleanup, restart the preview, release the in-progress flag
			if (bmpOfTheImageFromCamera != null) {
				bmpOfTheImageFromCamera.recycle();
				bmpOfTheImageFromCamera = null;
			}
			if (croppedBitmap != null) {
				croppedBitmap.recycle();
				croppedBitmap = null;
			}
			binaryBitmap = null;
			bytesImage = null;
			// BUGFIX: null guard — the camera may have been released while
			// this background decode was running
			if (globalCamera != null) {
				globalCamera.startPreview();
				cameraPreviewing = true;
			}
			setTakingPictureAndDecodeInprogress(false);
		}
		return VIN;
	}// end cameraBytesToBitmapAndDecode()

	/**
	 * Lazily opens and caches the camera for the given id; subsequent calls
	 * return the already-open instance.
	 */
	public Camera getGlobalCamera(int CameraId) {
		if (globalCamera != null) {
			return globalCamera;
		}
		// OPEN
		globalCamera = Camera.open(CameraId);
		return globalCamera;
	}

	/**
	 * Attempts to decode barcode data inside the given BinaryBitmap using
	 * the supplied zxing Reader and hints (TRY_HARDER, PURE_BARCODE,
	 * ASSUME_CODE_39_CHECK_DIGIT are installed by initializeGlobalCamera()).
	 *
	 * BUGFIXES vs. the original: zxing's NotFoundException — thrown for
	 * every frame without a barcode — typically carries a null message, and
	 * Log.d(tag, null) throws; messages are now wrapped in String.valueOf.
	 * The return also moved out of finally (it swallowed Errors and needed
	 * @SuppressWarnings("finally")).
	 *
	 * @return the decoded text, or null when nothing could be decoded
	 */
	public String decodeBitmapToString(BinaryBitmap bitmap, Reader reader,
			Hashtable<DecodeHintType, Object> DecodeHints) {

		Log.d("ClassScanViewBarcodeActivity",
				"decodeBitmapToString(BinaryBitmap bitmap");

		String textResult = null;

		try {
			if (bitmap != null) {
				Result result = reader.decode(bitmap, DecodeHints);

				if (result != null) {
					Log.d("decodeBitmapToString (BinaryBitmap bitmap): result = ",
							String.valueOf(result));

					textResult = result.getText();

					Log.d("decodeBitmapToString (BinaryBitmap bitmap): textResult  = ",
							textResult);
				} else {
					Log.d("decodeBitmapToString (BinaryBitmap bitmap): result = ",
							"null");
				}
			} else {
				Log.d("decodeBitmapToString (BinaryBitmap bitmap): bitmap = ",
						"null");
			}
		} catch (NotFoundException e) {
			// expected whenever no barcode is present in the frame
			e.printStackTrace();
			Log.d("ClassScanViewBarcodeActivity, NotFoundException:",
					String.valueOf(e.getMessage()));
		} catch (ChecksumException e) {
			e.printStackTrace();
			Log.d("ClassScanViewBarcodeActivity, ChecksumException:",
					String.valueOf(e.getMessage()));
		} catch (FormatException e) {
			e.printStackTrace();
			Log.d("ClassScanViewBarcodeActivity, FormatException:",
					String.valueOf(e.getMessage()));
		} catch (NullPointerException e) {
			e.printStackTrace();
			Log.d("ClassScanViewBarcodeActivity, NullPointerException:",
					String.valueOf(e.getMessage()));
		}

		return textResult;
	}// end decodeBitmapToString()

	/**
	 * Wraps a Bitmap in a zxing BinaryBitmap (RGB luminance source + hybrid
	 * binarizer) ready for decoding.
	 *
	 * BUGFIX: return moved out of finally (which swallowed Errors and
	 * required @SuppressWarnings("finally")); exception message is wrapped
	 * in String.valueOf because getMessage() may be null.
	 *
	 * @return the BinaryBitmap, or null when the input is null or fails
	 */
	public BinaryBitmap cameraBytesToBinaryBitmap(Bitmap bitmap) {
		Log.d("ClassScanViewBarcodeActivity , cameraBytesToBinaryBitmap (Bitmap bitmap):",
				"");
		BinaryBitmap binaryBitmap = null;
		try {
			if (bitmap != null) {
				RGBLuminanceSource source = new RGBLuminanceSource(bitmap);
				binaryBitmap = new BinaryBitmap(new HybridBinarizer(source));
			} else {
				Log.d("ClassScanViewBarcodeActivity , cameraBytesToBinaryBitmap (Bitmap bitmap): bitmap = ",
						String.valueOf(bitmap));
			}
		} catch (Exception exc) {
			Log.d("ClassScanViewBarcodeActivity , cameraBytesToBinaryBitmap (Bitmap bitmap): Exception ",
					String.valueOf(exc.getMessage()));
			setContinueToAutomaticallyDecode(false);
			exc.printStackTrace();
		}
		return binaryBitmap;
	}

	/**
	 * Starts the preview if needed and asks the camera for a JPEG still,
	 * which jpegCallback hands to a background decode task. Sets the
	 * in-progress flag so overlapping captures are avoided.
	 *
	 * BUGFIX: on failure the in-progress flag is now cleared — previously
	 * it stayed true forever, permanently blocking further captures. The
	 * exception message is also null-guarded before logging.
	 */
	public void takePicture() {

		setTakingPictureAndDecodeInprogress(true);
		Log.d("ClassScanViewBarcodeActivity", "takePicture()");
		try {
			// only attempt a capture when a camera was detected
			if (isThereACamera) {
				Log.d("ClassScanViewBarcodeActivity",
						"setOnClickListener() isThereACamera: "
								+ isThereACamera);

				// takePicture() requires an active preview
				if (cameraPreviewing != true) {
					globalCamera.startPreview();
					cameraPreviewing = true;
				}

				globalCamera.takePicture(null, null, jpegCallback);
			}
		}// end try
		catch (Exception exc) {
			// in case of exception release resources & cleanup
			if (globalCamera != null) {
				globalCamera.stopPreview();
				globalCamera.setPreviewCallback(null);
				globalCamera.release();
				globalCamera = null;
				setContinueToAutomaticallyDecode(false);
				cameraPreviewing = false;
			}
			// no picture will be delivered, so release the busy flag
			setTakingPictureAndDecodeInprogress(false);
			Log.d("ClassScanViewBarcodeActivity setOnClickListener() exception:",
					String.valueOf(exc.getMessage()));
			exc.printStackTrace();
		}// end catch

	}// end takePicture()

	/**
	 * Enables SCENE_MODE_BARCODE when the current focus mode is macro or
	 * auto.
	 *
	 * BUGFIX: Camera.getParameters() returns a copy, so the original
	 * setSceneMode(...) call was silently discarded; the mutated Parameters
	 * object must be written back with Camera.setParameters(...). Also
	 * guards against getFocusMode() returning null.
	 */
	public void optimizeCameraForBarcode(Camera camera) {
		if (camera != null) {
			Camera.Parameters parameters = camera.getParameters();
			String focusMode = parameters.getFocusMode();

			if (focusMode != null
					&& (focusMode
							.equalsIgnoreCase(Camera.Parameters.FOCUS_MODE_MACRO) || focusMode
							.equalsIgnoreCase(Camera.Parameters.FOCUS_MODE_AUTO))) {
				parameters
						.setSceneMode(Camera.Parameters.SCENE_MODE_BARCODE);
				// apply the change to the hardware
				camera.setParameters(parameters);
			}
		}
	}

	// inner class SurfaceView used to preview incoming images from Camera

	/** A basic Camera preview class */
	private class CameraPreview extends SurfaceView implements
			SurfaceHolder.Callback {
		// holder of the underlying preview surface
		private SurfaceHolder mHolder;
		// camera passed in by the enclosing activity
		private Camera mCamera;
		// context passed in by the enclosing activity
		private Context context;
		// optional overlay bitmap; only used by commented-out drawing code
		private Bitmap guidanceScannerFrame = null;

		/**
		 * Builds the preview surface: registers for surface lifecycle
		 * callbacks, enables custom drawing so the guidance overlay can be
		 * painted, and loads the scanner-frame bitmap.
		 */
		public CameraPreview(Context context, Camera camera) {
			super(context);

			mCamera = camera;
			this.context = context;
			// Install a SurfaceHolder.Callback so we get notified when the
			// underlying surface is created and destroyed.
			mHolder = getHolder();
			mHolder.addCallback(this);
			// deprecated setting, but required on Android versions prior to 3.0
			mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

			// allow onDraw() to run so drawGuidance() paints the overlay
			setWillNotDraw(false);
			setFocusable(true);
			requestFocus();

			// NOTE(review): "res/drawable/..." is not a filesystem path at
			// runtime; verify getBitmapFromResourcePath can resolve it,
			// otherwise guidanceScannerFrame stays null (it is only
			// referenced from commented-out drawing code)
			guidanceScannerFrame = getBitmapFromResourcePath("res/drawable/scanner_frame.png");

		}

		/**
		 * (Re)configures the camera for this surface: portrait orientation,
		 * optimal preview size for the SurfaceView's dimensions, continuous
		 * autofocus when supported, then restarts the preview.
		 *
		 * BUGFIX: the original called globalCamera.getParameters()
		 * .setFocusMode(...), which mutates a throwaway copy; the focus
		 * mode is now set on the same Parameters instance `p` that is
		 * actually applied via mCamera.setParameters(p).
		 */
		@SuppressLint("NewApi")
		private void initializeCameraForSurfaceView() {

			try {
				mCamera.stopPreview();
				cameraPreviewing = false;

				Camera.Parameters p = mCamera.getParameters();

				p.set("orientation", "portrait");
				/*
				 * for samsung galaxy s only: preview frames arrive rotated
				 * -90 degrees, so rotate the display by 90 to compensate
				 */
				mCamera.setDisplayOrientation(90);

				// pick the preview size closest to the SurfaceView
				int surfaceViewWidth = this.getWidth();
				int surfaceViewHeight = this.getHeight();

				List<Size> sizes = p.getSupportedPreviewSizes();
				Size optimalSize = CameraUtility.getOptimalPreviewSize(sizes,
						surfaceViewWidth, surfaceViewHeight);

				p.setPreviewSize(optimalSize.width, optimalSize.height);

				boolean continuousSupported = CameraUtility
						.isFocusModeSupported(globalCamera,
								Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
				Log.d("ClassScanViewBarcodeActivity, initializeCameraForSurfaceView() is FOCUS_MODE_CONTINUOUS_PICTURE supported? ",
						String.valueOf(continuousSupported));
				if (continuousSupported) {
					p.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
				}

				mCamera.setParameters(p);

				mCamera.setPreviewDisplay(mHolder);
				mCamera.startPreview();

				cameraPreviewing = true;

			} catch (IOException exc) {
				exc.printStackTrace();
			}
		}// end initializeCameraForSurfaceView

		/**
		 * Paints the scanning guidance over the preview: a red vertical
		 * line through the middle of the view and a grey rectangle marking
		 * where the barcode should be placed.
		 */
		public void drawGuidance(Canvas canvas) {

			Paint centerLinePaint = new Paint();
			centerLinePaint.setStyle(Paint.Style.STROKE);
			centerLinePaint.setStrokeWidth(6f);
			centerLinePaint.setAntiAlias(true);
			centerLinePaint.setColor(Color.RED);

			Paint framePaint = new Paint();
			framePaint.setStyle(Paint.Style.STROKE);
			framePaint.setStrokeWidth(6f);
			framePaint.setAntiAlias(true);
			framePaint.setColor(Color.GRAY);

			int viewWidth = this.getWidth();
			int viewHeight = this.getHeight();

			// vertical middle line
			canvas.drawLine(viewWidth / 2, 0, viewWidth / 2, viewHeight,
					centerLinePaint);

			// guidance rectangle, coordinates derived from the view size
			guidanceRectangleCoordinatesCalculations coords = new guidanceRectangleCoordinatesCalculations(
					viewWidth, viewHeight);
			canvas.drawRect(coords.getGuidanceRectangleStartX(),
					coords.getGuidanceRectangleStartY(),
					coords.getGuidanceRectangleEndX(),
					coords.getGuidanceRectangleEndY(), framePaint);
		}

		@Override
		public void onDraw(Canvas canvas) {

			// Delegate all drawing to drawGuidance (middle line + guidance
			// rectangle). NOTE(review): super.onDraw(canvas) is not called;
			// presumably intentional since this view only draws the overlay
			// — confirm against the superclass's drawing behavior.
			drawGuidance(canvas);

		}

		/**
		 * Checks whether the camera preview can deliver frames in the
		 * RGB_565 image format.
		 *
		 * @param camera the open camera, may be null
		 * @return true if RGB_565 is among the supported preview formats,
		 *         false otherwise (including when camera is null)
		 */
		public boolean doesCameraPreviewSupportRGB(Camera camera) {
			if (camera == null) {
				return false;
			}
			// getSupportedPreviewFormats() returns ImageFormat constants;
			// List.contains replaces the original manual iterator loop.
			List<Integer> supportedFormats = camera.getParameters()
					.getSupportedPreviewFormats();
			return supportedFormats != null
					&& supportedFormats.contains(ImageFormat.RGB_565);
		}

		@SuppressLint("InlinedApi")
		public void surfaceCreated(SurfaceHolder holder) {
			// Surface is ready: open/configure the camera and start preview.
			initializeCameraForSurfaceView();

		}

		// Intentionally empty: camera release is handled by the activity
		// lifecycle, not here.
		public void surfaceDestroyed(SurfaceHolder holder) {
			// empty. Take care of releasing the Camera preview in your
			// activity.
		}

		/**
		 * Re-initializes the camera when the surface changes (rotation or
		 * resize) and starts a background thread that triggers an
		 * auto-focus / decode attempt every 3.5 seconds while scanning is
		 * active.
		 */
		@SuppressLint("InlinedApi")
		public void surfaceChanged(SurfaceHolder holder, int format, int w,
				int h) {
			// Preview may have rotated or resized; re-apply camera setup.
			initializeCameraForSurfaceView();

			// Every 3.5 secs call autofocus & try to take an in-focus picture.
			Thread autoFocusLoop = new Thread() {

				public void run() {
					while (isContinueToAutomaticallyDecode()) {
						triggerAutomaticDecodeOnCameraFocus();
						try {
							Thread.sleep(3500);
						} catch (InterruptedException e) {
							// BUGFIX: restore the interrupt flag and stop the
							// loop instead of swallowing the interruption and
							// spinning on.
							Thread.currentThread().interrupt();
							break;
						}
					}
				}
			};
			autoFocusLoop.start();

		}// end surfaceChanged()

		/**
		 * Receives RGB preview frames from the camera and hands each one
		 * to a background AsyncTask for barcode decoding.
		 */
		PreviewCallback newPreviewCallback = new PreviewCallback() {

			@Override
			public void onPreviewFrame(byte[] imgData, Camera camera) {

				Log.d("ClassScanViewBarcodeActivity", "onPreviewFrame()");

				if (imgData != null && camera != null) {

					// Decode off the UI thread; assumption 3 selects the
					// decoding strategy inside cameraBytesToBitmapAndDecode.
					asyncDecodeTask decodeTask = new asyncDecodeTask();
					decodeTask.setImgDataFromCamera(imgData);
					decodeTask.setCamera(camera);
					decodeTask.setAssumption(3);

					decodeTask
							.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);

				} else {
					Log.d("ClassScanViewBarcodeActivity",
							"onPreviewFrame() parameters are null");
				}

			}

		};
		

		/**
		 * Invoked when the camera reports an auto-focus result. On a
		 * successful focus, a picture is taken and decoded. On failure,
		 * the code falls back to RGB preview-frame decoding when the
		 * camera supports RGB_565; otherwise it takes a (possibly
		 * out-of-focus) picture anyway.
		 */
		AutoFocusCallback autoFocusCallbackForAutomaticScan = new AutoFocusCallback() {
			@Override
			public void onAutoFocus(boolean success, Camera camera) {

				// Skip if a capture/decode cycle is already in progress.
				if (!isTakingPictureAndDecodeInprogress()) {

					if (success) {
						takePicture();

						Log.d("***autoFocusCallbackForAutomaticScan, onAutoFocus(boolean success, Camera camera)",
								"success: " + success + " - onTouch: " + onTouchEvent);
						// reset onTouchEvent switch
						onTouchEvent = false;
					} else {

						Log.d("***autoFocusCallbackForAutomaticScan, onAutoFocus(boolean success, Camera camera)",
								"success: " + success + " - onTouch: " + onTouchEvent);

						// if RGB is available set preview to RGB and set
						// PreviewCallback implementation
						boolean supportsRGB = doesCameraPreviewSupportRGB(mCamera);
						if (supportsRGB) {
							Log.d("ClassScanViewBarcodeActivity , initializeCameraForSurfaceView () RGB is suppoorted:",
									String.valueOf(supportsRGB));
							// BUGFIX: getParameters() returns a copy, so the
							// format change must be written back with
							// setParameters(); the original call mutated the
							// copy and was silently discarded.
							Camera.Parameters params = camera.getParameters();
							params.setPreviewFormat(ImageFormat.RGB_565);
							camera.setParameters(params);
							camera.setPreviewCallback(newPreviewCallback);
						} else {

							takePicture();

							Log.d("ClassScanViewBarcodeActivity , initializeCameraForSurfaceView () RGB is suppoorted:",
									String.valueOf(supportsRGB));
						}
					}
				}// end isTakingPictureAndDecodeInprogress()

			}// end onAutoFocus(boolean success, Camera camera)
		}; // end autoFocusCallbackForAutomaticScan instantiation

		/**
		 * Requests a one-shot auto-focus (which in turn drives the decode
		 * callback) whenever the camera is in a focus mode that supports
		 * focusing on demand.
		 */
		public void triggerAutomaticDecodeOnCameraFocus() {
			if (mCamera == null) {
				return;
			}
			String focusMode = mCamera.getParameters().getFocusMode();
			// Only macro, auto and continuous-picture modes support an
			// explicit autoFocus() call.
			boolean canAutoFocus = focusMode
					.equalsIgnoreCase(Camera.Parameters.FOCUS_MODE_MACRO)
					|| focusMode
							.equalsIgnoreCase(Camera.Parameters.FOCUS_MODE_AUTO)
					|| focusMode
							.equalsIgnoreCase(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
			if (canAutoFocus) {
				mCamera.autoFocus(autoFocusCallbackForAutomaticScan);
			}
		}

		/**
		 * Loads a Bitmap from a classpath resource.
		 *
		 * @param path classpath-relative resource path, may be null
		 * @return the decoded bitmap, or null when the path is null or the
		 *         resource cannot be decoded
		 */
		public Bitmap getBitmapFromResourcePath(String path) {
			if (path == null) {
				return null;
			}
			Bitmap decoded = null;
			InputStream stream = null;
			try {
				stream = this.getClass().getClassLoader()
						.getResourceAsStream(path);
				decoded = BitmapFactory.decodeStream(stream);
			} catch (Exception exc) {
				exc.printStackTrace();
			} finally {
				if (stream != null) {
					try {
						stream.close();
					} catch (IOException e) {
						e.printStackTrace();
					}
				}
			}
			return decoded;
		}

	}

	/**
	 * Background task that decodes a barcode (VIN) from raw camera bytes.
	 * Input data is supplied through the setters before execution; the
	 * decoded VIN (or null on failure) is stored in globalVIN by
	 * onPostExecute, which on success also releases the camera, stops the
	 * automatic scan loop, and launches the Service activity.
	 */
	private class asyncDecodeTask extends AsyncTask<Void, Void, String> {

		private byte[] imgDataFromCamera = null;
		private Camera camera = null;
		// selects the decoding strategy used by cameraBytesToBitmapAndDecode
		private int assumption = 0;

		public int getAssumption() {
			return assumption;
		}

		public void setAssumption(int assumption) {
			this.assumption = assumption;
		}

		public byte[] getImgDataFromCamera() {
			return imgDataFromCamera;
		}

		public void setImgDataFromCamera(byte[] imgDataFromCamera) {
			this.imgDataFromCamera = imgDataFromCamera;
		}

		public Camera getCamera() {
			return camera;
		}

		public void setCamera(Camera camera) {
			this.camera = camera;
		}

		@Override
		protected String doInBackground(Void... params) {
			// Heavy decode work runs off the UI thread.
			return cameraBytesToBitmapAndDecode(getImgDataFromCamera(),
					getCamera(), getAssumption());
		}

		@Override
		protected void onCancelled() {
			super.onCancelled();
		}

		@Override
		protected void onCancelled(String result) {
			super.onCancelled(result);
		}

		@Override
		protected void onPostExecute(String result) {

			globalVIN = result;

			super.onPostExecute(result);

			// Cleanup: onPostExecute already runs on the UI thread; the
			// original code started a throwaway Thread whose only job was
			// to call handler.post(). Posting directly is equivalent and
			// avoids the redundant thread.
			handler.post(new Runnable() {
				@Override
				public void run() {

					if (globalVIN != null) {

						// decode succeeded: release camera resources and
						// stop the automatic scan loop
						if (globalCamera != null) {
							globalCamera.stopPreview();
							globalCamera.setPreviewCallback(null);
							globalCamera.release();
							globalCamera = null;
							setContinueToAutomaticallyDecode(false);
							cameraPreviewing = false;
						}

						// for testing
						Toast toast = Toast
								.makeText(getApplicationContext(),
										"VIN:" + globalVIN,
										Toast.LENGTH_SHORT);

						// hand the decoded VIN to the Service activity
						Intent intent = new Intent(globalContext, Service.class);
						intent.putExtra("com.toyota.toyotaownerspoc.globalVIN", globalVIN);
						startActivity(intent);

						toast.show();
					} else {
						// for testing
						Toast toast = Toast.makeText(
								getApplicationContext(),
								"Unable to decode. Please try again",
								Toast.LENGTH_SHORT);

						toast.show();
					}

				}
			});
		}

		@Override
		protected void onProgressUpdate(Void... values) {
			super.onProgressUpdate(values);
		}

	}

}// end class

