package com.wnhoo.smartfishtank.ui;

import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.Camera;
import android.hardware.Camera.Face;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.media.MediaScannerConnection;
import android.media.Ringtone;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.Build;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.Display;
import android.view.MotionEvent;
import android.view.OrientationEventListener;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.Toast;

import com.wnhoo.smartfishtank.util.CameraUtil;
import com.wnhoo.smartfishtank.util.MyDebug;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Timer;
import java.util.TimerTask;
import java.util.Vector;

@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
public class CameraPreView extends SurfaceView implements SurfaceHolder.Callback {
	private static final String TAG = "Preview";

	// EXIF tag names for GPS image direction (not exposed as constants on
	// older Android versions)
	private static final String TAG_GPS_IMG_DIRECTION = "GPSImgDirection";
	private static final String TAG_GPS_IMG_DIRECTION_REF = "GPSImgDirectionRef";

	// scratch objects reused across frames/calls to avoid repeated allocation
	private Paint p = new Paint();
	private DecimalFormat decimalFormat = new DecimalFormat("#0.0");
	private Camera.CameraInfo camera_info = new Camera.CameraInfo();
	private Matrix camera_to_preview_matrix = new Matrix();
	private Matrix preview_to_camera_matrix = new Matrix();
	private RectF face_rect = new RectF();
	private Rect text_bounds = new Rect();
	private int display_orientation = 0;

	private SurfaceHolder mHolder = null;

	// preview aspect ratio, known once the preview size has been chosen
	private boolean has_aspect_ratio = false;
	private double aspect_ratio = 0.0f;
	// currently open camera, or null when closed
	private Camera camera = null;
	private int cameraId = 0;
	private MediaRecorder video_recorder = null;
	private boolean video_start_time_set = false;
	private long video_start_time = 0;
	private String video_name = null;

	// photo-taking state machine: normal -> (timer) -> taking photo -> paused
	private final int PHASE_NORMAL = 0;
	private final int PHASE_TIMER = 1;
	private final int PHASE_TAKING_PHOTO = 2;
	private final int PHASE_PREVIEW_PAUSED = 3; // the paused state after taking
												// a photo
	private int phase = PHASE_NORMAL;
	/*
	 * private boolean is_taking_photo = false; private boolean
	 * is_taking_photo_on_timer = false;
	 */
	// timers for delayed capture and the countdown beep
	private Timer takePictureTimer = new Timer();
	private TimerTask takePictureTimerTask = null;
	private Timer beepTimer = new Timer();
	private TimerTask beepTimerTask = null;
	private long take_photo_time = 0;
	private int remaining_burst_photos = 0;
	private int n_burst = 1;

	// private boolean is_preview_paused = false; // whether we are in the
	// paused state after taking a photo
	private String preview_image_name = null;
	private Bitmap thumbnail = null; // thumbnail of last picture taken
	private boolean thumbnail_anim = false; // whether we are displaying the
											// thumbnail animation
	private long thumbnail_anim_start_ms = -1; // time that the thumbnail
												// animation started
	private RectF thumbnail_anim_src_rect = new RectF();
	private RectF thumbnail_anim_dst_rect = new RectF();
	private Matrix thumbnail_anim_matrix = new Matrix();
	private int[] gui_location = new int[2];

	private int current_orientation = 0; // orientation received by
											// onOrientationChanged
	private int current_rotation = 0; // orientation relative to camera's
										// orientation (used for
										// parameters.setOrientation())
	private boolean has_level_angle = false;
	private double level_angle = 0.0f;
	private double orig_level_angle = 0.0f;

	// cached free-storage reading (GB) plus the time it was last refreshed
	private float free_memory_gb = -1.0f;
	private long last_free_memory_time = 0;

	private List<Integer> zoom_ratios = null;
	private boolean touch_was_multitouch = false;

	private List<String> supported_flash_values = null; // our "values" format
	private int current_flash_index = -1; // this is an index into the
											// supported_flash_values array, or
											// -1 if no flash modes available

	private List<String> supported_focus_values = null; // our "values" format
	private int current_focus_index = -1; // this is an index into the
											// supported_focus_values array, or
											// -1 if no focus modes available

	// capability lists populated from Camera.Parameters in openCamera()
	private List<String> color_effects = null;
	private List<String> scene_modes = null;
	private List<String> white_balances = null;
	private String iso_key = null; // vendor-specific parameter key for ISO, or null
	private List<String> isos = null;

	private List<Camera.Size> sizes = null;
	private int current_size_index = -1; // this is an index into the sizes
											// array, or -1 if sizes not yet set

	// small holder so each UI action can reuse/cancel its own Toast
	class ToastBoxer {
		public Toast toast = null;

		ToastBoxer() {
		}
	}

	private ToastBoxer switch_camera_toast = new ToastBoxer();
	private ToastBoxer switch_video_toast = new ToastBoxer();
	private ToastBoxer flash_toast = new ToastBoxer();
	private ToastBoxer focus_toast = new ToastBoxer();
	private ToastBoxer exposure_lock_toast = new ToastBoxer();
	private ToastBoxer take_photo_toast = new ToastBoxer();
	private ToastBoxer stopstart_video_toast = new ToastBoxer();
	private ToastBoxer change_exposure_toast = new ToastBoxer();

	private int ui_rotation = 0;

	// touch-to-focus / face-detection state
	private Face[] faces_detected = null;
	private boolean has_focus_area = false;
	private int focus_screen_x = 0;
	private int focus_screen_y = 0;
	private long focus_complete_time = -1;
	private int focus_success = FOCUS_DONE;
	private static final int FOCUS_WAITING = 0;
	private static final int FOCUS_SUCCESS = 1;
	private static final int FOCUS_FAILED = 2;
	private static final int FOCUS_DONE = 3;
	private String set_flash_after_autofocus = "";
	private boolean successfully_focused = false;
	private long successfully_focused_time = -1;

	// battery level, read via a sticky broadcast and cached
	private IntentFilter battery_ifilter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED);
	private boolean has_battery_frac = false;
	private float battery_frac = 0.0f;
	private long last_battery_time = 0;

	// accelerometer and geomagnetic sensor info
	private final float sensor_alpha = 0.8f; // for filter
	private boolean has_gravity = false;
	private float[] gravity = new float[3];
	private boolean has_geomagnetic = false;
	private float[] geomagnetic = new float[3];
	private float[] deviceRotation = new float[9];
	private float[] cameraRotation = new float[9];
	private float[] deviceInclination = new float[9];
	private boolean has_geo_direction = false;
	private float[] geo_direction = new float[3];

	// /////////////////////////////////////////////////////////////////////
	private List<Camera.Size> supported_preview_sizes = null;
	private boolean is_preview_started = false;
	private boolean supports_face_detection = false;
	private boolean has_surface = false;
	private boolean app_is_paused = true;

	private SharedPreferences sharedPreferences;

	/**
	 * Builds the preview view: registers for surface lifecycle callbacks and
	 * caches the default SharedPreferences. Note there is no
	 * (Context, AttributeSet) constructor, so this view can only be created
	 * from code, not inflated from XML.
	 */
	public CameraPreView(Context context) {
		super(context);
		if (MyDebug.LOG) {
			Log.d(TAG, "new Preview");
		}
		// Install a SurfaceHolder.Callback so we get notified when the
		// underlying surface is created and destroyed.
		mHolder = getHolder();
		mHolder.addCallback(this);
		// deprecated setting, but required on Android versions prior to 3.0
		mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // deprecated

		sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getContext());
	}

	/**
	 * Surface lifecycle callback: the drawing surface now exists, so mark it
	 * as available, open the camera, and enable onDraw() for this view.
	 */
	public void surfaceCreated(SurfaceHolder holder) {
		if (MyDebug.LOG) {
			Log.d(TAG, "surfaceCreated()");
		}
		has_surface = true;
		openCamera();
		// A SurfaceView never receives onDraw() unless this is cleared; see
		// http://stackoverflow.com/questions/2687015/extended-surfaceviews-ondraw-method-never-called
		setWillNotDraw(false);
	}

	/**
	 * Surface geometry changes are intentionally ignored: preview size and
	 * orientation are (re)applied when the camera is opened in openCamera(),
	 * not here.
	 */
	public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {

	}

	/**
	 * Surface lifecycle callback: the surface is going away, so drop the
	 * availability flag and release the camera before any further preview
	 * frames could be drawn to a dead surface.
	 */
	public void surfaceDestroyed(SurfaceHolder holder) {
		this.has_surface = false;
		this.closeCamera();
	}

	/**
	 * Opens the camera selected by cameraId and configures it: scene mode,
	 * face detection, colour effect, white balance, ISO, picture size, JPEG
	 * quality, flash and focus modes, and preview size; then starts the
	 * preview and schedules an initial delayed autofocus. Safe to call again
	 * when switching cameras. Does nothing if the preview surface is not yet
	 * available or the app is paused.
	 */
	private void openCamera() {
		if (MyDebug.LOG) {
			Log.d(TAG, "openCamera()");
			Log.d(TAG, "cameraId: " + cameraId);
		}
		// need to init everything now, in case we don't open the camera (but
		// these may already be initialised from an earlier call - e.g., if we
		// are now switching to another camera)
		has_focus_area = false;
		focus_success = FOCUS_DONE;
		successfully_focused = false;
		scene_modes = null;
		zoom_ratios = null;
		faces_detected = null;
		supports_face_detection = false;
		color_effects = null;
		white_balances = null;
		isos = null;
		sizes = null;
		current_size_index = -1;
		supported_flash_values = null;
		current_flash_index = -1;
		supported_focus_values = null;
		current_focus_index = -1;

		if (!this.has_surface) {
			if (MyDebug.LOG) {
				Log.d(TAG, "preview surface not yet available");
			}
			return;
		}

		if (this.app_is_paused) {
			if (MyDebug.LOG) {
				Log.d(TAG, "don't open camera as app is paused");
			}
			return;
		}
		try {
			camera = Camera.open(cameraId);
		} catch (RuntimeException e) {
			// Camera.open() throws RuntimeException when the camera is in use
			// or otherwise unavailable; leave camera null and bail out below
			if (MyDebug.LOG)
				Log.e(TAG, "Failed to open camera: " + e.getMessage());
			e.printStackTrace();
			camera = null;
		}
		if (camera != null) {
			this.setCameraDisplayOrientation((Activity) getContext());
			try {
				camera.setPreviewDisplay(mHolder);
			} catch (IOException e) {
				if (MyDebug.LOG)
					Log.e(TAG, "Failed to set preview display: " + e.getMessage());
				e.printStackTrace();
			}
			Camera.Parameters parameters = camera.getParameters();

			// get available scene modes
			// important, from docs:
			// "Changing scene mode may override other parameters (such as flash
			// mode, focus mode, white balance).
			// For example, suppose originally flash mode is on and supported
			// flash modes are on/off. In night
			// scene mode, both flash mode and supported flash mode may be
			// changed to off. After setting scene
			// mode, applications should call getParameters to know if some
			// parameters are changed."
			// force the scene mode to auto
			scene_modes = parameters.getSupportedSceneModes();
			String scene_mode = Camera.Parameters.SCENE_MODE_AUTO;
			if (scene_modes != null && !parameters.getSceneMode().equals(scene_mode)) {
				parameters.setSceneMode(scene_mode);
				// need to read back parameters, see comment above
				camera.setParameters(parameters);
				parameters = camera.getParameters();
			}

			// enable face detection when the camera supports it
			this.faces_detected = null;
			this.supports_face_detection = parameters.getMaxNumDetectedFaces() > 0;
			if (supports_face_detection) {
				class MyFaceDetectionListener implements Camera.FaceDetectionListener {
					@Override
					public void onFaceDetection(Face[] faces, Camera camera) {
						// copy the array: the camera may reuse the one it
						// passes in
						faces_detected = new Face[faces.length];
						System.arraycopy(faces, 0, faces_detected, 0, faces.length);
					}
				}
				camera.setFaceDetectionListener(new MyFaceDetectionListener());
			}

			// colour effects: always use "none"
			parameters.setColorEffect(Camera.Parameters.EFFECT_NONE);

			// white balance: always use auto
			parameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);

			// get available isos - no standard value for this, see
			// http://stackoverflow.com/questions/2978095/android-camera-api-iso-setting
			{
				String iso_values = parameters.get("iso-values");
				if (iso_values == null) {
					iso_values = parameters.get("iso-mode-values"); // Galaxy
																	// Nexus
					if (iso_values == null) {
						iso_values = parameters.get("iso-speed-values"); // Micromax
																			// A101
						if (iso_values == null)
							iso_values = parameters.get("nv-picture-iso-values"); // LG
																					// dual
																					// P990
					}
				}
				if (iso_values != null && iso_values.length() > 0) {
					if (MyDebug.LOG)
						Log.d(TAG, "iso_values: " + iso_values);
					String[] isos_array = iso_values.split(",");
					if (isos_array != null && isos_array.length > 0) {
						isos = new ArrayList<String>();
						for (int i = 0; i < isos_array.length; i++) {
							isos.add(isos_array[i]);
						}
					}
				}
			}
			// find the vendor-specific parameter key used to set ISO, then
			// force it to auto
			iso_key = "iso";
			if (parameters.get(iso_key) == null) {
				iso_key = "iso-speed"; // Micromax A101
				if (parameters.get(iso_key) == null) {
					iso_key = "nv-picture-iso"; // LG dual P990
					if (parameters.get(iso_key) == null)
						iso_key = null; // not supported
				}
			}
			if (iso_key != null) {
				parameters.set(iso_key, "auto");
			}

			// get min/max exposure
			// TODO exposure support was removed from this build

			// get available sizes
			sizes = parameters.getSupportedPictureSizes();
			if (MyDebug.LOG) {
				for (int i = 0; i < sizes.size(); i++) {
					Camera.Size size = sizes.get(i);
					Log.d(TAG, "supported picture size: " + size.width + " , " + size.height);
				}
			}
			current_size_index = -1;
			String resolution_value = sharedPreferences.getString(getResolutionPreferenceKey(cameraId), "");
			if (MyDebug.LOG)
				Log.d(TAG, "resolution_value: " + resolution_value);
			if (resolution_value.length() > 0) {
				// parse the saved "<width> <height>" size, and make sure it is
				// still valid for this camera
				int index = resolution_value.indexOf(' ');
				if (index == -1) {
					if (MyDebug.LOG)
						Log.d(TAG, "resolution_value invalid format, can't find space");
				} else {
					String resolution_w_s = resolution_value.substring(0, index);
					String resolution_h_s = resolution_value.substring(index + 1);
					if (MyDebug.LOG) {
						Log.d(TAG, "resolution_w_s: " + resolution_w_s);
						Log.d(TAG, "resolution_h_s: " + resolution_h_s);
					}
					try {
						int resolution_w = Integer.parseInt(resolution_w_s);
						if (MyDebug.LOG)
							Log.d(TAG, "resolution_w: " + resolution_w);
						int resolution_h = Integer.parseInt(resolution_h_s);
						if (MyDebug.LOG)
							Log.d(TAG, "resolution_h: " + resolution_h);
						// now find size in valid list
						for (int i = 0; i < sizes.size() && current_size_index == -1; i++) {
							Camera.Size size = sizes.get(i);
							if (size.width == resolution_w && size.height == resolution_h) {
								current_size_index = i;
								if (MyDebug.LOG)
									Log.d(TAG, "set current_size_index to: " + current_size_index);
							}
						}
						if (current_size_index == -1) {
							if (MyDebug.LOG)
								Log.e(TAG, "failed to find valid size");
						}
					} catch (NumberFormatException exception) {
						if (MyDebug.LOG)
							Log.d(TAG, "resolution_value invalid format, can't parse w or h to int");
					}
				}
			}

			if (current_size_index == -1) {
				// no (valid) saved size: fall back to the largest supported
				// picture size
				Camera.Size current_size = null;
				for (int i = 0; i < sizes.size(); i++) {
					Camera.Size size = sizes.get(i);
					if (current_size == null || size.width * size.height > current_size.width * current_size.height) {
						current_size_index = i;
						current_size = size;
					}
				}
			}
			if (current_size_index != -1) {
				Camera.Size current_size = sizes.get(current_size_index);
				if (MyDebug.LOG)
					Log.d(TAG, "Current size index " + current_size_index + ": " + current_size.width + ", " + current_size.height);

				// now save, so it's available for PreferenceActivity
				resolution_value = current_size.width + " " + current_size.height;
				if (MyDebug.LOG) {
					Log.d(TAG, "save new resolution_value: " + resolution_value);
				}
				SharedPreferences.Editor editor = sharedPreferences.edit();
				editor.putString(getResolutionPreferenceKey(cameraId), resolution_value);
				editor.apply();

				// now set the size
				parameters.setPictureSize(current_size.width, current_size.height);
				camera.setParameters(parameters);
			}

			/*
			 * if( MyDebug.LOG ) Log.d(TAG, "Current image quality: " +
			 * parameters.getJpegQuality());
			 */
			// NOTE(review): this is set on the local parameters object after
			// the last camera.setParameters() call above - confirm a later
			// call (e.g. inside updateFlash/updateFocus) actually applies it
			parameters.setJpegQuality(90);

			/*
			 * flash modes supported by this camera
			 */
			// we do the following after setting parameters, as these are done
			// by calling separate functions, that themselves set the parameters
			// directly
			List<String> supported_flash_modes = parameters.getSupportedFlashModes(); // Android
			current_flash_index = -1;
			if (supported_flash_modes != null && supported_flash_modes.size() > 1) {
				// convert to our format(also resorts)
				supported_flash_values = convertFlashModesToValues(supported_flash_modes);
				// NOTE(review): flash_value is read but never used; updateFlash(0)
				// always selects the first flash mode - confirm this is intended
				String flash_value = sharedPreferences.getString(getFlashPreferenceKey(cameraId), "");
				updateFlash(0);
			} else {
				supported_flash_values = null;
			}

			List<String> supported_focus_modes = parameters.getSupportedFocusModes(); // Android
			current_focus_index = -1;
			if (supported_focus_modes != null && supported_focus_modes.size() > 1) {
				// convert to our format(also resorts)
				supported_focus_values = convertFocusModesToValues(supported_focus_modes);
				updateFocus(0, false, true, true);
			} else {
				supported_focus_values = null;
			}

			// TODO exposure lock support was removed from this build
			// now switch to video if saved
			// TODO video mode support was removed from this build

			// Must set preview size before starting camera preview
			// and must do it after setting photo vs video mode
			setPreviewSize(); // need to call this when we switch cameras, not
								// just when we run for the first time
			// Must call startCameraPreview after checking if face detection is
			// present - probably best to call it after setting all parameters
			// that we want
			startCameraPreview();
			if (MyDebug.LOG) {
				// Log.d(TAG, "time after starting camera preview: " +
				// (System.currentTimeMillis() - debug_time));
			}

			final Handler handler = new Handler();
			handler.postDelayed(new Runnable() {
				@Override
				public void run() {
					tryAutoFocus(true, false); // so we get the autofocus when
												// starting up - we do this on a
												// delay, as calling it
												// immediately means the
												// autofocus doesn't seem to
												// work properly sometimes (at
												// least on Galaxy Nexus)
				}
			}, 500);
		}
	}

	/**
	 * Rebuilds camera_to_preview_matrix, which maps camera driver coordinates
	 * (-1000..1000 on both axes) to this view's coordinates (0..width,
	 * 0..height), including the front-camera mirror and display rotation.
	 * The order of the matrix operations below is significant.
	 */
	private void calculateCameraToPreviewMatrix() {
		camera_to_preview_matrix.reset();
		// from
		// http://developer.android.com/reference/android/hardware/Camera.Face.html#rect
		Camera.getCameraInfo(cameraId, camera_info);
		// Need mirror for front camera.
		boolean mirror = (camera_info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
		camera_to_preview_matrix.setScale(mirror ? -1 : 1, 1);
		// This is the value for android.hardware.Camera.setDisplayOrientation.
		camera_to_preview_matrix.postRotate(display_orientation);
		// Camera driver coordinates range from (-1000, -1000) to (1000, 1000).
		// UI coordinates range from (0, 0) to (width, height).
		camera_to_preview_matrix.postScale(this.getWidth() / 2000f, this.getHeight() / 2000f);
		camera_to_preview_matrix.postTranslate(this.getWidth() / 2f, this.getHeight() / 2f);
	}

	/**
	 * Refreshes preview_to_camera_matrix as the inverse of the
	 * camera-to-preview transform, used to map touch points into camera
	 * driver coordinates.
	 */
	private void calculatePreviewToCameraMatrix() {
		calculateCameraToPreviewMatrix();
		boolean inverted = camera_to_preview_matrix.invert(preview_to_camera_matrix);
		if (!inverted && MyDebug.LOG) {
			Log.d(TAG, "calculatePreviewToCameraMatrix failed to invert matrix!?");
		}
	}

	/**
	 * Builds a single focus/metering area centred on the given preview
	 * coordinate. The point is mapped into camera driver coordinates
	 * (-1000..1000) and the 100x100 box is clamped so it stays fully inside
	 * that range.
	 */
	private ArrayList<Camera.Area> getAreas(float x, float y) {
		float[] mapped = { x, y };
		calculatePreviewToCameraMatrix();
		preview_to_camera_matrix.mapPoints(mapped);

		final int focus_size = 50;
		if (MyDebug.LOG) {
			Log.d(TAG, "x, y: " + x + ", " + y);
			Log.d(TAG, "focus x, y: " + mapped[0] + ", " + mapped[1]);
		}
		// clamp the centre so the +/-focus_size box fits inside [-1000, 1000];
		// equivalent to clamping each edge, since the box is narrower than the
		// full range
		int centre_x = Math.max(-1000 + focus_size, Math.min(1000 - focus_size, (int) mapped[0]));
		int centre_y = Math.max(-1000 + focus_size, Math.min(1000 - focus_size, (int) mapped[1]));
		Rect area_rect = new Rect(centre_x - focus_size, centre_y - focus_size, centre_x + focus_size, centre_y + focus_size);

		ArrayList<Camera.Area> areas = new ArrayList<Camera.Area>();
		areas.add(new Camera.Area(area_rect, 1000));
		return areas;
	}

	/**
	 * Touch handling: a completed single-finger tap (ignored while a photo or
	 * timer is in progress, or if the gesture was ever multi-touch) restarts
	 * the preview, sets a focus/metering area at the tap position where the
	 * camera supports it, and triggers an autofocus.
	 */
	@Override
	public boolean onTouchEvent(MotionEvent event) {
		// any multi-finger contact poisons the whole gesture until the next
		// ACTION_DOWN
		if (event.getPointerCount() != 1) {
			// multitouch_time = System.currentTimeMillis();
			touch_was_multitouch = true;
			return true;
		}
		if (event.getAction() != MotionEvent.ACTION_UP) {
			if (event.getAction() == MotionEvent.ACTION_DOWN && event.getPointerCount() == 1) {
				touch_was_multitouch = false;
			}
			return true;
		}
		if (touch_was_multitouch) {
			return true;
		}
		if (this.isTakingPhotoOrOnTimer()) {
			return true;
		}

		// note, we always try to force start the preview (in case
		// is_preview_paused has become false)
		startCameraPreview();
		cancelAutoFocus();

		// NOTE(review): touch-to-focus is skipped while face detection is
		// active - presumably the driver owns the focus areas then; confirm
		if (camera != null && !this.supports_face_detection) {
			Camera.Parameters parameters = camera.getParameters();
			String focus_mode = parameters.getFocusMode();
			this.has_focus_area = false;
			if (parameters.getMaxNumFocusAreas() != 0
					&& (focus_mode.equals(Camera.Parameters.FOCUS_MODE_AUTO) || focus_mode.equals(Camera.Parameters.FOCUS_MODE_MACRO)
							|| focus_mode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE) || focus_mode
								.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))) {
				if (MyDebug.LOG)
					Log.d(TAG, "set focus (and metering?) area");
				this.has_focus_area = true;
				this.focus_screen_x = (int) event.getX();
				this.focus_screen_y = (int) event.getY();

				ArrayList<Camera.Area> areas = getAreas(event.getX(), event.getY());
				parameters.setFocusAreas(areas);

				// also set metering areas
				if (parameters.getMaxNumMeteringAreas() == 0) {
					if (MyDebug.LOG)
						Log.d(TAG, "metering areas not supported");
				} else {
					parameters.setMeteringAreas(areas);
				}

				try {
					if (MyDebug.LOG)
						Log.d(TAG, "set focus areas parameters");
					camera.setParameters(parameters);
					if (MyDebug.LOG)
						Log.d(TAG, "done");
				} catch (RuntimeException e) {
					// just in case something has gone wrong
					if (MyDebug.LOG)
						Log.d(TAG, "failed to set parameters for focus area");
					e.printStackTrace();
				}
			} else if (parameters.getMaxNumMeteringAreas() != 0) {
				if (MyDebug.LOG)
					Log.d(TAG, "set metering area");
				// don't set has_focus_area in this mode
				ArrayList<Camera.Area> areas = getAreas(event.getX(), event.getY());
				parameters.setMeteringAreas(areas);

				try {
					camera.setParameters(parameters);
				} catch (RuntimeException e) {
					// just in case something has gone wrong
					if (MyDebug.LOG)
						Log.d(TAG, "failed to set parameters for focus area");
					e.printStackTrace();
				}
			}
		}
		tryAutoFocus(false, true);
		return true;
	}

	/**
	 * Removes any user-selected focus/metering areas and resets the focus
	 * state back to "done". No-op when the camera is not open.
	 */
	public void clearFocusAreas() {
		if (MyDebug.LOG)
			Log.d(TAG, "clearFocusAreas()");
		if (camera == null)
			return;
		cancelAutoFocus();
		Camera.Parameters params = camera.getParameters();
		boolean params_changed = false;
		if (params.getMaxNumFocusAreas() > 0) {
			params.setFocusAreas(null);
			params_changed = true;
		}
		if (params.getMaxNumMeteringAreas() > 0) {
			params.setMeteringAreas(null);
			params_changed = true;
		}
		// only push parameters back if we actually cleared something
		if (params_changed)
			camera.setParameters(params);
		has_focus_area = false;
		focus_success = FOCUS_DONE;
		successfully_focused = false;
	}

	/**
	 * Re-acquires the camera after another component (e.g. MediaRecorder) has
	 * used it, restarts the preview, and kicks off an autofocus. On failure
	 * the camera is fully closed and a toast is shown to the user.
	 */
	private void reconnectCamera() {
		if (camera != null) { // just to be safe
			try {
				camera.reconnect();
				this.startCameraPreview();
			} catch (IOException e) {
				if (MyDebug.LOG)
					Log.e(TAG, "failed to reconnect to camera");
				e.printStackTrace();
				showToast(null, "Failed to reconnect to camera");
				closeCamera();
			}
			tryAutoFocus(false, false);
		}
	}

	/**
	 * Cancels any pending photo timer, stops the preview, and releases the
	 * camera, resetting the photo phase back to normal. Safe to call when the
	 * camera is already closed.
	 */
	private void closeCamera() {
		if (MyDebug.LOG) {
			Log.d(TAG, "closeCamera()");
		}
		has_focus_area = false;
		focus_success = FOCUS_DONE;
		successfully_focused = false;
		// if( is_taking_photo_on_timer ) {
		if (this.isOnTimer()) {
			// abort the countdown (and its beep) so no photo fires after the
			// camera is released
			takePictureTimerTask.cancel();
			if (beepTimerTask != null) {
				beepTimerTask.cancel();
			}
			/*
			 * is_taking_photo_on_timer = false; is_taking_photo = false;
			 */
			this.phase = PHASE_NORMAL;
			if (MyDebug.LOG)
				Log.d(TAG, "cancelled camera timer");
		}
		if (camera != null) {
			// camera.setPreviewCallback(null);
			this.setPreviewPaused(false);
			camera.stopPreview();
			/*
			 * this.is_taking_photo = false; this.is_taking_photo_on_timer =
			 * false;
			 */
			this.phase = PHASE_NORMAL;
			this.is_preview_started = false;
			camera.release();
			camera = null;
		}
	}

	/**
	 * Returns the preference stored under key, constrained to the supplied
	 * list of supported values: if the saved value is not in the list, it
	 * falls back to default_value (or the first supported value), and the
	 * result is written back so it is available for the PreferenceActivity.
	 *
	 * @param values        supported values for this setting; may be null/empty
	 * @param key           SharedPreferences key to read and write
	 * @param default_value value to use when nothing valid is saved
	 * @return the validated value, or null if values is null or empty
	 */
	private String setupValuesPref(List<String> values, String key, String default_value) {
		if (MyDebug.LOG)
			Log.d(TAG, "setupValuesPref, key: " + key);
		if (values != null && values.size() > 0) {
			if (MyDebug.LOG) {
				for (int i = 0; i < values.size(); i++) {
					Log.d(TAG, "supported value: " + values.get(i));
				}
			}
			// use the class-level sharedPreferences (initialised in the
			// constructor) instead of re-fetching it via PreferenceManager;
			// the previous local variable shadowed the field for no benefit
			// and was inconsistent with the rest of this class
			String value = sharedPreferences.getString(key, default_value);
			if (MyDebug.LOG)
				Log.d(TAG, "value: " + value);
			// make sure result is valid
			if (!values.contains(value)) {
				if (MyDebug.LOG)
					Log.d(TAG, "value not valid!");
				if (values.contains(default_value))
					value = default_value;
				else
					value = values.get(0);
				if (MyDebug.LOG)
					Log.d(TAG, "value is now: " + value);
			}

			// now save, so it's available for PreferenceActivity
			SharedPreferences.Editor editor = sharedPreferences.edit();
			editor.putString(key, value);
			editor.apply();

			return value;
		} else {
			if (MyDebug.LOG)
				Log.d(TAG, "values not supported");
			return null;
		}
	}

	/**
	 * Chooses and applies the optimal preview size for the current picture
	 * size and display, and updates this view's aspect ratio to match. Does
	 * nothing if the camera is closed.
	 *
	 * @throws RuntimeException if called while the preview is running (the
	 *             preview size cannot be changed on a live preview)
	 */
	private void setPreviewSize() {
		if (MyDebug.LOG)
			Log.d(TAG, "setPreviewSize()");
		if (camera == null) {
			return;
		}
		if (is_preview_started) {
			if (MyDebug.LOG)
				Log.d(TAG, "setPreviewSize() shouldn't be called when preview is running");
			// include a message so the crash is diagnosable from the stack
			// trace alone (previously this threw a bare RuntimeException)
			throw new RuntimeException("setPreviewSize() must not be called while the preview is running");
		}
		// set optimal preview size
		Camera.Parameters parameters = camera.getParameters();
		if (MyDebug.LOG)
			Log.d(TAG, "current preview size: " + parameters.getPreviewSize().width + ", " + parameters.getPreviewSize().height);
		supported_preview_sizes = parameters.getSupportedPreviewSizes();
		if (supported_preview_sizes.size() > 0) {
			Camera.Size best_size = getOptimalPreviewSize(supported_preview_sizes);
			parameters.setPreviewSize(best_size.width, best_size.height);
			if (MyDebug.LOG)
				Log.d(TAG, "new preview size: " + parameters.getPreviewSize().width + ", " + parameters.getPreviewSize().height);
			this.setAspectRatio(((double) parameters.getPreviewSize().width) / (double) parameters.getPreviewSize().height);
			camera.setParameters(parameters);
		}
	}

	/**
	 * Parses a video quality string into a CamcorderProfile. The expected
	 * format is "<profile>" or "<profile>_r<width>x<height>", where
	 * <profile> is a CamcorderProfile.QUALITY_* integer and the optional
	 * "_r" suffix overrides the frame size. Falls back to QUALITY_HIGH when
	 * the string cannot be parsed.
	 */
	private CamcorderProfile getCamcorderProfile(String quality) {
		if (MyDebug.LOG)
			Log.e(TAG, "getCamcorderProfile(): " + quality); // NOTE(review): debug text logged at error level - confirm intended
		CamcorderProfile camcorder_profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_HIGH); // default
		try {
			String profile_string = quality;
			int index = profile_string.indexOf('_');
			if (index != -1) {
				profile_string = quality.substring(0, index);
				if (MyDebug.LOG)
					Log.e(TAG, "    profile_string: " + profile_string);
			}
			int profile = Integer.parseInt(profile_string);
			camcorder_profile = CamcorderProfile.get(cameraId, profile);
			if (index != -1 && index + 1 < quality.length()) {
				String override_string = quality.substring(index + 1);
				if (MyDebug.LOG)
					Log.e(TAG, "    override_string: " + override_string);
				if (override_string.charAt(0) == 'r' && override_string.length() >= 4) {
					index = override_string.indexOf('x');
					if (index == -1) {
						if (MyDebug.LOG)
							Log.d(TAG, "override_string invalid format, can't find x");
					} else {
						String resolution_w_s = override_string.substring(1, index); // skip
																						// first
																						// 'r'
						String resolution_h_s = override_string.substring(index + 1);
						if (MyDebug.LOG) {
							Log.d(TAG, "resolution_w_s: " + resolution_w_s);
							Log.d(TAG, "resolution_h_s: " + resolution_h_s);
						}
						// copy to local variable first, so that if we fail to
						// parse height, we don't set the width either
						int resolution_w = Integer.parseInt(resolution_w_s);
						int resolution_h = Integer.parseInt(resolution_h_s);
						camcorder_profile.videoFrameWidth = resolution_w;
						camcorder_profile.videoFrameHeight = resolution_h;
					}
				} else {
					if (MyDebug.LOG)
						Log.d(TAG, "unknown override_string initial code, or otherwise invalid format");
				}
			}
		} catch (NumberFormatException e) {
			if (MyDebug.LOG)
				Log.e(TAG, "failed to parse video quality: " + quality);
			e.printStackTrace();
		}
		return camcorder_profile;
	}

	/**
	 * Formats a float for display: whole values render without a decimal
	 * point (e.g. "2"), anything else with exactly two decimal places in the
	 * default locale (e.g. "2.07").
	 */
	private static String formatFloatToString(final float f) {
		final int truncated = (int) f;
		return (f == truncated) ? Integer.toString(truncated) : String.format(Locale.getDefault(), "%.2f", f);
	}

	/**
	 * Euclid's algorithm for the greatest common factor, written recursively.
	 * Keeps the original's "b > 0" guard, so a non-positive b returns a
	 * unchanged.
	 */
	private static int greatestCommonFactor(int a, int b) {
		return (b > 0) ? greatestCommonFactor(b, a % b) : a;
	}

	/**
	 * Returns the aspect ratio of width x height reduced to lowest terms,
	 * e.g. 1920x1080 -> "16:9".
	 *
	 * @return "w:h" in lowest terms; "0:0" when both inputs are zero
	 */
	private static String getAspectRatio(int width, int height) {
		int gcf = greatestCommonFactor(width, height);
		// gcf is 0 only when width == height == 0; guard against divide-by-zero
		if (gcf > 0) {
			width /= gcf;
			height /= gcf;
		}
		return width + ":" + height;
	}

	/**
	 * Formats a resolution as an aspect-ratio/megapixel summary, e.g.
	 * 1920x1080 -> "(16:9, 2.07MP)".
	 */
	static String getAspectRatioMPString(int width, int height) {
		final float megapixels = (width * height) / 1000000.0f;
		return "(" + getAspectRatio(width, height) + ", " + formatFloatToString(megapixels) + "MP)";
	}

	/**
	 * Builds a human-readable description of a video quality setting, e.g.
	 * "Highest: Full HD 1920x1080 (16:9, 2.07MP)". The marketing name (4K,
	 * Full HD, VGA, ...) is included only for well-known frame sizes.
	 */
	String getCamcorderProfileDescription(String quality) {
		CamcorderProfile profile = getCamcorderProfile(quality);
		String highest = (profile.quality == CamcorderProfile.QUALITY_HIGH) ? "Highest: " : "";
		// well-known resolutions and their display names (parallel arrays)
		final int[][] known_sizes = { { 3840, 2160 }, { 1920, 1080 }, { 1280, 720 }, { 720, 480 }, { 640, 480 }, { 352, 288 },
				{ 320, 240 }, { 176, 144 } };
		final String[] known_names = { "4K Ultra HD ", "Full HD ", "HD ", "SD ", "VGA ", "CIF ", "QVGA ", "QCIF " };
		String type = "";
		for (int i = 0; i < known_sizes.length; i++) {
			if (profile.videoFrameWidth == known_sizes[i][0] && profile.videoFrameHeight == known_sizes[i][1]) {
				type = known_names[i];
				break;
			}
		}
		return highest + type + profile.videoFrameWidth + "x" + profile.videoFrameHeight + " "
				+ getAspectRatioMPString(profile.videoFrameWidth, profile.videoFrameHeight);
	}

	/**
	 * Returns the aspect ratio (width/height) the preview should match: the
	 * current picture size's ratio. Requires the camera to be open.
	 * Note: the display_size parameter is currently unused - the
	 * display-ratio alternative is kept commented out below.
	 */
	public double getTargetRatio(Point display_size) {
		double targetRatio = 0.0f;
//		targetRatio = ((double) display_size.x) / (double) display_size.y;
//		if (MyDebug.LOG)
//			Log.d(TAG, "targetRatio: " + targetRatio);
		Camera.Parameters parameters = camera.getParameters();
		Camera.Size picture_size = parameters.getPictureSize();
		if (MyDebug.LOG)
			Log.d(TAG, "picture_size: " + picture_size.width + " x " + picture_size.height);
		targetRatio = ((double) picture_size.width) / (double) picture_size.height;
		return targetRatio;
	}

	/**
	 * Chooses the best supported preview size: prefer sizes matching the
	 * target aspect ratio (within a small tolerance) whose height is closest
	 * to the display's shorter edge; if none match the ratio, fall back to
	 * the size whose aspect ratio is closest.
	 *
	 * @param sizes supported preview sizes; may be null
	 * @return the chosen size, or null if sizes is null or empty
	 */
	public Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes) {
		if (MyDebug.LOG)
			Log.d(TAG, "getOptimalPreviewSize()");
		final double ASPECT_TOLERANCE = 0.05;
		if (sizes == null)
			return null;
		Camera.Size optimalSize = null;
		double minDiff = Double.MAX_VALUE;
		Point display_size = new Point();
		Activity activity = (Activity) this.getContext();
		{
			Display display = activity.getWindowManager().getDefaultDisplay();
			display.getSize(display_size);
			if (MyDebug.LOG)
				Log.d(TAG, "display_size: " + display_size.x + " x " + display_size.y);
		}

		double targetRatio = getTargetRatio(display_size);
		// aim for a preview height close to the display's shorter edge
		int targetHeight = Math.min(display_size.y, display_size.x);
		if (targetHeight <= 0) {
			targetHeight = display_size.y;
		}
		// Try to find a size that matches both aspect ratio and target height
		for (Camera.Size size : sizes) {
			if (MyDebug.LOG)
				Log.d(TAG, "    supported preview size: " + size.width + ", " + size.height);
			double ratio = (double) size.width / size.height;
			if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE)
				continue;
			if (Math.abs(size.height - targetHeight) < minDiff) {
				optimalSize = size;
				minDiff = Math.abs(size.height - targetHeight);
			}
		}
		if (optimalSize == null) {
			// can't find match for aspect ratio, so find closest one
			if (MyDebug.LOG)
				Log.d(TAG, "no preview size matches the aspect ratio");
			minDiff = Double.MAX_VALUE;
			for (Camera.Size size : sizes) {
				double ratio = (double) size.width / size.height;
				if (Math.abs(ratio - targetRatio) < minDiff) {
					optimalSize = size;
					minDiff = Math.abs(ratio - targetRatio);
				}
			}
		}
		// guard against an empty sizes list: previously this debug logging
		// dereferenced optimalSize unconditionally and would throw a
		// NullPointerException when it was still null
		if (MyDebug.LOG && optimalSize != null) {
			Log.d(TAG, "chose optimalSize: " + optimalSize.width + " x " + optimalSize.height);
			Log.d(TAG, "optimalSize ratio: " + ((double) optimalSize.width / optimalSize.height));
		}
		return optimalSize;
	}

//	@Override
//	protected void onMeasure(int widthSpec, int heightSpec) {
//		if (!this.has_aspect_ratio) {
//			super.onMeasure(widthSpec, heightSpec);
//			return;
//		}
//		int previewWidth = MeasureSpec.getSize(widthSpec);
//		int previewHeight = MeasureSpec.getSize(heightSpec);
//
//		// Get the padding of the border background.
//		int hPadding = getPaddingLeft() + getPaddingRight();
//		int vPadding = getPaddingTop() + getPaddingBottom();
//
//		// Resize the preview frame with correct aspect ratio.
//		previewWidth -= hPadding;
//		previewHeight -= vPadding;
//
//		boolean widthLonger = previewWidth > previewHeight;
//		int longSide = (widthLonger ? previewWidth : previewHeight);
//		int shortSide = (widthLonger ? previewHeight : previewWidth);
//		if (longSide > shortSide * aspect_ratio) {
//			longSide = (int) ((double) shortSide * aspect_ratio);
//		} else {
//			shortSide = (int) ((double) longSide / aspect_ratio);
//		}
//		if (widthLonger) {
//			previewWidth = longSide;
//			previewHeight = shortSide;
//		} else {
//			previewWidth = shortSide;
//			previewHeight = longSide;
//		}
//
//		// Add the padding of the border.
//		previewWidth += hPadding;
//		previewHeight += vPadding;
//
//		// Ask children to follow the new preview dimension.
//		super.onMeasure(MeasureSpec.makeMeasureSpec(previewWidth, MeasureSpec.EXACTLY),
//				MeasureSpec.makeMeasureSpec(previewHeight, MeasureSpec.EXACTLY));
//	}

	/**
	 * Sets the preview's target aspect ratio (width/height) and requests a
	 * relayout if the value changed.
	 *
	 * @param ratio target aspect ratio, must be strictly positive
	 * @throws IllegalArgumentException if ratio is not strictly positive
	 */
	private void setAspectRatio(double ratio) {
		// include the offending value so the failure is diagnosable
		if (ratio <= 0.0)
			throw new IllegalArgumentException("aspect ratio must be positive: " + ratio);

		has_aspect_ratio = true;
		if (aspect_ratio != ratio) {
			aspect_ratio = ratio;
			if (MyDebug.LOG)
				Log.d(TAG, "new aspect ratio: " + aspect_ratio);
			requestLayout();
		}
	}

	// Computes and applies the rotation needed so the preview is displayed
	// upright, following the algorithm from
	// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
	// note, if orientation is locked to landscape this is only called when
	// setting up the activity, and will always have the same orientation
	private void setCameraDisplayOrientation(Activity activity) {
		if (MyDebug.LOG)
			Log.d(TAG, "setCameraDisplayOrientation()");
		Camera.CameraInfo info = new Camera.CameraInfo();
		Camera.getCameraInfo(cameraId, info);
		// translate the display's rotation constant into degrees
		int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
		int degrees = 0;
		switch (rotation) {
		case Surface.ROTATION_0:
			degrees = 0;
			break;
		case Surface.ROTATION_90:
			degrees = 90;
			break;
		case Surface.ROTATION_180:
			degrees = 180;
			break;
		case Surface.ROTATION_270:
			degrees = 270;
			break;
		}
		if (MyDebug.LOG)
			Log.d(TAG, "    degrees = " + degrees);

		// combine sensor orientation with display rotation; front cameras are
		// mirrored, so the result is reflected for them
		int result = 0;
		if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
			result = (info.orientation + degrees) % 360;
			result = (360 - result) % 360; // compensate the mirror
		} else { // back-facing
			result = (info.orientation - degrees + 360) % 360;
		}
		if (MyDebug.LOG) {
			Log.d(TAG, "    info orientation is " + info.orientation);
			Log.d(TAG, "    setDisplayOrientation to " + result);
		}
		camera.setDisplayOrientation(result);
		// remember the orientation we applied
		this.display_orientation = result;
	}

	// Computes the rotation to apply to captured photos from the device
	// orientation sensor - see
	// http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setRotation(int)
	private void onOrientationChanged(int orientation) {
		/*
		 * if( MyDebug.LOG ) { Log.d(TAG, "onOrientationChanged()"); Log.d(TAG,
		 * "orientation: " + orientation); }
		 */
		// ignore the transient "unknown" orientation (e.g. device lying flat)
		if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN)
			return;
		if (camera == null)
			return;
		Camera.getCameraInfo(cameraId, camera_info);
		// snap the raw sensor angle to the nearest multiple of 90 degrees
		orientation = (orientation + 45) / 90 * 90;
		this.current_orientation = orientation % 360;
		// front cameras are mirrored, so the rotation is computed with the
		// opposite sign relative to back cameras
		int new_rotation = 0;
		if (camera_info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
			new_rotation = (camera_info.orientation - orientation + 360) % 360;
		} else { // back-facing camera
			new_rotation = (camera_info.orientation + orientation) % 360;
		}
		if (new_rotation != current_rotation) {
			/*
			 * if( MyDebug.LOG ) { Log.d(TAG, "    current_orientation is " +
			 * current_orientation); Log.d(TAG, "    info orientation is " +
			 * camera_info.orientation); Log.d(TAG,
			 * "    set Camera rotation from " + current_rotation + " to " +
			 * new_rotation); }
			 */
			// remember it; the value is applied to the camera parameters just
			// before a photo is taken (see takePictureWhenFocused)
			this.current_rotation = new_rotation;
		}
	}

	/**
	 * No custom drawing is performed on the preview surface itself; this
	 * simply delegates to SurfaceView.
	 */
	@Override
	public void onDraw(Canvas canvas) {
		super.onDraw(canvas);
	}

	/**
	 * Switches to the next available camera (cycling through all of them),
	 * showing a toast indicating whether the new camera faces front or back.
	 * Ignored while a photo is being taken, or if the device has only one
	 * camera.
	 */
	public void switchCamera() {
		if (MyDebug.LOG)
			Log.d(TAG, "switchCamera()");
		if (this.phase == PHASE_TAKING_PHOTO) {
			// don't risk cancelling the autofocus mid-capture, or otherwise
			// messing up an in-progress photo
			if (MyDebug.LOG)
				Log.d(TAG, "currently taking a photo");
			return;
		}
		final int n_cameras = Camera.getNumberOfCameras();
		if (MyDebug.LOG)
			Log.d(TAG, "found " + n_cameras + " cameras");
		if (n_cameras <= 1)
			return;
		closeCamera();
		cameraId = (cameraId + 1) % n_cameras;
		Camera.CameraInfo info = new Camera.CameraInfo();
		Camera.getCameraInfo(cameraId, info);
		final boolean front_facing = info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT;
		showToast(switch_camera_toast, front_facing ? "Front Camera" : "Back Camera");
		this.openCamera();
	}

	/**
	 * Advances to the next supported flash mode (wrapping around). Ignored
	 * while a photo is being taken, or if fewer than two flash modes are
	 * supported.
	 */
	void cycleFlash() {
		if (MyDebug.LOG)
			Log.d(TAG, "cycleFlash()");
		// if( is_taking_photo && !is_taking_photo_on_timer ) {
		if (this.phase == PHASE_TAKING_PHOTO) {
			// just to be safe - risk of cancelling the autofocus before taking
			// a photo, or otherwise messing things up
			if (MyDebug.LOG)
				Log.d(TAG, "currently taking a photo");
			return;
		}
		if (this.supported_flash_values != null && this.supported_flash_values.size() > 1) {
			int new_flash_index = (current_flash_index + 1) % this.supported_flash_values.size();
			updateFlash(new_flash_index);

			// now save
			// NOTE(review): despite the "save" comment and log message below,
			// the new flash value is only logged, never persisted to
			// preferences (compare updateFocus(), which writes through
			// SharedPreferences) - confirm whether persisting was intended.
			String flash_value = supported_flash_values.get(current_flash_index);
			if (MyDebug.LOG) {
				Log.d(TAG, "save new flash_value: " + flash_value);
			}
		}
	}

	/**
	 * Selects the flash mode matching the supplied value (e.g. "flash_auto").
	 *
	 * @return true if the value is supported and was applied, false otherwise
	 */
	public boolean updateFlash(String flash_value) {
		if (MyDebug.LOG)
			Log.d(TAG, "updateFlash(): " + flash_value);
		if (supported_flash_values == null)
			return false;
		final int new_flash_index = supported_flash_values.indexOf(flash_value);
		if (MyDebug.LOG)
			Log.d(TAG, "new_flash_index: " + new_flash_index);
		if (new_flash_index == -1)
			return false;
		updateFlash(new_flash_index);
		return true;
	}

	/**
	 * Applies the flash mode at the given index into supported_flash_values,
	 * if it differs from the current selection.
	 */
	private void updateFlash(int new_flash_index) {
		if (MyDebug.LOG)
			Log.d(TAG, "updateFlash(): " + new_flash_index);
		// updates the Flash button, and Flash camera mode
		if (supported_flash_values == null || new_flash_index == current_flash_index)
			return;
		final boolean initial = current_flash_index == -1;
		current_flash_index = new_flash_index;
		if (MyDebug.LOG)
			Log.d(TAG, "    current_flash_index is now " + current_flash_index + " (initial " + initial + ")");
		this.setFlash(supported_flash_values.get(current_flash_index));
	}

	/**
	 * Applies the given flash value to the camera, cancelling any autofocus
	 * in progress first. Also clears any flash value that was deferred until
	 * after the startup autofocus.
	 */
	private void setFlash(String flash_value) {
		if (MyDebug.LOG)
			Log.d(TAG, "setFlash() " + flash_value);
		// overrides any previously saved setting, for during the startup
		// autofocus
		set_flash_after_autofocus = "";
		cancelAutoFocus();
		Camera.Parameters parameters = camera.getParameters();
		final String flash_mode = convertFlashValueToMode(flash_value);
		if (!flash_mode.isEmpty() && !flash_mode.equals(parameters.getFlashMode())) {
			parameters.setFlashMode(flash_mode);
			camera.setParameters(parameters);
		}
	}

	// this returns the flash mode indicated by the UI, rather than from the
	// camera parameters (may be different, e.g., in startup autofocus!);
	// returns null if no flash mode is selected
	public String getCurrentFlashMode() {
		if (current_flash_index == -1)
			return null;
		return convertFlashValueToMode(supported_flash_values.get(current_flash_index));
	}

	/**
	 * Maps an internal flash value ("flash_off", "flash_auto", ...) to the
	 * corresponding Camera.Parameters flash mode constant; returns "" for an
	 * unrecognized value.
	 */
	private String convertFlashValueToMode(String flash_value) {
		if (flash_value.equals("flash_off"))
			return Camera.Parameters.FLASH_MODE_OFF;
		if (flash_value.equals("flash_auto"))
			return Camera.Parameters.FLASH_MODE_AUTO;
		if (flash_value.equals("flash_on"))
			return Camera.Parameters.FLASH_MODE_ON;
		if (flash_value.equals("flash_torch"))
			return Camera.Parameters.FLASH_MODE_TORCH;
		if (flash_value.equals("flash_red_eye"))
			return Camera.Parameters.FLASH_MODE_RED_EYE;
		return "";
	}

	/**
	 * Converts the camera's supported flash modes into our internal flash
	 * values, reordered so that the first entry is the preferred default.
	 */
	private List<String> convertFlashModesToValues(List<String> supported_flash_modes) {
		if (MyDebug.LOG)
			Log.d(TAG, "convertFlashModesToValues()");
		List<String> output_modes = new Vector<String>();
		if (supported_flash_modes != null) {
			// preferred ordering; the first supported mode becomes the default
			final String[] camera_modes = {
					Camera.Parameters.FLASH_MODE_AUTO,
					Camera.Parameters.FLASH_MODE_OFF,
					Camera.Parameters.FLASH_MODE_ON,
					Camera.Parameters.FLASH_MODE_TORCH,
					Camera.Parameters.FLASH_MODE_RED_EYE };
			final String[] our_values = {
					"flash_auto", "flash_off", "flash_on", "flash_torch", "flash_red_eye" };
			for (int i = 0; i < camera_modes.length; i++) {
				if (supported_flash_modes.contains(camera_modes[i])) {
					output_modes.add(our_values[i]);
					if (MyDebug.LOG)
						Log.d(TAG, " supports " + our_values[i]);
				}
			}
		}
		return output_modes;
	}

	/**
	 * Advances to the next supported focus mode (wrapping around). Ignored
	 * while a photo is being taken, or if fewer than two focus modes are
	 * supported.
	 */
	void cycleFocusMode() {
		if (MyDebug.LOG)
			Log.d(TAG, "cycleFocusMode()");
		if (this.phase == PHASE_TAKING_PHOTO) {
			// changing focus mode would cancel the in-progress autofocus, so
			// the photo would never be taken but the taking-photo state would
			// remain set
			if (MyDebug.LOG)
				Log.d(TAG, "currently taking a photo");
			return;
		}
		if (this.supported_focus_values == null || this.supported_focus_values.size() <= 1)
			return;
		final int next_index = (current_focus_index + 1) % this.supported_focus_values.size();
		updateFocus(next_index, false, true, true);
	}

	/**
	 * Selects the focus mode matching the supplied value
	 * (e.g. "focus_mode_auto").
	 *
	 * @return true if the value is supported and was applied, false otherwise
	 */
	private boolean updateFocus(String focus_value, boolean quiet, boolean save, boolean auto_focus) {
		if (MyDebug.LOG)
			Log.d(TAG, "updateFocus(): " + focus_value);
		if (this.supported_focus_values == null)
			return false;
		final int new_focus_index = supported_focus_values.indexOf(focus_value);
		if (MyDebug.LOG)
			Log.d(TAG, "new_focus_index: " + new_focus_index);
		if (new_focus_index == -1)
			return false;
		updateFocus(new_focus_index, quiet, save, auto_focus);
		return true;
	}

	/**
	 * Applies the focus mode at the given index into supported_focus_values
	 * if it differs from the current selection, optionally persisting the
	 * choice per-camera and optionally triggering an autofocus.
	 */
	private void updateFocus(int new_focus_index, boolean quiet, boolean save, boolean auto_focus) {
		if (MyDebug.LOG)
			Log.d(TAG, "updateFocus(): " + new_focus_index + " current_focus_index: " + current_focus_index);
		// updates the Focus button, and Focus camera mode
		if (this.supported_focus_values == null || new_focus_index == current_focus_index)
			return;
		final boolean initial = current_focus_index == -1;
		current_focus_index = new_focus_index;
		if (MyDebug.LOG)
			Log.d(TAG, "    current_focus_index is now " + current_focus_index + " (initial " + initial + ")");
		final String focus_value = supported_focus_values.get(current_focus_index);
		this.setFocus(focus_value, auto_focus);
		if (save) {
			// persist the chosen focus mode, keyed per camera
			SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getContext());
			SharedPreferences.Editor editor = sharedPreferences.edit();
			editor.putString(getFocusPreferenceKey(cameraId), focus_value);
			editor.apply();
		}
	}

	/**
	 * Applies the given focus value to the camera, then clears any focus
	 * areas and optionally kicks off an autofocus. Unknown values leave the
	 * focus mode unchanged (logged in debug builds).
	 */
	private void setFocus(String focus_value, boolean auto_focus) {
		if (MyDebug.LOG)
			Log.d(TAG, "setFocus() " + focus_value);
		if (camera == null) {
			if (MyDebug.LOG)
				Log.d(TAG, "null camera");
			return;
		}
		cancelAutoFocus();
		Camera.Parameters parameters = camera.getParameters();
		// internal focus values paired with the camera API constants
		final String[] our_values = {
				"focus_mode_auto", "focus_mode_infinity", "focus_mode_macro",
				"focus_mode_fixed", "focus_mode_edof", "focus_mode_continuous_video" };
		final String[] camera_modes = {
				Camera.Parameters.FOCUS_MODE_AUTO,
				Camera.Parameters.FOCUS_MODE_INFINITY,
				Camera.Parameters.FOCUS_MODE_MACRO,
				Camera.Parameters.FOCUS_MODE_FIXED,
				Camera.Parameters.FOCUS_MODE_EDOF,
				Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO };
		boolean matched = false;
		for (int i = 0; i < our_values.length && !matched; i++) {
			if (focus_value.equals(our_values[i])) {
				parameters.setFocusMode(camera_modes[i]);
				matched = true;
			}
		}
		if (!matched) {
			if (MyDebug.LOG)
				Log.d(TAG, "setFocus() received unknown focus value " + focus_value);
		}
		camera.setParameters(parameters);
		clearFocusAreas();
		if (auto_focus) {
			tryAutoFocus(false, false);
		}
	}

	/**
	 * Converts the camera's supported focus modes into our internal focus
	 * values, reordered so that the first entry is the preferred default.
	 */
	private List<String> convertFocusModesToValues(List<String> supported_focus_modes) {
		if (MyDebug.LOG)
			Log.d(TAG, "convertFocusModesToValues()");
		List<String> output_modes = new Vector<String>();
		if (supported_focus_modes != null) {
			// preferred ordering; the first supported mode becomes the default
			final String[] camera_modes = {
					Camera.Parameters.FOCUS_MODE_AUTO,
					Camera.Parameters.FOCUS_MODE_INFINITY,
					Camera.Parameters.FOCUS_MODE_MACRO,
					Camera.Parameters.FOCUS_MODE_FIXED,
					Camera.Parameters.FOCUS_MODE_EDOF,
					Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO };
			final String[] our_values = {
					"focus_mode_auto", "focus_mode_infinity", "focus_mode_macro",
					"focus_mode_fixed", "focus_mode_edof", "focus_mode_continuous_video" };
			for (int i = 0; i < camera_modes.length; i++) {
				if (supported_focus_modes.contains(camera_modes[i])) {
					output_modes.add(our_values[i]);
					if (MyDebug.LOG)
						Log.d(TAG, " supports " + our_values[i]);
				}
			}
		}
		return output_modes;
	}

	/**
	 * Entry point for the shutter button. Cancels a running self-timer if one
	 * is active; otherwise reads the self-timer and burst-mode preferences
	 * and either takes the photo immediately or schedules it on a timer.
	 */
	public void takePicturePressed() {
		if (MyDebug.LOG)
			Log.d(TAG, "takePicturePressed");
		if (camera == null) {
			if (MyDebug.LOG)
				Log.d(TAG, "camera not available");
			/*
			 * is_taking_photo_on_timer = false; is_taking_photo = false;
			 */
			this.phase = PHASE_NORMAL;
			return;
		}
		if (!this.has_surface) {
			if (MyDebug.LOG)
				Log.d(TAG, "preview surface not yet available");
			/*
			 * is_taking_photo_on_timer = false; is_taking_photo = false;
			 */
			this.phase = PHASE_NORMAL;
			return;
		}
		// a second press while the self-timer is counting down cancels it
		// if( is_taking_photo_on_timer ) {
		if (this.isOnTimer()) {
			takePictureTimerTask.cancel();
			if (beepTimerTask != null) {
				beepTimerTask.cancel();
			}
			/*
			 * is_taking_photo_on_timer = false; is_taking_photo = false;
			 */
			this.phase = PHASE_NORMAL;
			if (MyDebug.LOG)
				Log.d(TAG, "cancelled camera timer");
			showToast(take_photo_toast, "Cancelled timer");
			return;
		}
		// ignore presses while a capture is already in progress
		// if( is_taking_photo ) {
		if (this.phase == PHASE_TAKING_PHOTO) {

			return;
		}

		// make sure that preview running (also needed to hide trash/share
		// icons)
		this.startCameraPreview();

		// is_taking_photo = true;
		// read the self-timer delay (seconds, stored as a string) from
		// preferences; 0 or unparseable means no timer
		SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getContext());
		String timer_value = sharedPreferences.getString("preference_timer", "0");
		long timer_delay = 0;
		try {
			timer_delay = Integer.parseInt(timer_value) * 1000;
		} catch (NumberFormatException e) {
			if (MyDebug.LOG)
				Log.e(TAG, "failed to parse timer_value: " + timer_value);
			e.printStackTrace();
			timer_delay = 0;
		}

		// read the burst count from preferences; defaults to a single shot on
		// parse failure
		String burst_mode_value = sharedPreferences.getString("preference_burst_mode", "1");
		try {
			n_burst = Integer.parseInt(burst_mode_value);
			if (MyDebug.LOG)
				Log.d(TAG, "n_burst: " + n_burst);
		} catch (NumberFormatException e) {
			if (MyDebug.LOG)
				Log.e(TAG, "failed to parse burst_mode_value: " + burst_mode_value);
			e.printStackTrace();
			n_burst = 1;
		}
		remaining_burst_photos = n_burst - 1;

		if (timer_delay == 0) {
			takePicture();
		} else {
			takePictureOnTimer(timer_delay, false);
		}
	}

	/**
	 * Schedules the photo to be taken after timer_delay milliseconds, and (if
	 * the beep preference is enabled) plays a once-per-second notification
	 * sound until the photo fires.
	 *
	 * @param timer_delay delay in milliseconds before the photo is taken
	 * @param repeated    true when rescheduling between burst shots, in which
	 *                    case the "Started timer" toast is suppressed
	 */
	private void takePictureOnTimer(long timer_delay, boolean repeated) {
		if (MyDebug.LOG) {
			Log.d(TAG, "takePictureOnTimer");
			Log.d(TAG, "timer_delay: " + timer_delay);
		}
		this.phase = PHASE_TIMER;
		class TakePictureTimerTask extends TimerTask {
			public void run() {
				// stop the countdown beep before taking the photo
				if (beepTimerTask != null) {
					beepTimerTask.cancel();
				}
				takePicture();
			}
		}
		take_photo_time = System.currentTimeMillis() + timer_delay;
		if (MyDebug.LOG)
			Log.d(TAG, "take photo at: " + take_photo_time);
		if (!repeated) {
			showToast(take_photo_toast, "Started timer");
		}
		takePictureTimer.schedule(takePictureTimerTask = new TakePictureTimerTask(), timer_delay);

		SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this.getContext());
		if (sharedPreferences.getBoolean("preference_timer_beep", true)) {
			class BeepTimerTask extends TimerTask {
				public void run() {
					try {
						Uri notification = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION);
						Activity activity = (Activity) getContext();
						Ringtone r = RingtoneManager.getRingtone(activity.getApplicationContext(), notification);
						r.play();
					} catch (Exception e) {
						// best-effort beep: deliberately ignore failures (e.g.
						// no default notification sound available)
					}
				}
			}
			beepTimer.schedule(beepTimerTask = new BeepTimerTask(), 0, 1000);
		}
	}

	/**
	 * Starts the actual capture. If a recent autofocus succeeded (within the
	 * last 5 seconds) the photo is taken immediately; otherwise, in auto or
	 * macro focus modes, an autofocus is run first and the photo is taken
	 * from its callback. Other focus modes capture immediately.
	 */
	private void takePicture() {
		if (MyDebug.LOG)
			Log.d(TAG, "takePicture");
		this.thumbnail_anim = false;
		this.phase = PHASE_TAKING_PHOTO;
		if (camera == null) {
			if (MyDebug.LOG)
				Log.d(TAG, "camera not available");
			/*
			 * is_taking_photo_on_timer = false; is_taking_photo = false;
			 */
			this.phase = PHASE_NORMAL;
			return;
		}
		if (!this.has_surface) {
			if (MyDebug.LOG)
				Log.d(TAG, "preview surface not yet available");
			/*
			 * is_taking_photo_on_timer = false; is_taking_photo = false;
			 */
			this.phase = PHASE_NORMAL;
			return;
		}
		focus_success = FOCUS_DONE; // clear focus rectangle

		Camera.Parameters parameters = camera.getParameters();
		String focus_mode = parameters.getFocusMode();
		if (MyDebug.LOG)
			Log.d(TAG, "focus_mode is " + focus_mode);

		// reuse a focus that succeeded within the last 5 seconds rather than
		// refocusing for every shot
		if (this.successfully_focused && System.currentTimeMillis() < this.successfully_focused_time + 5000) {
			if (MyDebug.LOG)
				Log.d(TAG, "recently focused successfully, so no need to refocus");
			takePictureWhenFocused();
		} else if (focus_mode.equals(Camera.Parameters.FOCUS_MODE_AUTO) || focus_mode.equals(Camera.Parameters.FOCUS_MODE_MACRO)) {
			// the photo is taken from the autofocus callback, whether or not
			// the focus succeeded
			Camera.AutoFocusCallback autoFocusCallback = new Camera.AutoFocusCallback() {
				@Override
				public void onAutoFocus(boolean success, Camera camera) {
					if (MyDebug.LOG)
						Log.d(TAG, "autofocus complete: " + success);
					takePictureWhenFocused();
				}
			};
			if (MyDebug.LOG)
				Log.d(TAG, "start autofocus to take picture");
			try {
				camera.autoFocus(autoFocusCallback);
			} catch (RuntimeException e) {
				// just in case? We got a RuntimeException report here from 1
				// user on Google Play:
				// 21 Dec 2013, Xperia Go, Android 4.1
				// fall back to taking the picture without focusing
				autoFocusCallback.onAutoFocus(false, camera);

				if (MyDebug.LOG)
					Log.e(TAG, "runtime exception from autoFocus when trying to take photo");
				e.printStackTrace();
			}
		} else {
			takePictureWhenFocused();
		}
	}

	/**
	 * Performs the capture itself (called once focusing has completed or been
	 * skipped): sets the photo rotation, calls Camera.takePicture(), and in
	 * the JPEG callback decodes, rotates and saves the image, notifies the
	 * media scanner, restarts the preview, and schedules any remaining burst
	 * shots.
	 */
	private void takePictureWhenFocused() {
		// should be called when auto-focused
		if (MyDebug.LOG)
			Log.d(TAG, "takePictureWhenFocused");
		if (camera == null) {
			if (MyDebug.LOG)
				Log.d(TAG, "camera not available");
			/*
			 * is_taking_photo_on_timer = false; is_taking_photo = false;
			 */
			this.phase = PHASE_NORMAL;
			return;
		}
		if (!this.has_surface) {
			if (MyDebug.LOG)
				Log.d(TAG, "preview surface not yet available");
			/*
			 * is_taking_photo_on_timer = false; is_taking_photo = false;
			 */
			this.phase = PHASE_NORMAL;
			return;
		}
		successfully_focused = false; // so next photo taken will require an
										// autofocus
		if (MyDebug.LOG)
			Log.d(TAG, "remaining_burst_photos: " + remaining_burst_photos);

		Camera.ShutterCallback shutterCallback = new Camera.ShutterCallback() {
			// don't do anything here, but we need to implement the callback to
			// get the shutter sound (at least on Galaxy Nexus and Nexus 7)
			public void onShutter() {
				if (MyDebug.LOG)
					Log.d(TAG, "shutterCallback.onShutter()");
			}
		};

		Camera.PictureCallback jpegPictureCallback = new Camera.PictureCallback() {
			public void onPictureTaken(byte[] data, Camera cam) {
				// n.b., this is automatically run in a different thread
				System.gc();
				if (MyDebug.LOG)
					Log.d(TAG, "onPictureTaken");

				boolean success = false;
				String exif_orientation_s = null;
				String picFileName = null;
				File picFile = null;
				try {
					OutputStream outputStream = null;
					// destination file supplied by the app's media-file helper
					picFile = new File(CameraUtil.getOutputMediaFileUri(getContext(), CameraUtil.MEDIA_TYPE_IMAGE).getPath());
					picFileName = picFile.getAbsolutePath();
					if (MyDebug.LOG)
						Log.d(TAG, "save to: " + picFileName);

					// decode the JPEG and rotate it before saving.
					// NOTE(review): the rotation is hard-coded to +/-90
					// degrees rather than using current_rotation, and the
					// front-camera test below assumes the front camera is the
					// last camera index - confirm this is intended on all
					// devices.
					Bitmap bm0 = BitmapFactory.decodeByteArray(data, 0, data.length);
					Matrix matrix = new Matrix();
					matrix.setRotate(90, (float) bm0.getWidth() / 2, (float) bm0.getHeight() / 2);
					if ((cameraId + 1) % Camera.getNumberOfCameras() == 0) {
						matrix.setRotate(-90, (float) bm0.getWidth() / 2, (float) bm0.getHeight() / 2);
					}

					// NOTE(review): bm0 and bm are never recycle()d; on
					// pre-Honeycomb-style heaps this risks OOM on large photos
					Bitmap bm = Bitmap.createBitmap(bm0, 0, 0, bm0.getWidth(), bm0.getHeight(), matrix, true);

					outputStream = new FileOutputStream(picFile);

					if (outputStream != null) {
						bm.compress(Bitmap.CompressFormat.JPEG, 90, outputStream);
						outputStream.close();
						if (MyDebug.LOG)
							Log.d(TAG, "onPictureTaken saved photo");
						success = true;
					}

					if (success) {
						// make the new photo visible to Gallery/MTP
						broadcastFile(picFile);
					}
				} catch (FileNotFoundException e) {
					if (MyDebug.LOG)
						Log.e(TAG, "File not found: " + e.getMessage());
					// NOTE(review): getStackTrace() has no side effect - this
					// was presumably meant to be printStackTrace()
					e.getStackTrace();
					showToast(null, "Failed to save photo");
				} catch (IOException e) {
					if (MyDebug.LOG)
						Log.e(TAG, "I/O error writing file: " + e.getMessage());
					// NOTE(review): getStackTrace() has no side effect - this
					// was presumably meant to be printStackTrace()
					e.getStackTrace();
					showToast(null, "Failed to save photo");
				}

				is_preview_started = false; // preview automatically stopped due
											// to taking photo
				phase = PHASE_NORMAL; // need to set this even if remaining
										// burst photos, so we can restart the
										// preview
				if (remaining_burst_photos > 0) {
					// we need to restart the preview; and we do this in the
					// callback, as we need to restart after saving the image
					// (otherwise this can fail, at least on Nexus 7)
					startCameraPreview();
					if (MyDebug.LOG)
						Log.d(TAG, "burst mode photos remaining: onPictureTaken started preview");
				} else {
					phase = PHASE_NORMAL;
					// NOTE(review): sharedPreferences is not declared in this
					// method, so it is presumably a field of the enclosing
					// class - verify it is initialised before a photo can be
					// taken
					boolean pause_preview = sharedPreferences.getBoolean("preference_pause_preview", false);
					if (MyDebug.LOG)
						Log.d(TAG, "pause_preview? " + pause_preview);
					if (pause_preview && success) {
						setPreviewPaused(true);
						preview_image_name = picFileName;
					} else {
						// we need to restart the preview; and we do this in the
						// callback, as we need to restart after saving the
						// image
						// (otherwise this can fail, at least on Nexus 7)
						startCameraPreview();
						if (MyDebug.LOG)
							Log.d(TAG, "onPictureTaken started preview");
					}
				}

				System.gc();

				// schedule the next shot of a burst, either immediately or on
				// the configured burst interval
				if (remaining_burst_photos > 0) {
					remaining_burst_photos--;

					String timer_value = sharedPreferences.getString("preference_burst_interval", "0");
					long timer_delay = 0;
					try {
						timer_delay = (long) (Float.parseFloat(timer_value) * 1000);
					} catch (NumberFormatException e) {
						if (MyDebug.LOG)
							Log.e(TAG, "failed to parse timer_value: " + timer_value);
						e.printStackTrace();
						timer_delay = 0;
					}

					if (timer_delay == 0) {
						// we go straight to taking a photo rather than
						// refocusing, for speed
						// need to manually set the phase and rehide the GUI
						phase = PHASE_TAKING_PHOTO;
						takePictureWhenFocused();
					} else {
						takePictureOnTimer(timer_delay, true);
					}
				}
			}
		};
		{
			// apply the rotation computed by onOrientationChanged() so the
			// JPEG is tagged/encoded upright
			if (MyDebug.LOG)
				Log.d(TAG, "current_rotation: " + current_rotation);
			Camera.Parameters parameters = camera.getParameters();
			parameters.setRotation(current_rotation);
			camera.setParameters(parameters);

			boolean enable_sound = true;
			if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
				camera.enableShutterSound(enable_sound);
			}
			if (MyDebug.LOG)
				Log.d(TAG, "about to call takePicture");
			String toast_text = "";
			if (n_burst > 1) {
				int photo = (n_burst - remaining_burst_photos);
				toast_text = "Taking photo... (" + photo + " / " + n_burst + ")";
			} else {
				toast_text = "Taking photo...";
			}
			if (MyDebug.LOG)
				Log.d(TAG, toast_text);
			try {
				camera.takePicture(shutterCallback, null, jpegPictureCallback);
				showToast(take_photo_toast, toast_text);
			} catch (RuntimeException e) {
				// just in case? We got a RuntimeException report here from 1
				// user on Google Play; I also encountered it myself once of
				// Galaxy Nexus when starting up
				if (MyDebug.LOG)
					Log.e(TAG, "runtime exception from takePicture");
				e.printStackTrace();
				showToast(null, "Failed to take picture");
			}
		}
		if (MyDebug.LOG)
			Log.d(TAG, "takePicture exit");
	}

	/**
	 * Notifies the media scanner about a newly saved file so it shows up in
	 * the Gallery and over MTP. Directories are deliberately not broadcast.
	 */
	public void broadcastFile(File file) {
		if (file.isDirectory()) {
			// Don't broadcast for folders: ACTION_MEDIA_MOUNTED is no longer
			// allowed on Android 4.4 (SecurityException: Permission Denial),
			// and scanning a folder makes it show up as a file on a PC via
			// MTP (not fixed by rebooting!). New folders and their contents
			// appear by themselves anyway, both in Gallery on the device and
			// on a PC connected via MTP.
			return;
		}
		// MediaScannerConnection.scanFile() is preferred over broadcasting
		// ACTION_MEDIA_SCANNER_SCAN_FILE (both work fine)
		MediaScannerConnection.scanFile(getContext(), new String[] { file.getAbsolutePath() }, null,
				new MediaScannerConnection.OnScanCompletedListener() {
					public void onScanCompleted(String path, Uri uri) {
						if (MyDebug.LOG) {
							Log.d("ExternalStorage", "Scanned " + path + ":");
							Log.d("ExternalStorage", "-> uri=" + uri);
						}
					}
				});
	}

	/**
	 * Attempts an autofocus if the preview is running and no photo is in
	 * progress. Only auto and macro focus modes actually autofocus; for other
	 * modes that support focus areas, the focus box is shown as "success".
	 *
	 * @param startup true when called as part of camera startup (the flash is
	 *                temporarily forced off and restored after focusing)
	 * @param manual  true when the user explicitly requested the autofocus
	 *                (by touching the screen)
	 */
	private void tryAutoFocus(final boolean startup, final boolean manual) {
		// manual: whether user has requested autofocus (by touching screen)
		if (MyDebug.LOG)
			Log.d(TAG, "tryAutoFocus");
		if (camera == null) {
			if (MyDebug.LOG)
				Log.d(TAG, "no camera");
		} else if (!this.has_surface) {
			if (MyDebug.LOG)
				Log.d(TAG, "preview surface not yet available");
		} else if (!this.is_preview_started) {
			if (MyDebug.LOG)
				Log.d(TAG, "preview not yet started");
		}
		// else if( is_taking_photo ) {
		else if (this.isTakingPhotoOrOnTimer()) {
			if (MyDebug.LOG)
				Log.d(TAG, "currently taking a photo");
		} else {
			// it's only worth doing autofocus when autofocus has an effect
			// (i.e., auto or macro mode)
			Camera.Parameters parameters = camera.getParameters();
			String focus_mode = parameters.getFocusMode();
			// getFocusMode() is documented as never returning null, however
			// I've had null pointer exceptions reported in Google Play from the
			// below line (v1.7),
			// on Galaxy Tab 10.1 (GT-P7500), Android 4.0.3 - 4.0.4; HTC EVO 3D
			// X515m (shooteru), Android 4.0.3 - 4.0.4
			if (focus_mode != null && (focus_mode.equals(Camera.Parameters.FOCUS_MODE_AUTO) || focus_mode.equals(Camera.Parameters.FOCUS_MODE_MACRO))) {
				if (MyDebug.LOG)
					Log.d(TAG, "try to start autofocus");
				String old_flash = parameters.getFlashMode();
				if (MyDebug.LOG)
					Log.d(TAG, "old_flash: " + old_flash);
				set_flash_after_autofocus = "";
				// getFlashMode() may return null if flash not supported!
				// fix: compare Strings with equals(), not reference != (the
				// value returned by getFlashMode() is not guaranteed to be the
				// interned constant, so '!=' could wrongly treat "off" as on)
				if (startup && old_flash != null && !Camera.Parameters.FLASH_MODE_OFF.equals(old_flash)) {
					// force the flash off during the startup autofocus, and
					// remember the mode to restore afterwards
					set_flash_after_autofocus = old_flash;
					parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
					camera.setParameters(parameters);
				}
				Camera.AutoFocusCallback autoFocusCallback = new Camera.AutoFocusCallback() {
					@Override
					public void onAutoFocus(boolean success, Camera camera) {
						if (MyDebug.LOG)
							Log.d(TAG, "autofocus complete: " + success);
						autoFocusCompleted(manual, success, false);
					}
				};

				this.focus_success = FOCUS_WAITING;
				this.focus_complete_time = -1;
				this.successfully_focused = false;
				try {
					camera.autoFocus(autoFocusCallback);
				} catch (RuntimeException e) {
					// just in case? We got a RuntimeException report here from
					// 1 user on Google Play
					autoFocusCallback.onAutoFocus(false, camera);

					if (MyDebug.LOG)
						Log.e(TAG, "runtime exception from autoFocus");
					e.printStackTrace();
				}
			} else if (has_focus_area) {
				// do this so we get the focus box, for focus modes that support
				// focus area, but don't support autofocus
				focus_success = FOCUS_SUCCESS;
				focus_complete_time = System.currentTimeMillis();
			}
		}
	}

	/** Abort any in-flight autofocus cycle and report it as cancelled. */
	private void cancelAutoFocus() {
		if (MyDebug.LOG)
			Log.d(TAG, "cancelAutoFocus");
		if (camera == null)
			return;
		camera.cancelAutoFocus();
		autoFocusCompleted(false, false, true);
	}

	/**
	 * Record the outcome of an autofocus attempt and restore the flash mode
	 * that tryAutoFocus() may have temporarily disabled.
	 *
	 * @param manual    whether the focus attempt was user-initiated
	 * @param success   whether the camera reported a successful focus
	 * @param cancelled whether the attempt was aborted via cancelAutoFocus()
	 */
	private void autoFocusCompleted(boolean manual, boolean success, boolean cancelled) {
		if (MyDebug.LOG) {
			Log.d(TAG, "autoFocusCompleted");
			Log.d(TAG, "    manual? " + manual);
			Log.d(TAG, "    success? " + success);
			Log.d(TAG, "    cancelled? " + cancelled);
		}
		if (cancelled) {
			focus_success = FOCUS_DONE;
		} else {
			focus_success = success ? FOCUS_SUCCESS : FOCUS_FAILED;
			focus_complete_time = System.currentTimeMillis();
		}
		if (manual && !cancelled && success) {
			successfully_focused = true;
			successfully_focused_time = focus_complete_time;
		}
		// BUGFIX: guard against camera being null — this method runs from the
		// asynchronous onAutoFocus() callback, which can fire after the camera
		// has been closed (e.g., app paused mid-focus), and the original code
		// dereferenced camera unconditionally here.
		if (set_flash_after_autofocus.length() > 0 && camera != null) {
			if (MyDebug.LOG)
				Log.d(TAG, "set flash back to: " + set_flash_after_autofocus);
			Camera.Parameters parameters = camera.getParameters();
			parameters.setFlashMode(set_flash_after_autofocus);
			set_flash_after_autofocus = "";
			camera.setParameters(parameters);
		}
	}

	/**
	 * Start (or resume) the live camera preview, re-enabling face detection
	 * when the device supports it, and leave the preview in the un-paused
	 * state.
	 */
	private void startCameraPreview() {
		// if( camera != null && !is_taking_photo && !is_preview_started ) {
		boolean can_start = camera != null && !this.isTakingPhotoOrOnTimer() && !is_preview_started;
		if (can_start) {
			camera.startPreview();
			this.is_preview_started = true;
			if (this.supports_face_detection) {
				if (MyDebug.LOG)
					Log.d(TAG, "start face detection");
				try {
					camera.startFaceDetection();
				} catch (RuntimeException e) {
					// I didn't think this could happen, as we only call
					// startFaceDetection() after we've called takePicture() or
					// stopPreview(), which the Android docs say stops the face
					// detection
					// however I had a crash reported on Google Play for Open
					// Camera v1.4
					// 2 Jan 2014, "maxx_ax5", Android 4.0.3-4.0.4
					// startCameraPreview() was called after taking photo in
					// burst mode, but I tested with burst mode and face
					// detection, and can't reproduce the crash on Galaxy Nexus
					if (MyDebug.LOG)
						Log.d(TAG, "face detection already started");
				}
				faces_detected = null;
			}
		}
		// always clear the paused state, even if the preview was already live
		this.setPreviewPaused(false);
	}

	/**
	 * Switch the preview phase between paused and normal; leaving the paused
	 * state also discards the pending preview image name.
	 */
	private void setPreviewPaused(boolean paused) {
		if (MyDebug.LOG)
			Log.d(TAG, "setPreviewPaused: " + paused);
		/*
		 * is_preview_paused = paused; if( is_preview_paused ) {
		 */
		if (!paused) {
			this.phase = PHASE_NORMAL;
			preview_image_name = null;
		} else {
			this.phase = PHASE_PREVIEW_PAUSED;
		}
	}

	/** @return whether the current camera supports face detection */
	public boolean supportsFaceDetection() {
		if (MyDebug.LOG)
			Log.d(TAG, "supportsFaceDetection");
		return this.supports_face_detection;
	}

	/** @return the color effects supported by the current camera */
	List<String> getSupportedColorEffects() {
		if (MyDebug.LOG)
			Log.d(TAG, "getSupportedColorEffects");
		return color_effects;
	}

	/** @return the scene modes supported by the current camera */
	List<String> getSupportedSceneModes() {
		if (MyDebug.LOG)
			Log.d(TAG, "getSupportedSceneModes");
		return scene_modes;
	}

	/** @return the white-balance values supported by the current camera */
	List<String> getSupportedWhiteBalances() {
		if (MyDebug.LOG)
			Log.d(TAG, "getSupportedWhiteBalances");
		return white_balances;
	}

	/** @return the vendor-specific parameter key used for ISO settings */
	String getISOKey() {
		if (MyDebug.LOG)
			Log.d(TAG, "getISOKey");
		return iso_key;
	}

	/** @return the ISO values supported by the current camera */
	List<String> getSupportedISOs() {
		if (MyDebug.LOG)
			Log.d(TAG, "getSupportedISOs");
		return isos;
	}

	/**
	 * @return the camera's current exposure compensation index, or 0 when no
	 *         camera is open
	 */
	int getCurrentExposure() {
		if (MyDebug.LOG)
			Log.d(TAG, "getCurrentExposure");
		if (camera == null)
			return 0;
		return camera.getParameters().getExposureCompensation();
	}

	/** @return the preview sizes supported by the current camera */
	List<Camera.Size> getSupportedPreviewSizes() {
		if (MyDebug.LOG)
			Log.d(TAG, "getSupportedPreviewSizes");
		return supported_preview_sizes;
	}

	/** @return the still-picture sizes supported by the current camera */
	public List<Camera.Size> getSupportedPictureSizes() {
		if (MyDebug.LOG)
			Log.d(TAG, "getSupportedPictureSizes");
		return sizes;
	}

	/** @return index of the currently selected picture size */
	int getCurrentPictureSizeIndex() {
		if (MyDebug.LOG)
			Log.d(TAG, "getCurrentPictureSizeIndex");
		return current_size_index;
	}

	/** @return supported flash values; null when flash is unsupported */
	List<String> getSupportedFlashValues() {
		return this.supported_flash_values;
	}

	/** @return supported focus values; null when focus is unsupported */
	List<String> getSupportedFocusValues() {
		return this.supported_focus_values;
	}

	/** @return identifier of the currently selected camera */
	public int getCameraId() {
		return cameraId;
	}

	/** Lifecycle hook: re-acquire the camera when the activity resumes. */
	public void onResume() {
		if (MyDebug.LOG)
			Log.d(TAG, "onResume");
		app_is_paused = false;
		openCamera();
	}

	/** Lifecycle hook: release the camera when the activity pauses. */
	public void onPause() {
		if (MyDebug.LOG)
			Log.d(TAG, "onPause");
		app_is_paused = true;
		closeCamera();
	}

	/**
	 * Show an on-screen toast message.
	 *
	 * Currently a no-op: the rotated-toast implementation (a custom
	 * RotatedTextView rendered via Toast.setView(), cancelling any previous
	 * toast tracked in {@code clear_toast}) was disabled by commenting out the
	 * entire body; the dead code also referenced classes not imported by this
	 * file (Color, View). It has been removed — recover it from version
	 * control if the toast UI is to be restored.
	 *
	 * @param clear_toast holder whose previous toast would be cancelled before
	 *                    showing the new one (currently unused)
	 * @param message     text that would be displayed (currently unused)
	 */
	public void showToast(final ToastBoxer clear_toast, final String message) {
		// intentionally empty — see Javadoc above
	}

	/** Store the rotation (in degrees) to apply to on-screen UI drawing. */
	void setUIRotation(int rotation) {
		if (MyDebug.LOG)
			Log.d(TAG, "setUIRotation");
		this.ui_rotation = rotation;
	}

	// must be static, to safely call from other Activities
	/** @return per-camera SharedPreferences key for the flash setting */
	public static String getFlashPreferenceKey(int cameraId) {
		return "flash_value_" + Integer.toString(cameraId);
	}

	// must be static, to safely call from other Activities
	/** @return per-camera SharedPreferences key for the focus setting */
	public static String getFocusPreferenceKey(int cameraId) {
		return "focus_value_" + Integer.toString(cameraId);
	}

	// must be static, to safely call from other Activities
	/** @return per-camera SharedPreferences key for the picture resolution */
	public static String getResolutionPreferenceKey(int cameraId) {
		return "camera_resolution_" + Integer.toString(cameraId);
	}

	// must be static, to safely call from other Activities
	/** @return per-camera SharedPreferences key for the video quality */
	public static String getVideoQualityPreferenceKey(int cameraId) {
		return "video_quality_" + Integer.toString(cameraId);
	}

	// must be static, to safely call from other Activities
	// SharedPreferences key; the name suggests a photo/video mode flag —
	// confirm semantics against the code that reads it.
	public static String getIsVideoPreferenceKey() {
		return "is_video";
	}

	// must be static, to safely call from other Activities
	// SharedPreferences key for the exposure-compensation setting.
	public static String getExposurePreferenceKey() {
		return "preference_exposure";
	}

	// must be static, to safely call from other Activities
	// SharedPreferences key for the color-effect setting.
	public static String getColorEffectPreferenceKey() {
		return "preference_color_effect";
	}

	// must be static, to safely call from other Activities
	// SharedPreferences key for the scene-mode setting.
	public static String getSceneModePreferenceKey() {
		return "preference_scene_mode";
	}

	// must be static, to safely call from other Activities
	// SharedPreferences key for the white-balance setting.
	public static String getWhiteBalancePreferenceKey() {
		return "preference_white_balance";
	}

	// must be static, to safely call from other Activities
	// SharedPreferences key for the ISO setting.
	public static String getISOPreferenceKey() {
		return "preference_iso";
	}

	// for testing:
	/** @return the underlying Camera instance; may be null when closed */
	public Camera getCamera() {
		return camera;
	}

	/** @return whether the current camera exposes any focus values */
	public boolean supportsFocus() {
		return supported_focus_values != null;
	}

	/** @return whether the current camera exposes any flash values */
	public boolean supportsFlash() {
		return supported_flash_values != null;
	}

	/** @return the selected flash value, or null when none is selected */
	public String getCurrentFlashValue() {
		return current_flash_index == -1 ? null : supported_flash_values.get(current_flash_index);
	}

	/** @return whether a touch-to-focus area is currently set */
	public boolean hasFocusArea() {
		return has_focus_area;
	}

	/** @return true while a photo is being taken or the self-timer runs */
	public boolean isTakingPhotoOrOnTimer() {
		// return this.is_taking_photo;
		final int current_phase = this.phase;
		return current_phase == PHASE_TAKING_PHOTO || current_phase == PHASE_TIMER;
	}

	/** @return true while a photo capture is in progress */
	public boolean isTakingPhoto() {
		return phase == PHASE_TAKING_PHOTO;
	}

	/** @return true while the self-timer countdown is running */
	public boolean isOnTimer() {
		// return this.is_taking_photo_on_timer;
		return phase == PHASE_TIMER;
	}

	/** @return whether the camera preview is currently running */
	public boolean isPreviewStarted() {
		return is_preview_started;
	}
}
