package com.android.support.jhf.cropimage.cropactivity;

/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Path;
import android.graphics.PointF;
import android.graphics.PorterDuff;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Region;
import android.media.ExifInterface;
import android.media.FaceDetector;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.provider.MediaStore;
import android.provider.MediaStore.Images.ImageColumns;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.concurrent.CountDownLatch;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.impl.client.DefaultHttpClient;

import com.android.support.jhf.R;
import com.android.support.jhf.handlerui.HandlerToastUI;

/**
 * 		Intent intent = new Intent(mContext, CropImage.class);
 * 		intent.putExtra("savepath", Environment.getExternalStorageDirectory()
 *				+ "/cropimage.jpg");
 *		intent.putExtra("crop", "true");
 *		intent.putExtra("aspectX", 1);
 *		intent.putExtra("aspectY", 1);
 *		intent.putExtra("outputX", 512);
 *		intent.putExtra("outputY", 512);
 *		intent.putExtra("scale", true);
 *		intent.putExtra("circleCrop", true);
 *		intent.putExtra("outputFormat", Bitmap.CompressFormat.JPEG.toString());
 *		intent.putExtra("picturepath", imagePathString);
 *		startActivityForResult(intent, 0);
 *
 * The activity can crop specific region of interest from an image.
 */
public class CropImage extends MonitoredActivity {
	private static final String TAG = "CropImage";

	// These are various options can be specified in the intent.
	private Bitmap.CompressFormat mOutputFormat = Bitmap.CompressFormat.JPEG; // only
																				// used
																				// with
																				// mSaveUri
	private String mSaveUri = null;
	private int mAspectX, mAspectY; // CR: two definitions per line == sad
									// panda.
	private boolean mDoFaceDetection = true;
	private boolean mCircleCrop = false;
	private final Handler mHandler = new Handler();

	// These options specifiy the output image size and whether we should
	// scale the output to fit it (or just crop it).
	private int mOutputX, mOutputY;
	private boolean mScale;
	private boolean mScaleUp = true;

	boolean mWaitingToPick; // Whether we are wait the user to pick a face.
	boolean mSaving; // Whether the "save" button is already clicked.

	private CropImageView mImageView;
	private ContentResolver mContentResolver;

	private Bitmap mBitmap;
	// private MediaItem mItem;
	private final BitmapManager.ThreadSet mDecodingThreads = new BitmapManager.ThreadSet();
	HighlightView mCrop;

	private Context mContext;

	/**
	 * Reads the cropping options from the launching intent, loads the source
	 * bitmap (either inline via the "data" extra or from the "picturepath"
	 * file), and wires up the save/discard buttons before starting face
	 * detection.
	 */
	@Override
	public void onCreate(Bundle icicle) {
		super.onCreate(icicle);
		mContentResolver = getContentResolver();
		requestWindowFeature(Window.FEATURE_NO_TITLE);
		setContentView(R.layout.android_support_jhf_cropimage);

		mContext = this;

		mImageView = (CropImageView) findViewById(R.id.image);

		Intent intent = getIntent();
		Bundle extras = intent.getExtras();

		if (extras != null) {
			// Any non-null "circleCrop" value enables circular cropping and
			// forces a square aspect (the aspectX/aspectY reads below still
			// overwrite these, preserving the original behavior).
			if (extras.getString("circleCrop") != null) {
				mCircleCrop = true;
				mAspectX = 1;
				mAspectY = 1;
			}
			mSaveUri = extras.getString("savepath");
			if (mSaveUri != null) {
				String outputFormatString = extras.getString("outputFormat");
				if (outputFormatString != null) {
					mOutputFormat = Bitmap.CompressFormat
							.valueOf(outputFormatString);
				}
			}
			// Passing the bitmap inline is fragile: a large bitmap exceeds
			// the Binder transaction limit and crashes the sender.
			mBitmap = (Bitmap) extras.getParcelable("data");
			mAspectX = extras.getInt("aspectX");
			mAspectY = extras.getInt("aspectY");
			mOutputX = extras.getInt("outputX");
			mOutputY = extras.getInt("outputY");
			mScale = extras.getBoolean("scale", true);
			mScaleUp = extras.getBoolean("scaleUpIfNeeded", true);
			mDoFaceDetection = extras.containsKey("noFaceDetection") ? !extras
					.getBoolean("noFaceDetection") : true;
		}

		if (mBitmap == null) {
			// No inline bitmap: load it from the "picturepath" extra. Guard
			// against a missing extras bundle (the original dereferenced
			// extras unconditionally here and could NPE).
			String picturePathString = (extras == null) ? null : extras
					.getString("picturepath");
			int rotation = 0;

			if (TextUtils.isEmpty(picturePathString)) {
				Log.e(TAG, "Cannot load bitmap, exiting.");
				finish();
				return;
			}

			try {
				mBitmap = createFromUri(mContext, picturePathString, 1024,
						1024, 0L);
				// Honor the EXIF orientation so the image is shown upright.
				ExifInterface exif = new ExifInterface(picturePathString);
				rotation = (int) exifOrientationToDegrees(exif.getAttributeInt(
						ExifInterface.TAG_ORIENTATION,
						ExifInterface.ORIENTATION_NORMAL));
			} catch (IOException e) {
				// The original swallowed these silently; log so decode
				// failures are diagnosable. mBitmap stays null and the
				// check below finishes the activity.
				Log.e(TAG, "Failed to load bitmap from " + picturePathString, e);
			} catch (URISyntaxException e) {
				Log.e(TAG, "Bad picture path " + picturePathString, e);
			}

			if (mBitmap != null && rotation != 0) {
				mBitmap = Util.rotate(mBitmap, rotation);
			}
		}

		if (mBitmap == null) {
			Log.e(TAG, "Cannot load bitmap, exiting.");
			finish();
			return;
		}

		// Make UI fullscreen.
		getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);

		findViewById(R.id.discard).setOnClickListener(
				new View.OnClickListener() {
					public void onClick(View v) {
						setResult(RESULT_CANCELED);
						finish();
					}
				});

		findViewById(R.id.save).setOnClickListener(new View.OnClickListener() {
			public void onClick(View v) {
				onSaveClicked();
			}
		});

		startFaceDetection();
	}

	/**
	 * Decodes a bitmap from a local file path, down-sampled so that it fits
	 * within roughly maxResolutionX x maxResolutionY (the larger limit is
	 * matched against the longer image edge). Uses a two-pass decode: first
	 * bounds only, then pixels with a power-of-two inSampleSize.
	 *
	 * @param context        unused; kept for interface compatibility.
	 * @param picturePath    absolute path of the image file; null/empty
	 *                       returns null.
	 * @param maxResolutionX limit matched against the longer edge.
	 * @param maxResolutionY limit matched against the shorter edge.
	 * @param cacheId        unused; kept for interface compatibility.
	 * @return the decoded bitmap, or null if the path is empty or the decode
	 *         failed/was cancelled.
	 * @throws IOException if the file cannot be read.
	 */
	public Bitmap createFromUri(Context context, String picturePath,
			int maxResolutionX, int maxResolutionY, long cacheId)
			throws IOException, URISyntaxException, OutOfMemoryError {
		final BitmapFactory.Options options = new BitmapFactory.Options();
		options.inScaled = false;
		options.inPreferredConfig = Bitmap.Config.RGB_565;
		options.inDither = false;
		Bitmap bitmap = null;
		if (!TextUtils.isEmpty(picturePath)) {
			// First pass: read only the image dimensions. Close the stream
			// even if decodeStream throws (the original leaked it).
			options.inJustDecodeBounds = true;
			BufferedInputStream boundsInput = new BufferedInputStream(
					new FileInputStream(picturePath), 16384);
			try {
				BitmapFactory.decodeStream(boundsInput, null, options);
			} finally {
				boundsInput.close();
			}
			int width = options.outWidth;
			int height = options.outHeight;
			// Match the larger limit (maxResolutionX) against the longer
			// image edge, the smaller against the shorter edge.
			float maxResX = maxResolutionY;
			if (width > height) {
				maxResX = maxResolutionX;
			}
			float maxResY = (maxResX == maxResolutionX) ? maxResolutionY
					: maxResolutionX;
			int ratioX = (int) Math.ceil((float) width / maxResX);
			int ratioY = (int) Math.ceil((float) height / maxResY);
			int ratio = nextPowerOf2(Math.max(ratioX, ratioY));
			options.inDither = false;
			options.inJustDecodeBounds = false;
			options.inSampleSize = ratio;
			// Watchdog: cancel the decode if it takes longer than 6 seconds.
			Thread timeoutThread = new Thread("BitmapTimeoutThread") {
				public void run() {
					try {
						Thread.sleep(6000);
						options.requestCancelDecode();
					} catch (InterruptedException e) {
						// Decode finished in time; nothing to cancel.
					}
				}
			};
			timeoutThread.start();
			// Second pass: decode the pixels at the computed sample size.
			BufferedInputStream pixelInput = new BufferedInputStream(
					new FileInputStream(picturePath), 16384);
			try {
				bitmap = BitmapFactory.decodeStream(pixelInput, null, options);
			} finally {
				pixelInput.close();
				// Stop the watchdog so it does not linger for the full 6s
				// (the original left it sleeping after every decode).
				timeoutThread.interrupt();
			}
		}
		return bitmap;
	}

	/**
	 * Returns the smallest power of two greater than or equal to n, using
	 * bit smearing. Inputs below 1 now return 1 so the result is always a
	 * valid BitmapFactory sample size (the original returned 0 for n == 0,
	 * which is not a legal inSampleSize).
	 *
	 * @param n value to round up; expected to be well below 2^31.
	 * @return the next power of two &gt;= n, at least 1.
	 */
	public int nextPowerOf2(int n) {
		if (n < 1) {
			return 1;
		}
		n -= 1;
		// Smear the highest set bit into every lower position, then add 1.
		n |= n >>> 16;
		n |= n >>> 8;
		n |= n >>> 4;
		n |= n >>> 2;
		n |= n >>> 1;
		return n + 1;
	}

	/**
	 * Maps an EXIF orientation constant to its rotation in degrees.
	 *
	 * @param exifOrientation one of the ExifInterface.ORIENTATION_* values.
	 * @return 90, 180 or 270 for the rotate constants; 0 for anything else
	 *         (including mirrored and normal orientations).
	 */
	public float exifOrientationToDegrees(int exifOrientation) {
		switch (exifOrientation) {
		case ExifInterface.ORIENTATION_ROTATE_90:
			return 90;
		case ExifInterface.ORIENTATION_ROTATE_180:
			return 180;
		case ExifInterface.ORIENTATION_ROTATE_270:
			return 270;
		default:
			return 0;
		}
	}

	/**
	 * Shows the bitmap in the image view and runs face detection
	 * (mRunFaceDetection) on a background thread behind a progress dialog.
	 */
	private void startFaceDetection() {
		if (isFinishing()) {
			return;
		}

		mImageView.setImageBitmapResetBase(mBitmap, true);

		Util.startBackgroundJob(this, null,
				getResources().getString(R.string.running_face_detection),
				new Runnable() {
					public void run() {
						// Block this background job until the UI thread has
						// installed the bitmap and centered the view; only
						// then run the face detector.
						final CountDownLatch latch = new CountDownLatch(1);
						final Bitmap b = mBitmap;
						mHandler.post(new Runnable() {
							public void run() {
								// If mBitmap was swapped while this post was
								// queued, install the snapshot and recycle
								// the replacement.
								if (b != mBitmap && b != null) {
									mImageView.setImageBitmapResetBase(b, true);
									mBitmap.recycle();
									mBitmap = b;
								}
								if (mImageView.getScale() == 1.0f) {
									mImageView.center(true, true);
								}
								latch.countDown();
							}
						});
						try {
							latch.await();
						} catch (InterruptedException e) {
							throw new RuntimeException(e);
						}
						mRunFaceDetection.run();
					}
				}, mHandler);
	}

	/**
	 * Handles the "save" button: renders the selected crop rectangle into a
	 * new bitmap, applies the circle mask and/or output sizing options, and
	 * either returns the bitmap inline or saves it on a background thread.
	 */
	private void onSaveClicked() {
		// TODO: use a decode/crop/encode single-step API so that the whole
		// (possibly large) bitmap does not have to be read into memory.
		if (mSaving)
			return;

		if (mCrop == null) {
			return;
		}

		// Guard against double taps; also makes onTouchEvent ignore input.
		mSaving = true;

		Rect r = mCrop.getCropRect();

		int width = r.width();
		int height = r.height();

		// If we are circle cropping, we want alpha channel, which is the
		// third param here.
		Bitmap croppedImage = Bitmap.createBitmap(width, height,
				mCircleCrop ? Bitmap.Config.ARGB_8888 : Bitmap.Config.RGB_565);
		{
			Canvas canvas = new Canvas(croppedImage);
			Rect dstRect = new Rect(0, 0, width, height);
			canvas.drawBitmap(mBitmap, r, dstRect, null);
		}

		if (mCircleCrop) {
			// Bitmaps are inherently rectangular but we want to return
			// something that's basically a circle, so we clear (alpha out)
			// the area outside the inscribed circle. Note the all-important
			// PorterDuff.Mode.CLEAR.
			Canvas c = new Canvas(croppedImage);
			Path p = new Path();
			p.addCircle(width / 2F, height / 2F, width / 2F, Path.Direction.CW);
			c.clipPath(p, Region.Op.DIFFERENCE);
			c.drawColor(0x00000000, PorterDuff.Mode.CLEAR);
		}

		// If the output is required to a specific size then scale or fill.
		if (mOutputX != 0 && mOutputY != 0) {
			if (mScale) {
				// Scale the image to the required dimensions.
				Bitmap old = croppedImage;
				croppedImage = Util.transform(new Matrix(), croppedImage,
						mOutputX, mOutputY, mScaleUp);
				if (old != croppedImage) {
					old.recycle();
				}
			} else {

				/*
				 * Don't scale the image; crop it to the size requested.
				 * Create a new image with the cropped image in the center
				 * and the extra space filled.
				 */

				Bitmap b = Bitmap.createBitmap(mOutputX, mOutputY,
						Bitmap.Config.RGB_565);
				Canvas canvas = new Canvas(b);

				Rect srcRect = mCrop.getCropRect();
				Rect dstRect = new Rect(0, 0, mOutputX, mOutputY);

				// Half the size difference between source and destination.
				int dx = (srcRect.width() - dstRect.width()) / 2;
				int dy = (srcRect.height() - dstRect.height()) / 2;

				// If the srcRect is too big, use the center part of it.
				srcRect.inset(Math.max(0, dx), Math.max(0, dy));

				// If the dstRect is too big, use the center part of it.
				dstRect.inset(Math.max(0, -dx), Math.max(0, -dy));

				// Draw the cropped bitmap in the center.
				canvas.drawBitmap(mBitmap, srcRect, dstRect, null);

				// Set the cropped bitmap as the new bitmap.
				croppedImage.recycle();
				croppedImage = b;
			}
		}

		// Return the cropped image directly or save it to the specified URI.
		Bundle myExtras = getIntent().getExtras();
		if (myExtras != null
				&& (myExtras.getParcelable("data") != null || myExtras
						.getBoolean("return-data"))) {
			// Caller asked for the result inline. NOTE(review): large
			// bitmaps may exceed the Binder transaction limit here.
			Bundle extras = new Bundle();
			extras.putParcelable("data", croppedImage);
			setResult(RESULT_OK, (new Intent()).setAction("inline-data")
					.putExtras(extras));
			finish();
		} else {
			// Save to disk off the UI thread behind a progress dialog.
			final Bitmap b = croppedImage;
			final Runnable save = new Runnable() {
				public void run() {
					saveOutput(b);
				}
			};
			Util.startBackgroundJob(this, null,
					getResources().getString(R.string.saving_image), save,
					mHandler);
		}
	}

	/**
	 * Compresses the cropped bitmap to the file path held in mSaveUri,
	 * returns that path to the caller via the activity result, then recycles
	 * the bitmap and finishes. Shows a failure toast when no save path was
	 * supplied.
	 *
	 * @param croppedImage the final cropped bitmap; always recycled here.
	 */
	private void saveOutput(Bitmap croppedImage) {
		if (mSaveUri != null) {
			FileOutputStream fileOutputStream = null;
			try {
				fileOutputStream = new FileOutputStream(mSaveUri);
				croppedImage.compress(mOutputFormat, 75, fileOutputStream);
				fileOutputStream.flush();
			} catch (IOException e) {
				// Covers FileNotFoundException from the constructor as well.
				// As in the original, we still report RESULT_OK below even
				// if the write failed.
				Log.e(TAG, "Cannot save cropped image to " + mSaveUri, e);
			} finally {
				// Close in finally so the stream is not leaked if
				// compress/flush throws (the original leaked it).
				if (fileOutputStream != null) {
					try {
						fileOutputStream.close();
					} catch (IOException e) {
						Log.e(TAG, "Cannot close " + mSaveUri, e);
					}
				}
			}
			Intent intent = new Intent();
			Bundle extras = new Bundle();
			extras.putString("savepath", mSaveUri);
			intent.putExtras(extras);
			setResult(RESULT_OK, intent);
		} else {
			// No destination path was supplied; tell the user saving failed.
			HandlerToastUI.getHandlerToastUI(this, "保存失败");
		}
		croppedImage.recycle();
		finish();
	}

	@Override
	protected void onPause() {
		super.onPause();
		// Cancel any in-flight bitmap decodes owned by this activity so
		// they do not keep running while we are in the background.
		BitmapManager.instance().cancelThreadDecoding(mDecodingThreads);
	}

	@Override
	protected void onDestroy() {
		// No extra cleanup beyond the base class; bitmaps are recycled at
		// the points where they are replaced or saved.
		super.onDestroy();
	}

	// Background job that runs face detection on a downscaled copy of the
	// bitmap, then posts highlight rectangles (one per detected face, or a
	// centered default) back to the UI thread.
	Runnable mRunFaceDetection = new Runnable() {
		// Scale between the detection bitmap and mBitmap: set to the
		// downscale ratio in prepareBitmap(), then inverted in run() so the
		// detector's coordinates can be mapped back to full resolution.
		float mScale = 1F;
		Matrix mImageMatrix;
		// The detector reports at most mFaces.length faces per pass.
		FaceDetector.Face[] mFaces = new FaceDetector.Face[3];
		int mNumFaces;

		// For each face, we create a HighlightView for it.
		private void handleFace(FaceDetector.Face f) {
			PointF midPoint = new PointF();

			// Use twice the eye distance as the face rect's half-size,
			// mapped back to full-resolution coordinates via mScale.
			int r = ((int) (f.eyesDistance() * mScale)) * 2;
			f.getMidPoint(midPoint);
			midPoint.x *= mScale;
			midPoint.y *= mScale;

			int midX = (int) midPoint.x;
			int midY = (int) midPoint.y;

			HighlightView hv = new HighlightView(mImageView);

			int width = mBitmap.getWidth();
			int height = mBitmap.getHeight();

			Rect imageRect = new Rect(0, 0, width, height);

			// Start from a zero-size rect at the midpoint, inflate by r,
			// then shrink symmetrically wherever it leaves the image.
			RectF faceRect = new RectF(midX, midY, midX, midY);
			faceRect.inset(-r, -r);
			if (faceRect.left < 0) {
				faceRect.inset(-faceRect.left, -faceRect.left);
			}

			if (faceRect.top < 0) {
				faceRect.inset(-faceRect.top, -faceRect.top);
			}

			if (faceRect.right > imageRect.right) {
				faceRect.inset(faceRect.right - imageRect.right, faceRect.right
						- imageRect.right);
			}

			if (faceRect.bottom > imageRect.bottom) {
				faceRect.inset(faceRect.bottom - imageRect.bottom,
						faceRect.bottom - imageRect.bottom);
			}

			hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop,
					mAspectX != 0 && mAspectY != 0);

			mImageView.add(hv);
		}

		// Create a default HighlightView if we found no face in the picture.
		private void makeDefault() {
			HighlightView hv = new HighlightView(mImageView);

			int width = mBitmap.getWidth();
			int height = mBitmap.getHeight();

			Rect imageRect = new Rect(0, 0, width, height);

			// Make the default size about 4/5 of the width or height.
			int cropWidth = Math.min(width, height) * 4 / 5;
			int cropHeight = cropWidth;

			// Respect the requested aspect ratio, shrinking the longer side.
			if (mAspectX != 0 && mAspectY != 0) {
				if (mAspectX > mAspectY) {
					cropHeight = cropWidth * mAspectY / mAspectX;
				} else {
					cropWidth = cropHeight * mAspectX / mAspectY;
				}
			}

			// Center the default rectangle in the image.
			int x = (width - cropWidth) / 2;
			int y = (height - cropHeight) / 2;

			RectF cropRect = new RectF(x, y, x + cropWidth, y + cropHeight);
			hv.setup(mImageMatrix, imageRect, cropRect, mCircleCrop,
					mAspectX != 0 && mAspectY != 0);
			mImageView.add(hv);
		}

		// Scale the image down for faster face detection.
		private Bitmap prepareBitmap() {
			if (mBitmap == null) {
				return null;
			}

			// 256 pixels wide is enough.
			if (mBitmap.getWidth() > 256) {
				mScale = 256.0F / mBitmap.getWidth();
			}
			Matrix matrix = new Matrix();
			matrix.setScale(mScale, mScale);
			Bitmap faceBitmap = Bitmap.createBitmap(mBitmap, 0, 0,
					mBitmap.getWidth(), mBitmap.getHeight(), matrix, true);
			return faceBitmap;
		}

		public void run() {
			mImageMatrix = mImageView.getImageMatrix();
			Bitmap faceBitmap = prepareBitmap();

			// Invert the scale so detector coordinates map back to mBitmap
			// (stays 1.0 when the bitmap was not downscaled).
			mScale = 1.0F / mScale;
			if (faceBitmap != null && mDoFaceDetection) {
				FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
						faceBitmap.getHeight(), mFaces.length);
				mNumFaces = detector.findFaces(faceBitmap, mFaces);
			}

			if (faceBitmap != null && faceBitmap != mBitmap) {
				faceBitmap.recycle();
			}

			mHandler.post(new Runnable() {
				public void run() {
					// With several candidate faces the user must tap one.
					mWaitingToPick = mNumFaces > 1;
					if (mNumFaces > 0) {
						for (int i = 0; i < mNumFaces; i++) {
							handleFace(mFaces[i]);
						}
					} else {
						makeDefault();
					}
					mImageView.invalidate();
					// A single highlight becomes the active crop directly.
					if (mImageView.mHighlightViews.size() == 1) {
						mCrop = mImageView.mHighlightViews.get(0);
						mCrop.setFocus(true);
					}

					if (mNumFaces > 1) {
						Toast t = Toast.makeText(CropImage.this,
								R.string.multiface_crop_help,
								Toast.LENGTH_SHORT);
						t.show();
					}
				}
			});
		}
	};
}

class CropImageView extends ImageViewTouchBase {
	ArrayList<HighlightView> mHighlightViews = new ArrayList<HighlightView>();
	HighlightView mMotionHighlightView = null;
	float mLastX, mLastY;
	int mMotionEdge;

	/**
	 * After layout, re-anchors every highlight rectangle to the current
	 * image matrix and recenters on the focused one, if any.
	 */
	@Override
	protected void onLayout(boolean changed, int left, int top, int right,
			int bottom) {
		super.onLayout(changed, left, top, right, bottom);
		if (mBitmapDisplayed.getBitmap() == null) {
			return;
		}
		for (HighlightView highlight : mHighlightViews) {
			highlight.mMatrix.set(getImageMatrix());
			highlight.invalidate();
			if (highlight.mIsFocused) {
				centerBasedOnHighlightView(highlight);
			}
		}
	}

	// Inflated from XML; all cropping state is configured later by CropImage.
	public CropImageView(Context context, AttributeSet attrs) {
		super(context, attrs);
	}

	/** Zooms the base image, keeping each highlight matrix in sync. */
	@Override
	protected void zoomTo(float scale, float centerX, float centerY) {
		super.zoomTo(scale, centerX, centerY);
		for (int i = 0; i < mHighlightViews.size(); ++i) {
			HighlightView highlight = mHighlightViews.get(i);
			highlight.mMatrix.set(getImageMatrix());
			highlight.invalidate();
		}
	}

	/** Zooms in one step, keeping each highlight matrix in sync. */
	@Override
	protected void zoomIn() {
		super.zoomIn();
		for (int i = 0; i < mHighlightViews.size(); ++i) {
			HighlightView highlight = mHighlightViews.get(i);
			highlight.mMatrix.set(getImageMatrix());
			highlight.invalidate();
		}
	}

	/** Zooms out one step, keeping each highlight matrix in sync. */
	@Override
	protected void zoomOut() {
		super.zoomOut();
		for (int i = 0; i < mHighlightViews.size(); ++i) {
			HighlightView highlight = mHighlightViews.get(i);
			highlight.mMatrix.set(getImageMatrix());
			highlight.invalidate();
		}
	}

	/** Pans the base image and shifts every highlight by the same delta. */
	@Override
	protected void postTranslate(float deltaX, float deltaY) {
		super.postTranslate(deltaX, deltaY);
		for (HighlightView highlight : mHighlightViews) {
			highlight.mMatrix.postTranslate(deltaX, deltaY);
			highlight.invalidate();
		}
	}

	// According to the event's position, move the focus to the first
	// cropping rectangle that the touch hits.
	private void recomputeFocus(MotionEvent event) {
		// Clear focus everywhere first.
		for (HighlightView highlight : mHighlightViews) {
			highlight.setFocus(false);
			highlight.invalidate();
		}

		// Then focus the first highlight hit by the touch point.
		for (HighlightView highlight : mHighlightViews) {
			int hit = highlight.getHit(event.getX(), event.getY());
			if (hit == HighlightView.GROW_NONE) {
				continue;
			}
			if (!highlight.hasFocus()) {
				highlight.setFocus(true);
				highlight.invalidate();
			}
			break;
		}
		invalidate();
	}

	/**
	 * Routes touch events either to face picking (when several faces were
	 * detected and the user must choose one) or to moving/resizing the
	 * active crop rectangle. Always consumes the event unless a save is in
	 * progress.
	 */
	@Override
	public boolean onTouchEvent(MotionEvent event) {
		CropImage cropImage = (CropImage) getContext();
		if (cropImage.mSaving) {
			// Ignore all input while the save is in progress.
			return false;
		}

		switch (event.getAction()) {
		case MotionEvent.ACTION_DOWN:
			if (cropImage.mWaitingToPick) {
				recomputeFocus(event);
			} else {
				// Grab the first highlight whose edge/interior was hit and
				// remember which handle we are dragging.
				for (int i = 0; i < mHighlightViews.size(); i++) {
					HighlightView hv = mHighlightViews.get(i);
					int edge = hv.getHit(event.getX(), event.getY());
					if (edge != HighlightView.GROW_NONE) {
						mMotionEdge = edge;
						mMotionHighlightView = hv;
						mLastX = event.getX();
						mLastY = event.getY();
						mMotionHighlightView
								.setMode(edge == HighlightView.MOVE ? HighlightView.ModifyMode.Move
										: HighlightView.ModifyMode.Grow);
						break;
					}
				}
			}
			break;

		case MotionEvent.ACTION_UP:
			if (cropImage.mWaitingToPick) {
				// Commit the focused face as the crop and hide the others.
				for (int i = 0; i < mHighlightViews.size(); i++) {
					HighlightView hv = mHighlightViews.get(i);
					if (hv.hasFocus()) {
						cropImage.mCrop = hv;
						for (int j = 0; j < mHighlightViews.size(); j++) {
							if (j != i) {
								mHighlightViews.get(j).setHidden(true);
							}
						}
						centerBasedOnHighlightView(hv);
						cropImage.mWaitingToPick = false;
						return true;
					}
				}
			} else if (mMotionHighlightView != null) {
				centerBasedOnHighlightView(mMotionHighlightView);
				mMotionHighlightView.setMode(HighlightView.ModifyMode.None);
			}
			mMotionHighlightView = null;
			break;

		case MotionEvent.ACTION_MOVE:
			if (cropImage.mWaitingToPick) {
				recomputeFocus(event);
			} else if (mMotionHighlightView != null) {
				mMotionHighlightView.handleMotion(mMotionEdge, event.getX()
						- mLastX, event.getY() - mLastY);
				mLastX = event.getX();
				mLastY = event.getY();

				// Optional nicety: dragging the crop rectangle against the
				// edge of the screen scrolls the image, at the cost of the
				// rectangle no longer staying fixed under the finger.
				// (The original wrapped this call in a dead "if (true)".)
				ensureVisible(mMotionHighlightView);
			}
			break;
		}

		switch (event.getAction()) {
		case MotionEvent.ACTION_UP:
			center(true, true);
			break;
		case MotionEvent.ACTION_MOVE:
			// If we're not zoomed then there's no point in even allowing
			// the user to move the image around. This call to center puts
			// it back to the normalized location.
			if (getScale() == 1F) {
				center(true, true);
			}
			break;
		}

		return true;
	}

	// Pan the displayed image so the cropping rectangle stays fully visible.
	private void ensureVisible(HighlightView hv) {
		Rect drawRect = hv.mDrawRect;

		// Positive when the rect sticks out past the left/top edge,
		// negative past the right/bottom edge, zero when inside the view.
		int deltaLeft = Math.max(0, getLeft() - drawRect.left);
		int deltaRight = Math.min(0, getRight() - drawRect.right);
		int deltaTop = Math.max(0, getTop() - drawRect.top);
		int deltaBottom = Math.min(0, getBottom() - drawRect.bottom);

		// Prefer the left/top correction when both sides overflow.
		int panX = deltaLeft != 0 ? deltaLeft : deltaRight;
		int panY = deltaTop != 0 ? deltaTop : deltaBottom;

		if (panX != 0 || panY != 0) {
			panBy(panX, panY);
		}
	}

	// If the cropping rectangle's size changed significantly, change the
	// view's center and scale according to the cropping rectangle.
	private void centerBasedOnHighlightView(HighlightView hv) {
		Rect drawRect = hv.mDrawRect;

		// Target a zoom where the crop rectangle fills about 60% of the
		// view along its more constrained axis, relative to the current
		// scale, but never below 1x.
		float widthRatio = (float) getWidth() / drawRect.width() * .6F;
		float heightRatio = (float) getHeight() / drawRect.height() * .6F;
		float zoom = Math.max(1F,
				Math.min(widthRatio, heightRatio) * getScale());

		// Only animate a re-zoom when the change is more than about 10%.
		if ((Math.abs(zoom - getScale()) / zoom) > .1) {
			float[] center = { hv.mCropRect.centerX(), hv.mCropRect.centerY() };
			getImageMatrix().mapPoints(center);
			zoomTo(zoom, center[0], center[1], 300F);
		}

		ensureVisible(hv);
	}

	/** Paints every crop highlight on top of the base image. */
	@Override
	protected void onDraw(Canvas canvas) {
		super.onDraw(canvas);
		for (HighlightView highlight : mHighlightViews) {
			highlight.draw(canvas);
		}
	}

	// Registers a new highlight rectangle and schedules a redraw.
	public void add(HighlightView hv) {
		mHighlightViews.add(hv);
		invalidate();
	}

}
