package com.darin.camera;

import java.util.ArrayList;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;

import com.google.zxing.BarcodeFormat;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.DecodeHintType;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.Result;
import com.google.zxing.ResultPoint;
import com.google.zxing.ResultPointCallback;
import com.google.zxing.common.HybridBinarizer;
/**
 * Worker thread that drives the camera's auto-focus and preview callbacks
 * and decodes barcode/QR data from preview frames off the UI thread.
 * @author Jianfeng.Lao
 *
 */
public class CameraAction extends Thread implements ResultPointCallback {
	private static final String TAG = "CameraAction";
	// Bound to this thread's Looper inside run(); written by the worker thread
	// and read from other threads, hence volatile for cross-thread visibility.
	private volatile Handler handler = null;
	private CameraActionListener listener;
	// Message: (re)arm the camera auto-focus callback.
	public static final int CAMERA_FOCUS_CALLBACK_SETUP = 0x00100000;
	// Message: (re)install the camera preview callback.
	public static final int CAMERA_PREVIEW_CALLBACK_SETUP = 0x00200000;
	// Message: decode one preview frame (obj = frame bytes, arg1/arg2 = w/h).
	public static final int CAMERA_PREVIEW_HANDLE = 0x00300000;

	// Delay (ms) before re-triggering auto focus after a focus pass completes.
	public static final int SETUP_AUTO_FOCUS_INTERVAL = 1500;
	// Delay (ms) before (re-)installing the preview callback.
	public static final int SETUP_PREVIEW_INTERVAL = 10;
	private final List<BarcodeFormat> decodeFormats;
	private final Map<DecodeHintType, Object> hints;
	private ViewfinderView viewfinderView;

	private final MultiFormatReader multiFormatReader;
	private CameraManager camera;
	private final int previewWidth;
	private final int previewHeight;

	/**
	 * Creates the decode worker and configures the ZXing reader with 1D,
	 * QR-code and Data Matrix formats.
	 *
	 * @param listener notified of handler availability, preview bitmaps and
	 *                 decode results; may be null
	 * @param camera   camera wrapper supplying preview size, frames and the
	 *                 underlying {@link Camera}
	 */
	public CameraAction(CameraActionListener listener, CameraManager camera) {
		super();
		this.listener = listener;
		this.camera = camera;
		Size previewSize = camera.getCameraParameters().getPreviewSize();
		this.previewHeight = previewSize.height;
		this.previewWidth = previewSize.width;
		multiFormatReader = new MultiFormatReader();

		hints = new EnumMap<DecodeHintType, Object>(DecodeHintType.class);

		decodeFormats = new ArrayList<BarcodeFormat>();
		decodeFormats.addAll(DecodeFormatManager.ONE_D_FORMATS);
		decodeFormats.addAll(DecodeFormatManager.QR_CODE_FORMATS);
		decodeFormats.addAll(DecodeFormatManager.DATA_MATRIX_FORMATS);

		hints.put(DecodeHintType.POSSIBLE_FORMATS, decodeFormats);
		hints.put(DecodeHintType.NEED_RESULT_POINT_CALLBACK, this);
		multiFormatReader.setHints(hints);
	}

	/**
	 * Turns this thread into a message-loop thread, publishes the handler via
	 * {@code listener.initCameraActionSuccess}, then blocks in
	 * {@link Looper#loop()} until {@link #quitCameraAction()} is called.
	 */
	@Override
	public void run() {
		Looper.prepare();
		handler = new Handler() {

			@Override
			public void handleMessage(Message msg) {
				switch (msg.what) {
				case CAMERA_FOCUS_CALLBACK_SETUP:
					// Only re-arm focus while previewing on focus-capable hardware.
					if (camera.isStartPrevious() && camera.hasCameraAutoFocusHardware()) {
						camera.getCamera().autoFocus(autoFocusCallBack);
					}
					break;
				case CAMERA_PREVIEW_CALLBACK_SETUP:
					if (camera.isStartPrevious()) {
						camera.getCamera().setPreviewCallback(mPreviewCallback);
					}
					break;
				case CAMERA_PREVIEW_HANDLE:
					decode((byte[]) (msg.obj), msg.arg1, msg.arg2);
					break;
				}
			}

		};
		if (listener != null) {
			listener.initCameraActionSuccess(handler);
		}
		Looper.loop(); // returns after quitCameraAction()
		if (listener != null) {
			// handler may already be null here (quitCameraAction clears it).
			listener.onCameraActionQuit(handler);
		}
		Log.i(TAG, "CameraHandleThread>>end");
	}

	/** @return the worker handler, or null before run() / after quit. */
	public Handler getHandler() {
		return handler;
	}

	/** ZXing callback: forwards candidate finder points to the overlay view. */
	@Override
	public void foundPossibleResultPoint(ResultPoint point) {
		if (viewfinderView != null) {
			Log.v(TAG, "point>>>" + point);
			viewfinderView.addPossibleResultPoint(point);
		}
	}

	/** Sets the overlay that visualizes possible result points; may be null. */
	public void setViewFinderView(ViewfinderView viewfinderView) {
		this.viewfinderView = viewfinderView;
	}

	/**
	 * Attempts to decode one preview frame. On success the preview callback is
	 * cancelled and the listener is notified; on failure the preview callback
	 * is re-installed so the next frame is tried.
	 *
	 * @param data   raw preview frame bytes; ignored if null
	 * @param width  frame width in pixels; ignored if 0
	 * @param height frame height in pixels; ignored if 0
	 */
	private void decode(byte[] data, int width, int height) {
		if (data == null || width == 0 || height == 0) {
			Log.d(TAG, "decode data == null width>>" + width + " height>>" + height);
			return;
		}
		Result rawResult = null;
		try {
			Bitmap bitmap = camera.getFrameBitmap(data, width, height, listener);
			if (bitmap == null) {
				return;
			}

			BitmapLuminanceSource bls = new BitmapLuminanceSource(bitmap);
			BinaryBitmap binaryBitmap = new BinaryBitmap(new HybridBinarizer(bls));
			if (listener != null) {
				listener.onCameraPreviewBitmap(bls.renderCroppedGreyscaleBitmap());
			}

			rawResult = multiFormatReader.decodeWithState(binaryBitmap);

		} catch (Exception ignored) {
			// Expected on most frames: ZXing throws when no barcode is found.
			// Fall through to the finally block, which requests the next frame.
		} finally {
			multiFormatReader.reset();
			if (rawResult != null) {
				Log.v(TAG, "rawResult>>" + rawResult.getText());
				cancanlCameraPreViewCallBack();

				if (listener != null) {
					listener.onDecodeSuccess(rawResult);
				}
			} else {
				Log.v(TAG, "can't decode>>");
				// Decode failed: schedule the preview callback again.
				Handler h = handler;
				if (h != null) {
					h.sendEmptyMessage(CameraAction.CAMERA_PREVIEW_CALLBACK_SETUP);
				}
			}
		}

	}

	// One-shot preview callback: detaches itself, then hands the frame to the
	// worker handler for decoding.
	private Camera.PreviewCallback mPreviewCallback = new Camera.PreviewCallback() {

		@Override
		public void onPreviewFrame(byte[] data, Camera camera) {
			camera.setPreviewCallback(null);
			Handler h = handler;
			if (h == null) {
				return;
			}
			Message msg = h.obtainMessage();
			msg.what = CAMERA_PREVIEW_HANDLE;
			msg.arg1 = previewWidth;
			msg.arg2 = previewHeight;
			msg.obj = data;
			h.sendMessage(msg);

		}
	};

	// Re-arms auto focus after each focus pass, keeping the image sharp while
	// scanning.
	private Camera.AutoFocusCallback autoFocusCallBack = new Camera.AutoFocusCallback() {

		@Override
		public void onAutoFocus(boolean success, Camera camera) {
			Log.i(TAG, "onAutoFocus>>" + success);
			Handler h = handler;
			if (h != null) {
				h.sendEmptyMessageDelayed(CAMERA_FOCUS_CALLBACK_SETUP, SETUP_AUTO_FOCUS_INTERVAL);
			}

		}
	};

	/** Cancels camera callbacks and drops any pending decode messages. */
	public void clearAllMessage() {
		Handler h = handler;
		if (h != null) {
			cancalCameraCallBack();
			h.removeMessages(CAMERA_PREVIEW_HANDLE);
		}
	}

	/** Cancels auto focus and the preview callback, removing queued setups. */
	public void cancalCameraCallBack() {
		if (camera != null) {
			// Guard: handler is null before run() starts or after quit.
			Handler h = handler;
			if (h != null) {
				h.removeMessages(CAMERA_FOCUS_CALLBACK_SETUP);
				h.removeMessages(CAMERA_PREVIEW_CALLBACK_SETUP);
			}
			camera.getCamera().cancelAutoFocus();
			camera.getCamera().setPreviewCallback(null);
		}
	}

	/** Cancels only the preview callback (focus keeps running). */
	public void cancanlCameraPreViewCallBack() {
		try {
			if (camera != null) {
				camera.getCamera().setPreviewCallback(null);
				Handler h = handler;
				if (h != null) {
					h.removeMessages(CAMERA_PREVIEW_CALLBACK_SETUP);
				}
			}
		} catch (Exception e) {
			Log.w(TAG, "cancanlCameraPreViewCallBack failed", e);
		}
	}

	/** Resets then schedules both the focus and preview callbacks. */
	public void setupCameraCallBack() {
		Handler h = handler;
		if (h != null) {
			cancalCameraCallBack();

			h.sendEmptyMessageDelayed(CAMERA_FOCUS_CALLBACK_SETUP, SETUP_AUTO_FOCUS_INTERVAL);
			h.sendEmptyMessageDelayed(CAMERA_PREVIEW_CALLBACK_SETUP, SETUP_PREVIEW_INTERVAL);

		}
	}

	/** Schedules (re-installation of) the preview callback only. */
	public void setupCameraPreViewCallBack() {
		Handler h = handler;
		if (h != null) {
			h.removeMessages(CAMERA_PREVIEW_CALLBACK_SETUP);
			h.sendEmptyMessageDelayed(CAMERA_PREVIEW_CALLBACK_SETUP, SETUP_PREVIEW_INTERVAL);

		}
	}

	/** Stops the message loop; run() then unwinds and the thread ends. */
	public void quitCameraAction() {
		// Snapshot to avoid a check-then-act race with other callers.
		Handler h = handler;
		if (h != null) {
			handler = null;
			h.getLooper().quit();
		}
	}

	/**
	 * Builds a ZXing luminance source cropped to the viewfinder frame from a
	 * raw YUV preview buffer.
	 *
	 * @throws IllegalArgumentException if the preview format is not a
	 *                                  supported YUV variant
	 */
	public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
		Rect frame = camera.getFrameRect();
		int previewFormat = camera.getCameraParameters().getPreviewFormat();
		String previewFormatString = camera.getCameraParameters().get("preview-format");
		switch (previewFormat) {
		// This is the standard Android format which all devices are REQUIRED
		// to support. In theory, it's the only one we should ever care about.
		case PixelFormat.YCbCr_420_SP:
			// This format has never been seen in the wild, but is compatible
			// as we only care about the Y channel, so allow it.
		case PixelFormat.YCbCr_422_SP:
			return new PlanarYUVLuminanceSource(data, width, height, frame.left, frame.top, frame.width(), frame.height());
		default:
			// The Samsung Moment incorrectly uses this variant instead of the
			// 'sp' version. Fortunately, it too has all the Y data up front,
			// so we can read it.
			if ("yuv420p".equals(previewFormatString)) {
				return new PlanarYUVLuminanceSource(data, width, height, frame.left, frame.top, frame.width(), frame.height());
			}
		}
		throw new IllegalArgumentException("Unsupported picture format: " + previewFormat + '/' + previewFormatString);
	}


}
