package com.example.opencvtest;

import java.io.IOException;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import autotrace.Autotrace;

/**
 * SurfaceView that shows the front-facing camera preview and, for every frame,
 * runs an OpenCV pipeline (grayscale -> 1/4 downscale -> blur -> Canny) whose
 * result is drawn by an attached {@link OverlayView}. A single frame can also
 * be exported as a traced PDF via {@code Autotrace} (see {@link #takeTheShot()}).
 *
 * <p>Not thread-safe: all methods are expected to run on the UI thread, which
 * is where the Camera and Surface callbacks are delivered.
 */
public class CameraPreviewView extends SurfaceView implements PreviewCallback, SurfaceHolder.Callback
{
	private static final String		TAG			= "OCVTest::CameraPreviewView";

	// NOTE(review): currently unused; presumably intended for Imgproc.Canny's
	// L2gradient parameter — confirm before wiring it in.
	private static final boolean	L2gradient	= false;

	// native void byteArrayTest(long dataAddr, int width, int height);

	// native static void staticMethod();

	/** Overlay that draws the processed edge image; set via {@link #setOverlay}. */
	OverlayView						mOverlayView;

	/** Open front-facing camera; null after disableView()/surfaceDestroyed(). */
	Camera							mCamera		= null;

	/** Lower Canny hysteresis threshold (adjusted externally, e.g. by a seek bar). */
	public int		mCannyLower		= 0;

	/** Upper Canny hysteresis threshold (adjusted externally). */
	public int		mCannyHigher	= 0;

	/** When true, the next preview frame is exported via Autotrace, then cleared. */
	private boolean	mTakeAShot;

	/** Box-blur kernel size; values below 1 are clamped to 1 when used. */
	public int		mSb3Value		= 1;

	public CameraPreviewView(Context context, AttributeSet attrs)
	{
		super(context, attrs);
		init();
	}

	public CameraPreviewView(Context context, AttributeSet attrs, int defStyle)
	{
		super(context, attrs, defStyle);
		init();
	}

	public CameraPreviewView(Context context)
	{
		super(context);
		init();
	}

	/** Registers this view as SurfaceHolder.Callback and opens the camera. */
	private void init()
	{
		Log.i(TAG, "init");
		this.getHolder().addCallback(this);
		mCamera = getFrontFacingCamera();
		//mCamera.setDisplayOrientation(90);
	}

	/**
	 * Stops the preview and releases the camera.
	 *
	 * <p>The preview callback is cleared first so that no frame is delivered to
	 * an already-released camera, and {@code mCamera} is nulled so the method is
	 * safe to call more than once (the original crashed on a second call).
	 */
	public void disableView()
	{
		if (mCamera != null) {
			mCamera.setPreviewCallback(null);
			mCamera.stopPreview();
			mCamera.release();
			mCamera = null;
		}
	}

	/** Sets the overlay that will render each processed frame. */
	public void setOverlay(OverlayView overlay)
	{
		mOverlayView = overlay;
	}

	/**
	 * Opens and returns the first front-facing camera on the device.
	 *
	 * @throws RuntimeException if no front-facing camera is available
	 */
	public Camera getFrontFacingCamera()
	{
		Log.i(TAG, "getFrontFacingCamera");
		int numCams = Camera.getNumberOfCameras();
		Log.i(TAG, "number of cameras: " + numCams);
		for (int i = 0; i < numCams; i++) {
			CameraInfo cameraInfo = new CameraInfo();
			Camera.getCameraInfo(i, cameraInfo);
			if (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT)
				return Camera.open(i);
		}
		throw new RuntimeException("no front facing cameras available");
	}

	/**
	 * Sizes the view to the best-fitting camera preview size, or fills the
	 * available space when the camera is not (yet) open.
	 * setMeasuredDimension parameters are relative to screen orientation.
	 */
	@Override
	protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec)
	{
		Log.i(TAG, "onMeasure: Mode: " + (MeasureSpec.getMode(heightMeasureSpec) == MeasureSpec.EXACTLY));
		int maxWidthLandscape = MeasureSpec.getSize(widthMeasureSpec);
		int maxHeightLandscape = MeasureSpec.getSize(heightMeasureSpec);

		if (mCamera != null) {
			Log.i(TAG, "onMeasure: mCamera is NOT null");
			Camera.Size s = getFittingPreviewSize(maxWidthLandscape, maxHeightLandscape);
			setMeasuredDimension(s.width, s.height);
		} else {
			Log.i(TAG, "onMeasure: mCamera is null");
			setMeasuredDimension(maxWidthLandscape, maxHeightLandscape);
		}
	}

	/**
	 * Per-frame pipeline: NV21 -> grayscale -> 1/4 downscale -> box blur ->
	 * Canny -> red-on-transparent RGBA edge image, upscaled back to full size
	 * and handed to the overlay.
	 *
	 * <p>Every intermediate Mat is released explicitly; the original relied on
	 * finalizers, letting native memory pile up between GC cycles.
	 */
	public void onPreviewFrame(byte[] data, Camera camera)
	{
		Camera.Size previewSize = camera.getParameters().getPreviewSize();

		// NV21: a plane of 8-bit Y samples followed by an interleaved V/U plane
		// of 2x2-subsampled chroma, i.e. height * 3/2 rows of width bytes.
		Mat mNv21 = new Mat(previewSize.height + previewSize.height / 2, previewSize.width, CvType.CV_8UC1);
		mNv21.put(0, 0, data);

		// mNv21's size includes the chroma rows; the display size is the raw
		// preview size.
		Size fullSize = new Size(previewSize.width, previewSize.height);
		Size smallSize = new Size(fullSize.width / 4, fullSize.height / 4);

		Log.i(TAG, "onPreviewFrame: pw: " + fullSize.toString());

		// Convert the image to grayscale.
		Mat mGray = new Mat(fullSize, CvType.CV_8UC1);
		Imgproc.cvtColor(mNv21, mGray, Imgproc.COLOR_YUV2GRAY_NV21);
		mNv21.release();

		// Downscale to reduce noise before edge detection.
		Mat mGray2 = new Mat(smallSize, CvType.CV_8UC1);
		Imgproc.resize(mGray, mGray2, smallSize);
		mGray.release();

		// blur() requires a kernel size of at least 1.
		int kernel = Math.max(1, mSb3Value);
		Mat mBlurred = new Mat(smallSize, CvType.CV_8UC1);
		Imgproc.blur(mGray2, mBlurred, new Size(kernel, kernel));
		mGray2.release();

		Mat mCanny = new Mat(smallSize, CvType.CV_8UC1);
		Imgproc.Canny(mBlurred, mCanny, mCannyLower, mCannyHigher);
		mBlurred.release();

		// Invert the image: (white edges on black) -> (black edges on white).
		Mat mCanny2 = new Mat(smallSize, CvType.CV_8UC1);
		Core.bitwise_not(mCanny, mCanny2);

		// Paint the edge pixels red, using the edge map as a mask.
		Mat rgba = new Mat(smallSize, CvType.CV_8UC4);
		rgba.setTo(new Scalar(255, 0, 0, 255), mCanny);
		mCanny.release();

		Mat mFullSize = new Mat();
		Imgproc.resize(rgba, mFullSize, fullSize);
		rgba.release();

		// Hand the full-size edge image to the overlay; the overlay owns it now.
		if (mOverlayView != null) {
			mOverlayView.setImage(mFullSize);
		}

		if (mTakeAShot) {
			Log.i(TAG, "taking a shot");
			// The edge-detected image only has 1 color channel;
			// Autotrace needs 3 channels (RGB).
			Mat mRgb = new Mat(mCanny2.size(), CvType.CV_8UC3);
			Imgproc.cvtColor(mCanny2, mRgb, Imgproc.COLOR_GRAY2RGB);

			// NOTE(review): traceMatAsync is asynchronous, so mRgb is not
			// released here — confirm Autotrace takes ownership of the Mat.
			Autotrace.traceMatAsync(mRgb, "/sdcard/CODE/cap.pdf", "pdf");
			mTakeAShot = false;
		}
		mCanny2.release();
	}

	/**
	 * Surface geometry changed: pick the best-fitting preview size, apply it,
	 * and (re)start the preview. The preview is stopped first — the Camera API
	 * does not allow changing the preview size while previewing.
	 */
	public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
	{
		Log.i(TAG, "surfaceChanged: w:" + width + " h: " + height);
		if (mCamera == null)
			return;
		mCamera.stopPreview();
		Parameters params = mCamera.getParameters();
		Camera.Size fittingSize = getFittingPreviewSize(width, height);
		params.setPreviewSize(fittingSize.width, fittingSize.height);
		mCamera.setParameters(params);
		mCamera.startPreview();
	}

	/**
	 * Returns a supported preview size fitting within {@code width} x
	 * {@code height} (the last match in the supported list, as before).
	 * Falls back to the smallest supported size when nothing fits — the
	 * original dereferenced null in that case.
	 */
	private Camera.Size getFittingPreviewSize(int width, int height)
	{
		Log.i(TAG, "getFittingPreviewSize called: " + width + " " + height);
		Parameters params = mCamera.getParameters();
		Camera.Size result = null;
		Camera.Size smallest = null;
		for (Camera.Size s2 : params.getSupportedPreviewSizes()) {
			Log.i(TAG, String.format("w %d  h %d", s2.width, s2.height));
			if (s2.width <= width && s2.height <= height)
				result = s2;
			if (smallest == null || s2.width * s2.height < smallest.width * smallest.height)
				smallest = s2;
		}
		if (result == null)
			result = smallest;

		Log.i(TAG, "getFittingPreviewSize returns: " + result.width + " " + result.height);
		return result;
	}

	/**
	 * Surface is ready: configure the preview size/logging, attach the surface,
	 * and register for per-frame callbacks. The preview itself is started in
	 * {@link #surfaceChanged}, which always follows surfaceCreated.
	 */
	public void surfaceCreated(SurfaceHolder holder)
	{
		Log.i(TAG, "surfaceCreated");
		try {
			// Reacquire the camera if it was released (e.g. after surfaceDestroyed).
			if (mCamera == null)
				mCamera = getFrontFacingCamera();

			// set to portrait mode
			//mCamera.setDisplayOrientation(90);

			Log.i(TAG, "Camera: " + mCamera);
			Parameters params = mCamera.getParameters();

			/* Set the preview size */
			Camera.Size s = getFittingPreviewSize(this.getWidth(), this.getHeight());

			Log.i(TAG, "this size: " + this.getWidth() + " " + this.getHeight());
			Log.i(TAG, "chosen preview size: " + s.width + " " + s.height);

			params.setPreviewSize(s.width, s.height);

			for (String focusMode : params.getSupportedFocusModes()) {
				Log.i(TAG, "Focus Mode: " + focusMode);
			}

			for (Integer format : params.getSupportedPreviewFormats()) {
				Log.i(TAG, "Format: " + format);
			}
			Log.i(TAG, "Previewformat is: " + params.getPreviewFormat());

			for (int[] fpsRange : params.getSupportedPreviewFpsRange()) {
				Log.i(TAG, "FpsRange: min: " + fpsRange[0] + " max: " + fpsRange[1]);
			}

			mCamera.setParameters(params);

			// Draw the raw preview on this SurfaceView and deliver a copy of
			// every frame to onPreviewFrame().
			mCamera.setPreviewDisplay(this.getHolder());
			mCamera.setPreviewCallback(this);
			// mCamera.setPreviewCallbackWithBuffer(this);

		} catch (IOException e) {
			// setPreviewDisplay failed; log with the cause instead of
			// printStackTrace, and stop any running preview.
			Log.e(TAG, "surfaceCreated: setPreviewDisplay failed", e);
			mCamera.stopPreview();
		}
	}

	/**
	 * Surface is going away: stop frame delivery, then release the camera and
	 * null the field so later lifecycle calls cannot touch a released camera.
	 */
	public void surfaceDestroyed(SurfaceHolder holder)
	{
		Log.i(TAG, "surfaceDestroyed");
		if (mCamera != null) {
			mCamera.setPreviewCallback(null);
			mCamera.stopPreview();
			mCamera.release();
			mCamera = null;
		}
	}

	/** Requests that the next preview frame be exported via Autotrace as PDF. */
	public void takeTheShot()
	{
		mTakeAShot = true;
	}
}