package com.example.facedetectionjavacv;

import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
import static com.googlecode.javacv.cpp.opencv_core.cvGetSeqElem;
import static com.googlecode.javacv.cpp.opencv_core.cvLoad;
import static com.googlecode.javacv.cpp.opencv_core.cvPoint;
import static com.googlecode.javacv.cpp.opencv_core.cvRectangle;
import static com.googlecode.javacv.cpp.opencv_imgproc.CV_BGR2GRAY;
import static com.googlecode.javacv.cpp.opencv_imgproc.cvCvtColor;
import static com.googlecode.javacv.cpp.opencv_objdetect.cvHaarDetectObjects;

import it.sephiroth.android.library.imagezoom.ImageViewTouch;

import java.io.File;
import java.io.IOException;

import com.googlecode.javacpp.Loader;
import com.googlecode.javacv.cpp.opencv_objdetect;
import com.googlecode.javacv.cpp.opencv_core.CvMemStorage;
import com.googlecode.javacv.cpp.opencv_core.CvRect;
import com.googlecode.javacv.cpp.opencv_core.CvScalar;
import com.googlecode.javacv.cpp.opencv_core.CvSeq;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
import com.googlecode.javacv.cpp.opencv_objdetect.CvHaarClassifierCascade;

import android.os.AsyncTask;
import android.os.Bundle;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.View.OnClickListener;
import android.widget.Button;

/**
 * Full-screen activity that displays an image (path passed via the
 * "ImagePath" intent extra) in a zoomable {@link ImageViewTouch} and, on
 * demand, runs Haar-cascade face detection over it with JavaCV, drawing
 * yellow rectangles around detected faces.
 */
public class ImageDetectionActivity extends Activity
{
	/** Absolute path of the image to display/detect, read from the launch intent. */
	private String imagePath;
	/** Zoomable image view; also updated from the background task's result. */
	public ImageViewTouch image_view_touch;
	
	@Override
	protected void onCreate(Bundle savedInstanceState)
	{
		super.onCreate(savedInstanceState);
		// Run edge-to-edge: no title bar, full screen.
		requestWindowFeature(Window.FEATURE_NO_TITLE);
		getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
		setContentView(R.layout.activity_image_detection);
		Intent intent = getIntent();
		imagePath = intent.getStringExtra("ImagePath");
		image_view_touch = (ImageViewTouch)findViewById(R.id.image_view_touch);
		// Load image into ImageViewTouch
		loadImage();
		Button detect = (Button)findViewById(R.id.detect);
		detect.setOnClickListener(new OnClickListener()
		{
			@Override
			public void onClick(View v)
			{
				// Detection is native and slow; run it off the UI thread.
				new DetectImage().execute(imagePath);
			}
		});
	}
	
	/**
	 * Decodes {@link #imagePath} and shows it in the touch view.
	 * Guards against a failed decode (bad/missing path), which would
	 * otherwise pass null to setImageBitmap.
	 */
	private void loadImage()
	{
		Bitmap bitmap = BitmapFactory.decodeFile(imagePath);
		if (bitmap != null)
		{
			image_view_touch.setImageBitmap(bitmap, true, null, 5.0f);
		}
	}
	
	/**
	 * Background task: decodes the image, runs Haar-cascade face detection,
	 * draws rectangles on the detected faces, and returns the annotated
	 * bitmap (or null if the image could not be decoded).
	 */
	private class DetectImage extends AsyncTask<String, Void, Bitmap>
	{
		private ProgressDialog dialog;
		
		@Override
		protected Bitmap doInBackground(String... params)
		{
			// Preload the opencv_objdetect module to work around a known bug
			Loader.load(opencv_objdetect.class);
			
			BitmapFactory.Options bitmapFactoryOptions = new BitmapFactory.Options();
			// ARGB_8888 gives a 4-byte-per-pixel buffer matching the 4-channel IplImage below.
			bitmapFactoryOptions.inPreferredConfig = Bitmap.Config.ARGB_8888;
			Bitmap bitmap = BitmapFactory.decodeFile(params[0], bitmapFactoryOptions);
			if (bitmap == null)
			{
				// Decode failed (bad path / unsupported format); nothing to detect on.
				return null;
			}
			
			// Wrap the bitmap pixels in a 4-channel IplImage (no extra native decode needed).
			IplImage originalImage = IplImage.create(bitmap.getWidth(), bitmap.getHeight(), IPL_DEPTH_8U, 4);
			bitmap.copyPixelsToBuffer(originalImage.getByteBuffer());
			
			// Haar detection runs on a single-channel image, which is also faster.
			IplImage grayImage = IplImage.create(originalImage.width(), originalImage.height(), IPL_DEPTH_8U, 1);
			
			// Convert the original image to grayscale.
			// NOTE(review): source is 4-channel RGBA but CV_BGR2GRAY expects a
			// 3-channel BGR source — confirm this conversion code is correct
			// for this bitmap layout (CV_RGBA2GRAY may be the intended one).
			cvCvtColor(originalImage, grayImage, CV_BGR2GRAY);
			
			// CvMemStorage provides the native memory the detector allocates results in.
			CvMemStorage storage = CvMemStorage.create();
			
			File classifierFile = null;
			try
			{
				// Extract the bundled cascade definition to the cache dir so
				// native cvLoad can read it from a real file path.
				classifierFile = Loader.extractResource(getClass(),
						"/com/example/facedetectionjavacv/haarcascade_frontalface_alt.xml",
						getBaseContext().getCacheDir(), "haarcascade_frontalface_alt", ".xml");
			}
			catch (IOException e)
			{
				e.printStackTrace();
			}
			if (classifierFile == null)
			{
				// Cascade could not be extracted: skip detection instead of
				// crashing on classifierFile.getAbsolutePath() below.
				return bitmap;
			}
			
			// Instantiate the classifier cascade from the extracted definition,
			// then delete the temp file — cvLoad has already parsed it.
			CvHaarClassifierCascade cascade = new CvHaarClassifierCascade(cvLoad(classifierFile.getAbsolutePath()));
			classifierFile.delete();
			
			// Detect faces: scale factor 1.1, require 3 neighboring hits, no flags.
			CvSeq faces = cvHaarDetectObjects(grayImage, cascade, storage, 1.1, 3, 0);
			
			// Draw a yellow, 1px, 8-connected rectangle around each detected face.
			for (int i = 0; i < faces.total(); i++)
			{
				CvRect r = new CvRect(cvGetSeqElem(faces, i));
				cvRectangle(originalImage, cvPoint(r.x(), r.y()),
						cvPoint(r.x() + r.width(), r.y() + r.height()), CvScalar.YELLOW, 1, 8, 0);
			}
			
			// Copy the annotated pixels back into the bitmap for display.
			bitmap.copyPixelsFromBuffer(originalImage.getByteBuffer());
			// NOTE(review): originalImage, grayImage, and storage are native
			// allocations that are never explicitly released here — consider
			// cvReleaseImage/cvReleaseMemStorage if this leaks in practice.
			
			return bitmap;
		}
		
		@Override
		protected void onPostExecute(Bitmap result)
		{
			if (dialog != null && dialog.isShowing())
			{
				dialog.dismiss();
			}
			// result is null when the image failed to decode; keep the current view then.
			if (result != null)
			{
				image_view_touch.setImageBitmap(result, true, null, 5.0f);
			}
		}
		
		@Override
		protected void onPreExecute()
		{
			// Modal progress indicator while native detection runs.
			dialog = ProgressDialog.show(ImageDetectionActivity.this, "Waiting...", "Detecting...");
		}
	}
}