package sjtu.FaceRecognition;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.json.JSONException;
import org.json.JSONObject;
import org.opencv.core.Mat;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.os.AsyncTask;
import android.os.Handler;
import android.util.Log;

import sjtu.ChatClient.HandlerConstants;
/**
 * 
 * @author Frank, Chen
 * AsyncTask used to process a single picture: conversion, rotation,
 * optional upload to the recognition server, and drawing of the results.
 *
 */
public class PictureProcessTask extends AsyncTask<byte[], Object, Bitmap> {
	
	private static final String TAG = "picture process task";
	
	//The image processor, which is unique.
	private final ImgProcessor imgProcessor;
	//Handler that receives the recognized expression on the UI thread.
	private final Handler handler;
	//Expression result parsed from the server reply; stays null when detection fails.
	private String result;
	//private TimeCalc timeCalc = new TimeCalc();
	//For a whole detect process, the number of picture tasks is certain.
	//Index means the sequence. But the task with a smaller index may end after those with a bigger one.
	//It will be useful when analyzing. This won't be used when analyzing static images.
	private final int index;
	
	/**
	 * @param imgProcessor processor used for byte/Mat/Bitmap conversion, rotation and drawing
	 * @param handler      handler that receives the {@code EXPRESSION_RESULT} message
	 * @param index        sequence number of this task within one detect pass
	 */
	public PictureProcessTask(ImgProcessor imgProcessor, Handler handler, int index) {
		this.imgProcessor = imgProcessor;
		this.handler = handler;
		this.index = index;
	}
	
	/**
	 * Converts the raw camera frame to a rotated bitmap. In network mode the frame is
	 * additionally JPEG-compressed, sent to the recognition server, and the detect
	 * result is drawn back onto the image.
	 *
	 * @param params params[0] is the raw preview frame from the camera
	 * @return the processed bitmap, or null when the server gave no usable reply
	 */
	@Override
	protected Bitmap doInBackground(byte[]... params) {
		byte[] data = params[0];
		int camera_width = CameraUtil.getPreviewWidth();
		int camera_height = CameraUtil.getPreviewHeight();
		int rotation = CameraUtil.getPicRotation();
		if (CommonUtil.DEBUG_LOG) {
			Log.e(TAG, "rotation is " + rotation);
		}
		//convert byte array from camera to opencv mat
		Mat imageMat = imgProcessor.rawByteArray2RGBAMat(data, camera_width,
				camera_height, !CommonUtil.USE_NETWORK, CommonUtil.PIC_WIDTH,
				CommonUtil.PIC_HEIGHT);
		//rotate image
		imageMat = imgProcessor.rotateMat(imageMat, rotation, CommonUtil.PIC_WIDTH, CommonUtil.PIC_HEIGHT);
		int newWidth = imageMat.cols();
		int newHeight = imageMat.rows();
		if (CommonUtil.DEBUG_LOG) {
			Log.e(TAG, "new width is " + newWidth + " and new height is " + newHeight);
		}
		Bitmap bmp = imgProcessor.mat2Bitmap(imageMat, newWidth, newHeight);

		if (!CommonUtil.USE_NETWORK) {
			//Local mode: nothing more to do, hand the bitmap straight back.
			imageMat.release();
			return bmp;
		}
		//compress image before upload
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		bmp.compress(CompressFormat.JPEG, CommonUtil.pic_quality, bos);
		bmp.recycle();
		byte[] jpgData = bos.toByteArray();
		try {
			bos.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
		//networking — blocking call, this runs on the AsyncTask worker thread
		String jsonString = NetworkUtil.getExpressionResultFromServer(jpgData);
		if (CommonUtil.DEBUG_LOG) {
			Log.e(TAG, "detect result is " + jsonString);
		}
		result = null;
		//process result
		if (jsonString != null) {
			String detectString = parseDetectResultFromJson(jsonString);
			if (CommonUtil.DEBUG_LOG) {
				Log.e(TAG, "detect result is " + detectString);
			}
			drawImageFromResult(imageMat, detectString);
			bmp = imgProcessor.mat2Bitmap(imageMat, newWidth, newHeight);
			if (CommonUtil.DEBUG_LOG) {
				Log.e(TAG, "bmp size is " + bmp.getWidth() + " " + bmp.getHeight());
			}
			imageMat.release();
			return bmp;
		}
		//Server gave no reply: free the native/bitmap memory and report nothing.
		imageMat.release();
		bmp.recycle();
		return null;
	}

	/**
	 * Delivers the recognized expression (possibly null) to the UI handler.
	 * Note: the bitmap itself is currently not forwarded (see commented-out sender).
	 */
	@Override
	protected void onPostExecute(Bitmap bitmap) {
		super.onPostExecute(bitmap);
		if (CommonUtil.DEBUG_LOG) {
			Log.e(TAG, "pic task done index is " + index);
		}
		//sender.sendOnePicDoneMsg(bitmap);
		handler.obtainMessage(HandlerConstants.EXPRESSION_RESULT, result).sendToTarget();
	}
	
	/**
	 * Draws the detect result onto the image and stores the recognized expression
	 * in {@link #result}. The string comes from the network, so every parse step
	 * is guarded — a malformed reply is logged and skipped instead of crashing
	 * the worker thread.
	 *
	 * @param image        image to draw rectangles/ellipses on
	 * @param detectResult raw detect string from the server, may be null/empty
	 */
	private void drawImageFromResult(Mat image, String detectResult) {
		if (detectResult == null || detectResult.length() == 0) {
			result = CommonUtil.NO_FACE;
			return;
		}
		if (!CommonUtil.IS_REAL_TIME) {
			//Static mode, format inferred from the parsing below:
			//"Type:count:x,y,w,h:x,y,w,h;..." — TODO confirm against server protocol
			String[] items = detectResult.split(";");
			for (int i = 0; i < items.length; i++) {
				if (items[i] == null || items[i].length() == 0) {
					continue;
				}
				String[] temps = items[i].split(":");
				if (temps.length < 2 || temps[0].length() == 0) {
					continue;
				}
				int count;
				try {
					count = Integer.parseInt(temps[1]);
				} catch (NumberFormatException e) {
					Log.e(TAG, "malformed count in detect result: " + items[i], e);
					continue;
				}
				int color_index = 0;
				if (temps[0].equals("Face")) {
					color_index = ImgProcessor.FACE_COLOR_INDEX;
				} else if (temps[0].equals("Eye")) {
					color_index = ImgProcessor.EYE_COLOR_INDEX;
				}
				//Bound j + 2 by the token count so a short reply cannot throw.
				for (int j = 0; j < count && j + 2 < temps.length; j++) {
					String[] parameters = temps[j + 2].split(",");
					if (parameters.length < 4) {
						continue;
					}
					try {
						int x = Integer.parseInt(parameters[0]);
						int y = Integer.parseInt(parameters[1]);
						int width = Integer.parseInt(parameters[2]);
						int height = Integer.parseInt(parameters[3]);
						imgProcessor.drawRectOnImage(image, x, y, width,
								height, color_index);
					} catch (NumberFormatException e) {
						Log.e(TAG, "malformed rect in detect result: " + temps[j + 2], e);
					}
				}
			}
		} else {
			//Real-time mode: "pointIndex:x,y;...;-1:emotionNumber" — the -1 item
			//carries the emotion code (format inferred from the parsing below).
			String[] items = detectResult.split(";");
			for (int i = 0; i < items.length; i++) {
				String item = items[i];
				int colon = item.indexOf(':');
				if (colon < 0) {
					continue; //no separator — malformed item, skip
				}
				//local renamed from "index": it shadowed the field of the same name
				String itemIndex = item.substring(0, colon);
				String point = item.substring(colon + 1);
				if (!itemIndex.equals("-1")) {
					int comma = point.indexOf(',');
					if (comma < 0) {
						continue;
					}
					try {
						int x = Integer.parseInt(point.substring(0, comma));
						int y = Integer.parseInt(point.substring(comma + 1));
						imgProcessor.drawEllipseOnImage(image, x, y, 2, 2, ImgProcessor.FACE_COLOR_INDEX);
					} catch (NumberFormatException e) {
						Log.e(TAG, "malformed point in detect result: " + item, e);
					}
				} else {
					//index -1:result
					result = getEmotionFromNumber(point);
				}
			}
		}
	}
	
	/**
	 * Parses the JSON received from the server and extracts the detect string.
	 *
	 * @param jsonString server reply, expected shape {"pictureInfo":{"faceResult":...}}
	 * @return the faceResult string, or null when the JSON cannot be parsed
	 */
	private String parseDetectResultFromJson(String jsonString) {
		try {
			JSONObject jObject = new JSONObject(jsonString);
			JSONObject info = jObject.getJSONObject("pictureInfo");
			return info.getString("faceResult");
		} catch (JSONException e) {
			//Was silently swallowed; log so malformed server replies are visible.
			Log.e(TAG, "failed to parse detect result json: " + jsonString, e);
		}
		return null;
	}
	
	/**
	 * Maps the server's emotion code to a displayable expression string.
	 */
	private String getEmotionFromNumber(String number) {
		if (number.equals("-1")) {
			return CommonUtil.EMO_SAD;
		}
		if (number.equals("0")) {
			return CommonUtil.EMO_NEUTRAL;
		}
		if (number.equals("1")) {
			return CommonUtil.EMO_HAPPY;
		}
		return "No Face";
	}
	
}

