package com.syn.phoneface.decode;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;

import com.smartshino.face.SsNow;
import com.smartshino.face.Stfaceattr;
import com.syn.phoneface.common.Logs;
import com.syn.phoneface.model.eyekey.CheckAction;
import com.syn.phoneface.model.result.Result;
import com.syn.phoneface.ui.camera.CameraFaceConfig;
import com.syn.phoneface.ui.camera.live.CameraFaceCallback;
import com.syn.phoneface.ui.view.CameraSurfaceView;
import com.syn.phoneface.util.ExecutorUtil;
import com.syn.phoneface.util.FileUtils;
import com.synface.facerecognize.algorithm.FaceRecognize;
import com.synface.facerecognize.interfaces.FaceInterface;
import com.syn.phoneface.common.MessageManager;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Decode-thread handler: converts camera preview frames from YUV to RGB24,
 * runs face detection on them, posts detection results to the UI-side
 * handler, and schedules asynchronous face-feature extraction.
 *
 * <p>Messages ({@code DECODE}, {@code QUIT}, {@code ONTIMER}) must arrive on
 * the decode thread's {@link Looper}. The timeout watchdog and feature
 * extraction run on background threads; fields shared with those threads are
 * documented below.
 */
public final class DecodeHandler extends Handler implements IConstants {

  private static final String TAG = DecodeHandler.class.getSimpleName();

  // NOTE(review): currently unused in this file — confirm before removing.
  private static final int CHECK_INTERVAL_TIME = 300;
  private static final int TEMP_VALUE_LENGTH = 4;

  private final CameraSurfaceView mSurfaceView;
  private final SsNow algorithm;
  private final CameraFaceCallback mCameraFaceCallback;
  private final CameraFaceConfig mFaceConfig;
  // Watchdog that reports CAMERA_TIME_OUT after mFaceConfig.getTimeoutS()
  // seconds; mainly used to abort a capture that never finds a face.
  private TimerAsyncTask timerAsyncTask;
  // Read and written only inside handleMessage(), i.e. on the decode thread.
  private boolean running = true;

  private final List<CheckAction> mCheckActionList = new ArrayList<CheckAction>();
  // NOTE(review): never read in this file — kept for compatibility.
  private CheckAction mCheckAction = CheckAction.NONE;
  private int[] tempValue;
  private Bitmap[] tempBitmap;
  private final int mCameraId;
  private final int mOrientation;
  Stfaceattr mStfaceattr;
  /**
   * Frame dimensions: {@code [0]=width, [1]=height}. DoRotate may update
   * these in place when a rotation swaps width and height.
   */
  int[] hWdHi = new int[2];
  /**
   * RGB24 pixels of the current frame. A fresh array is allocated per frame
   * on purpose: FeatureTask keeps reading the previous frame's array on a
   * background thread, so this buffer must not be reused across frames.
   */
  byte[] faceRgb24 = null;
  /** UI-side handler that receives decode results; may be null. */
  Handler handler = null;

  // Duration of the last full detect pass in ms (diagnostics only).
  private long oneFrameTime;

  // Written by the feature-extraction executor thread and read by the decode
  // thread in getFeature(), hence volatile for cross-thread visibility.
  private volatile boolean isGettingFeature = false;

  // Suffix for debug snapshot file names written by the save helpers.
  int index = 0;
  // NOTE(review): unused — package-visible, so kept in case a sibling class
  // in this package reads it; confirm before removing.
  int num = 0;

  FaceRecognize mFaceRecognize = FaceRecognize.getInstance(null);

  DecodeHandler(CameraSurfaceView surfaceView, SsNow algorithm,
                CameraFaceCallback faceCallback) {
    this.mSurfaceView = surfaceView;
    this.algorithm = algorithm;
    this.mFaceConfig = surfaceView.getFaceConfig();
    this.mCameraId = surfaceView.getCameraManager().getManualCameraId();
    this.mOrientation = surfaceView.getCameraManager().getOrientation();
    mCameraFaceCallback = faceCallback;
    tempValue = new int[TEMP_VALUE_LENGTH];
    tempBitmap = new Bitmap[3];
    mCheckActionList.addAll(mFaceConfig.getCheckActionList());
    handler = surfaceView.getHandler();
  }

  @Override
  public void handleMessage(Message message) {
    if (!running) {
      // After QUIT we drop every queued frame instead of decoding it.
      return;
    }
    switch (message.what) {
      case DECODE:
        decodeArray((byte[]) message.obj, message.arg1, message.arg2);
        break;
      case QUIT:
        running = false;
        timerCancel();
        Looper looper = Looper.myLooper();
        if (looper != null) { // null only if driven off a Looper thread
          looper.quit();
        }
        break;
      case ONTIMER:
        onTimer();
        break;
    }
  }

  /**
   * Decodes one preview frame: runs detection, and on success dispatches the
   * detection state and starts feature extraction; on failure notifies the
   * UI handler with DECODE_FAILED.
   */
  private void decodeArray(byte[] data, int width, int height) {
    if (data == null) {
      return;
    }

    // Parse the frame and run face detection on it.
    if (decodeData(data, width, height)) {
      if (mFaceConfig == null) {
        return;
      }
      // Dispatch the liveness/detection state to the UI handler.
      dispatchState(mStfaceattr);

      getFeature();
    } else {
      Logs.i(TAG, "正在找脸！");
      Logs.e(TAG, "DECODE_FAILED...");
      setDecodeFaceError();
    }
  }

  /**
   * Converts the YUV frame to RGB24, rotates it to the upright orientation
   * and runs DiscoverX face detection.
   *
   * @return true when DiscoverX reports success (non-negative status)
   */
  private boolean decodeData(byte[] data, int width, int height) {
    long start = System.currentTimeMillis();
    hWdHi[0] = width;
    hWdHi[1] = height;

    // YUV -> RGB. Must be a fresh array each frame: the previous array may
    // still be in use by an in-flight FeatureTask (see faceRgb24 docs).
    faceRgb24 = new byte[width * height * 3];
    algorithm.YuvToRgb(data, width, height, faceRgb24);
    long timeYuvToRgb = System.currentTimeMillis() - start;
    Logs.d(TAG, "native YuvToRgb花费时间 " + timeYuvToRgb);

    // Initialize the face-attribute structure for the landmark points.
    Stfaceattr stfaceattr = new Stfaceattr();
    int[] hFattr = stfaceattr.gethFattr();
    stfaceattr.setSize(115 * 4);
    stfaceattr.setOcclusion(1); // occlusion detection enabled

    rotateData(stfaceattr);

    // Detect faces in the rotated RGB frame; status >= 0 means a face was found.
    long discoverXStart = System.currentTimeMillis();
    int discoverXStatus = algorithm.DiscoverX(hFattr, faceRgb24, hWdHi[0],
        hWdHi[1], 0, 0);
    long discoverXStop = System.currentTimeMillis();
    Logs.d(TAG, "检测接口: " + discoverXStatus + "花费时间 +"
        + (discoverXStop - discoverXStart) + " ms");
    oneFrameTime = System.currentTimeMillis() - start;
    Logs.d(TAG, "检测单帧花费时间 " + oneFrameTime + " ms");

    mStfaceattr = stfaceattr;

    return discoverXStatus >= 0;
  }

  /**
   * Rotates the RGB frame in place so the face is upright. Front (id != 0)
   * and back cameras only differ in the 90-degree case, where they rotate in
   * opposite directions.
   */
  private void rotateData(Stfaceattr stfaceattr) {
    Logs.i(TAG, "orientation:" + mOrientation);
    long rotateStart = System.currentTimeMillis();
    switch (mOrientation) {
      case 0:
        algorithm.DoRotate(faceRgb24, hWdHi, 0);
        break;
      case 90:
        // Back camera rotates one way, front camera the other.
        algorithm.DoRotate(faceRgb24, hWdHi, mCameraId == 0 ? 1 : -1);
        break;
      case 180:
        algorithm.DoRotate(faceRgb24, hWdHi, 2);
        break;
      case 270:
        algorithm.DoRotate(faceRgb24, hWdHi, 3);
        break;
      default:
        // Unexpected orientation: leave the frame unrotated (matches the
        // original behavior of falling through the switch).
        break;
    }
    long rotateStop = System.currentTimeMillis();
    Logs.d(TAG, "旋转图像花费时间 " + (rotateStop - rotateStart) + "MS");
    stfaceattr.setHeadPosi(1, 0, 0);
  }

  /** Posts the current detection state to the UI handler for drawing. */
  private void dispatchState(Stfaceattr stfaceattr) {
    // No extra info yet, so no face frame is drawn from here.
    Result result = new Result("FIND_FACE", null, stfaceattr);
    MessageManager.sendToTarget(handler, DECODE_SUCCEDED, result);
  }

  /** Starts feature extraction unless one is already running. */
  private void getFeature() {
    if (isGettingFeature) {
      return;
    }
    ExecutorUtil.exec(new FeatureTask(faceRgb24, hWdHi[0], hWdHi[1]));
  }

  /** Notifies the UI handler that detection failed for this frame. */
  private void setDecodeFaceError() {
    if (handler != null) {
      MessageManager.sendToTarget(handler, DECODE_FAILED);
    }
  }

  /**
   * Starts (or restarts) the timeout watchdog.
   */
  synchronized void onTimer() {
    Logs.i(TAG, "开始计时！");
    timerCancel();
    timerAsyncTask = new TimerAsyncTask();
    timerAsyncTask.execute();
  }

  /**
   * Stops the timeout watchdog if one is running. cancel(true) interrupts
   * the watchdog's sleep (see sleep(), which preserves the interrupt flag).
   */
  private synchronized void timerCancel() {
    AsyncTask<?, ?, ?> task = timerAsyncTask;
    if (task != null) {
      Logs.e(TAG, "停止超时检测");
      task.cancel(true);
      timerAsyncTask = null;
    }
  }

  /**
   * Debug helper: snapshots the current RGB frame as a JPEG on external
   * storage. The 480x640 size and -1000 rotate code are hard-coded for the
   * debug device — NOTE(review): -1000 appears to be a mirror/flip code,
   * confirm against the SsNow documentation.
   */
  private void saveBitmap() {
    Logs.i(TAG, "保存图像....................................  ");
    algorithm.DoRotate(faceRgb24, hWdHi, -1000);
    final byte[] faceJpg = new byte[480 * 640 * 3 + 1024];
    algorithm.RgbToJpg(faceRgb24, 480, 640, faceJpg, 0, 0);
    final Bitmap faceBitmap = BitmapFactory.decodeByteArray(faceJpg, 0,
        faceJpg.length);

    File file = new File(Environment.getExternalStorageDirectory()
        .getPath() + "/faceJpg" + index + ".png");
    writeBitmapToFile(faceBitmap, file);
    index++;
  }

  /** Debug helper: writes an arbitrary bitmap to the external SD card. */
  private void saveBitmapToDisk(Bitmap bitmap) {
    Context context = mSurfaceView.getContext();
    File path = context.getCacheDir();
    Logs.i(TAG, "path:" + path);
    File file = new File(FileUtils.getExternalSdCardPath() + "/faceJpg"
        + index + ".png");
    Logs.i(TAG, "path:" + file.getAbsolutePath());
    writeBitmapToFile(bitmap, file);
    index++;
  }

  /**
   * Compresses {@code bitmap} as JPEG into {@code file}. The stream is
   * always closed, even when compress/flush throws (the original leaked it).
   */
  private void writeBitmapToFile(Bitmap bitmap, File file) {
    FileOutputStream outputStream = null;
    try {
      outputStream = new FileOutputStream(file);
      bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream);
      outputStream.flush();
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      if (outputStream != null) {
        try {
          outputStream.close();
        } catch (IOException ignored) {
          // Nothing useful to do if close fails on a debug snapshot.
        }
      }
    }
  }

  /**
   * @author James Simple hourglass: posts a timeout message once the
   *         configured capture time elapses. cancel(true) suppresses
   *         onPostExecute, so a cancelled timer never reports a timeout.
   */
  private final class TimerAsyncTask extends
      AsyncTask<Object, Object, Object> {
    @Override
    protected Object doInBackground(Object... objects) {
      // 1000L forces the multiply into long arithmetic before sleeping.
      sleep(mFaceConfig.getTimeoutS() * 1000L);
      return null;
    }

    @Override
    protected void onPostExecute(Object o) {
      super.onPostExecute(o);
      Logs.i(TAG, "已经等待" + mFaceConfig.getTimeoutS() + "秒捕捉超时");
      if (handler != null) {
        Message message = Message.obtain(handler, CAMERA_TIME_OUT);
        message.sendToTarget();
      }
    }
  }

  /**
   * Sleeps for the given duration. On interruption the interrupt flag is
   * restored (the original swallowed it, which left AsyncTask.cancel(true)
   * unable to make its cancellation observable to callers).
   */
  private void sleep(long millis) {
    try {
      Thread.sleep(millis);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }

  /**
   * Background task: extracts the face feature string from an RGB frame and,
   * on success, posts a Result (feature + bitmap) to the UI handler.
   */
  private class FeatureTask implements Runnable {

    byte[] faceRgb;
    int width;
    int height;

    public FeatureTask(byte[] bytes, int width, int height) {
      faceRgb = bytes;
      this.width = width;
      this.height = height;
    }

    @Override
    public void run() {
      isGettingFeature = true;
      long getBitmapStart = System.currentTimeMillis();
      String fea = null;
      try {
        fea = mFaceRecognize.SynFace_GetFaceFeatureByData(null, faceRgb, width,
            height, mFaceRecognize.getMultithreadValue()[1]);
      } catch (FaceInterface.FaceException e) {
        e.printStackTrace();
        Logs.i(TAG, "提特征：" + e.getMessage());
      } finally {
        // Reset in finally so an unexpected runtime exception cannot leave
        // the flag stuck true and block all future feature extraction.
        isGettingFeature = false;
      }
      Logs.d(TAG, "提特征：" + (System.currentTimeMillis() - getBitmapStart) + "ms");
      Logs.d(TAG, "提特征：" + "w:" + width + " h:" + height + "  fea==null:" + (fea == null));

      if (fea == null) {
        return;
      }

      Logs.i(TAG, "");
      if (mFaceConfig.getCameraId() == 1) {
        // NOTE(review): -1000 appears to un-mirror front-camera frames —
        // confirm against the SsNow DoRotate documentation.
        int[] hW = {width, height};
        algorithm.DoRotate(faceRgb, hW, -1000);
      }

      Result result = new Result();
      result.setFeature(fea);
      result.setBitmap(rgbToBitmap(faceRgb, width, height));
      if (handler != null) { // same guard as setDecodeFaceError
        Message message = Message.obtain(handler, PHOTO_VERFY_SUCCESS);
        message.obj = result;
        message.sendToTarget();
        Logs.i(TAG, "setResult...");
      }
    }
  }

  /**
   * Converts an RGB24 buffer into a Bitmap by encoding it as JPEG first.
   *
   * @return the decoded bitmap, or null when {@code rgb} is null
   */
  private Bitmap rgbToBitmap(byte[] rgb, int width, int height) {
    if (rgb == null) {
      return null;
    }

    long start = System.currentTimeMillis();
    // RgbToJpg needs headroom beyond the raw pixel size, hence the +1024.
    byte[] faceJpg = new byte[width * height * 3 + 1024];
    algorithm.RgbToJpg(rgb, width, height, faceJpg, 0, 0);
    Bitmap faceBitmap = BitmapFactory.decodeByteArray(faceJpg, 0, faceJpg.length);
    long end = System.currentTimeMillis();
    Logs.d(TAG, "rgbToBitmap时间：" + (end - start) + "ms"
        + " Thread id:" + Thread.currentThread().getId());
    return faceBitmap;
  }
}
