package com.example.opencvstudy;

import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.core.content.ContextCompat;

import android.os.Bundle;
import android.util.Size;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;

import com.google.common.util.concurrent.ListenableFuture;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

/**
 * Camera preview + face detection activity.
 *
 * <p>Binds a CameraX {@link ImageAnalysis} use case, copies each YUV_420_888
 * frame into reusable buffers, repacks it as NV12 and hands it to the native
 * {@code FaceTracker}, which draws onto the {@link SurfaceView}'s surface.
 */
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback {
    private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
    private FaceTracker faceTracker;
    // Per-plane copies of the camera frame, allocated lazily on the first
    // frame and reused afterwards to avoid per-frame garbage.
    private byte[] y;
    private byte[] u;
    private byte[] v;
    // Reusable NV12 output buffer: full Y plane followed by interleaved U/V.
    private byte[] nv12;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Copy the cascade model out of assets so the native tracker can load
        // it from a real file path.
        String cascadePath = Utils.copyAsset2Dir(this, "lbpcascade_frontalface.xml");
        faceTracker = new FaceTracker(cascadePath);
        faceTracker.start();

        SurfaceView surfaceView = findViewById(R.id.surfaceView);
        surfaceView.getHolder().addCallback(this);

        cameraProviderFuture = ProcessCameraProvider.getInstance(this);
        cameraProviderFuture.addListener(() -> {
            try {
                ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
                bindAnalysis(cameraProvider);
            } catch (ExecutionException e) {
                // Without a camera provider the activity cannot work; fail
                // fast and preserve the cause instead of swallowing it.
                throw new IllegalStateException("Failed to obtain camera provider", e);
            } catch (InterruptedException e) {
                // Restore the interrupt status rather than dropping it.
                Thread.currentThread().interrupt();
            }
        }, ContextCompat.getMainExecutor(this));
    }

    /**
     * Binds an {@link ImageAnalysis} use case (back camera, 720p target,
     * keep-only-latest backpressure) and routes every frame through
     * {@link #analyzeFrame(ImageProxy)}.
     */
    private void bindAnalysis(ProcessCameraProvider cameraProvider) {
        ImageAnalysis imageAnalysis =
                new ImageAnalysis.Builder()
                        // enable the following line if RGBA output is needed.
                        //.setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_RGBA_8888)
                        .setTargetResolution(new Size(1280, 720))
                        .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
                        .build();

        imageAnalysis.setAnalyzer(Executors.newSingleThreadExecutor(), imageProxy -> {
            try {
                analyzeFrame(imageProxy);
            } finally {
                // Always release the frame. With STRATEGY_KEEP_ONLY_LATEST the
                // camera delivers no further frames until the previous proxy is
                // closed, so leaking one (the original only closed on matching
                // buffer sizes) stalls the pipeline permanently.
                imageProxy.close();
            }
        });

        CameraSelector cameraSelector = new CameraSelector.Builder()
                .requireLensFacing(CameraSelector.LENS_FACING_BACK) // rear camera
                .build();

        cameraProvider.bindToLifecycle(this, cameraSelector, imageAnalysis);
    }

    /**
     * Copies the three YUV planes out of {@code imageProxy}, repacks them as
     * NV12 and runs native face detection. Frames whose plane sizes do not
     * match the cached buffers are skipped.
     */
    private void analyzeFrame(@NonNull ImageProxy imageProxy) {
        int rotationDegrees = imageProxy.getImageInfo().getRotationDegrees();
        ImageProxy.PlaneProxy[] planes = imageProxy.getPlanes();
        if (y == null) {
            // remaining() == limit() - position(); size the caches off the
            // first frame's plane buffers.
            y = new byte[planes[0].getBuffer().remaining()];
            u = new byte[planes[1].getBuffer().remaining()];
            v = new byte[planes[2].getBuffer().remaining()];
        }
        if (planes[0].getBuffer().remaining() != y.length) {
            // Plane size changed (e.g. transient reconfiguration) — skip.
            return;
        }
        planes[0].getBuffer().get(y);
        planes[1].getBuffer().get(u);
        planes[2].getBuffer().get(v);

        int w = imageProxy.getWidth();
        int h = imageProxy.getHeight();
        if (nv12 == null) {
            nv12 = new byte[w * h * 3 / 2];
        }
        packNv12(w, h);
        faceTracker.detect(nv12, w, h, rotationDegrees);
    }

    /**
     * Packs the cached y/u/v plane copies into {@link #nv12}: all Y bytes
     * first, then alternating U/V pairs (yyyy... uvuv...).
     *
     * <p>NOTE(review): assumes tightly packed planes (rowStride == width,
     * U/V pixelStride such that consecutive chroma samples sit at consecutive
     * array indices) as most devices produce at 1280x720 — confirm against
     * {@code PlaneProxy#getRowStride()}/{@code getPixelStride()} if color
     * artifacts appear.
     */
    private void packNv12(int w, int h) {
        System.arraycopy(y, 0, nv12, 0, y.length);
        int uvIndex = 0;
        for (int i = y.length; i < w * h * 3 / 2; i += 2) {
            nv12[i] = u[uvIndex];
            nv12[i + 1] = v[uvIndex];
            uvIndex++;
        }
    }

    @Override
    public void surfaceCreated(@NonNull SurfaceHolder holder) {
        // Nothing to do until the surface has a size; see surfaceChanged.
    }

    @Override
    public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {
        // Hand the (re)sized surface to the native tracker for rendering.
        faceTracker.setSurface(holder.getSurface());
    }

    @Override
    public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
        // Detach the surface so native code stops drawing into a dead surface.
        faceTracker.setSurface(null);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release native tracker resources.
        faceTracker.release();
    }
}