package oylj.recycle.all_in_one.face;

import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicYuvToRGB;
import android.renderscript.Type;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;

import com.google.mlkit.vision.common.InputImage;
import com.google.mlkit.vision.face.Face;
import com.google.mlkit.vision.face.FaceDetector;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
/**
 * Camera2 + ML Kit helper: streams preview frames into an {@link ImageReader},
 * converts each YUV_420_888 frame to RGB with RenderScript, and runs single-face
 * detection on a square crop of the frame.
 *
 * Threading: camera2 callbacks run on {@code cameraHandler}; frame conversion and
 * detection run on a private single-thread executor; all {@link FaceDetectCallback}
 * methods are posted to {@code mainHandler}. Processing is throttled to at most one
 * frame per {@link #DETECTION_INTERVAL_MS}.
 *
 * Lifecycle: call {@link #openCamera(String)} to start and {@link #closeCamera()}
 * to stop. A closed instance cannot be reused (its executor and RenderScript are
 * torn down).
 */
public class CameraFaceHelper {
    private static final String TAG = "CameraFaceHelper";

    /** Minimum gap between two detection passes (~3 fps). */
    private static final long DETECTION_INTERVAL_MS = 333;

    private final Context context;
    private final SurfaceHolder surfaceHolder;
    private final FaceDetector faceDetector;
    private final Handler cameraHandler; // backs camera2 callbacks
    private final Handler mainHandler;   // delivers results to the caller
    private final FaceDetectCallback callback;
    private final ExecutorService executor = Executors.newSingleThreadExecutor();

    /**
     * Vertical translation (pixels) applied to the on-screen preview; used to pick
     * which square window of the rotated frame is actually visible to the user.
     */
    private final int cameraTranslateY;

    private CameraDevice cameraDevice;
    private CameraCaptureSession cameraCaptureSession;
    private ImageReader imageReader;
    private RenderScript rs; // reused for every frame; destroyed in closeCamera()

    /**
     * True while a frame is in flight or cooling down. Volatile because it is set on
     * the camera thread and cleared on the main thread.
     */
    private volatile boolean isProcessingImage = false;
    /**
     * Detection gate; frames are dropped (closed immediately) until enabled.
     * Volatile: written by callers on arbitrary threads, read on the camera thread.
     */
    private volatile boolean canDetector = false;

    /** Detection results. All methods are invoked on {@code mainHandler}. */
    public interface FaceDetectCallback {
        void onFaceDetected(Bitmap fullBitmap, Bitmap faceBitmap); // exactly one face found
        void onNoFaceDetected();                                   // no face in the frame
        void onMultipleFacesDetected();                            // more than one face
        void onDetectError(Exception e);                           // any failure
    }

    /**
     * @param context          any context; only the application context is retained
     * @param holder           surface that receives the live preview
     * @param cameraTranslateY vertical preview translation in pixels (see field doc)
     * @param faceDetector     configured ML Kit detector; owned and closed by the caller
     * @param cameraHandler    handler backing camera2 callbacks (non-main looper expected)
     * @param mainHandler      handler used to deliver callback results
     * @param callback         result sink; may be null, in which case bitmaps are recycled
     */
    public CameraFaceHelper(Context context,
                            SurfaceHolder holder,
                            int cameraTranslateY,
                            FaceDetector faceDetector,
                            Handler cameraHandler,
                            Handler mainHandler,
                            FaceDetectCallback callback) {
        this.context = context.getApplicationContext();
        this.surfaceHolder = holder;
        this.cameraTranslateY = cameraTranslateY;
        this.faceDetector = faceDetector;
        this.cameraHandler = cameraHandler;
        this.mainHandler = mainHandler;
        this.callback = callback;
        // RenderScript creation can fail on some devices; degrade to "no conversion"
        // (frames are skipped) instead of crashing the constructor.
        try {
            rs = RenderScript.create(context);
        } catch (Exception e) {
            Log.w(TAG, "RenderScript create failed: " + e.getMessage());
            rs = null;
        }
    }

    /** Enables or disables detection. While disabled, incoming frames are dropped cheaply. */
    public void setCanDetector(boolean canDetector) {
        this.canDetector = canDetector;
    }

    /**
     * Opens the given camera and starts the preview once connected. Does nothing if
     * the CAMERA permission has not been granted (permission flow is the caller's
     * responsibility). Errors are reported through {@link FaceDetectCallback#onDetectError}.
     *
     * @param cameraId camera2 id, e.g. from {@link CameraManager#getCameraIdList()}
     */
    public void openCamera(final String cameraId) {
        CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        try {
            if (ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                // Permission must be requested by the caller before opening.
                return;
            }
            cameraManager.openCamera(cameraId, new CameraDevice.StateCallback() {
                @Override
                public void onOpened(@NonNull CameraDevice camera) {
                    cameraDevice = camera;
                    startPreview();
                }

                @Override
                public void onDisconnected(@NonNull CameraDevice camera) {
                    closeCamera();
                }

                @Override
                public void onError(@NonNull CameraDevice camera, int error) {
                    closeCamera();
                }
            }, cameraHandler);
        } catch (CameraAccessException e) {
            if (callback != null) callback.onDetectError(e);
        }
    }

    /**
     * Creates the ImageReader + capture session and starts a repeating preview
     * request targeting both the display surface and the analysis reader.
     */
    private void startPreview() {
        if (cameraDevice == null) return;
        try {
            // Analysis resolution; adjust as needed. maxImages=4 gives the pipeline slack.
            int width = 800, height = 600;
            imageReader = ImageReader.newInstance(width, height, ImageFormat.YUV_420_888, 4);
            imageReader.setOnImageAvailableListener(reader -> {
                Image image = null;
                try {
                    image = reader.acquireLatestImage();
                    if (image == null) return;
                    if (isProcessingImage || !canDetector) {
                        // Throttled or detection disabled: drop the frame immediately
                        // so the reader's buffer queue never fills up.
                        image.close();
                        return;
                    }
                    isProcessingImage = true;
                    final Image captured = image;
                    image = null; // ownership transfers to the worker below
                    try {
                        // processImage() closes the image exactly once.
                        executor.execute(() -> processImage(captured));
                    } catch (RejectedExecutionException rejected) {
                        // Executor already shut down (closeCamera): release the frame
                        // and the throttle flag so nothing stays stuck.
                        isProcessingImage = false;
                        try { captured.close(); } catch (Exception ignored) {}
                    }
                } catch (Exception e) {
                    if (image != null) {
                        try { image.close(); } catch (Exception ignored) {}
                    }
                }
            }, cameraHandler);

            cameraDevice.createCaptureSession(
                    Arrays.asList(surfaceHolder.getSurface(), imageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession session) {
                            cameraCaptureSession = session;
                            try {
                                CaptureRequest.Builder previewBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                                previewBuilder.addTarget(surfaceHolder.getSurface());
                                previewBuilder.addTarget(imageReader.getSurface());
                                previewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                                previewBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                                session.setRepeatingRequest(previewBuilder.build(), null, cameraHandler);
                            } catch (CameraAccessException e) {
                                if (callback != null) mainHandler.post(() -> callback.onDetectError(e));
                            }
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                            if (callback != null) mainHandler.post(() -> callback.onDetectError(new RuntimeException("Camera configure failed")));
                        }
                    }, cameraHandler
            );
        } catch (CameraAccessException e) {
            if (callback != null) mainHandler.post(() -> callback.onDetectError(e));
        }
    }

    /**
     * Converts one camera frame to RGB, rotates/crops it, and hands it to ML Kit.
     * Owns {@code image}: it is closed exactly once, as soon as its pixel data has
     * been copied out (or on failure). Always re-arms the throttle via scheduleNext().
     */
    private void processImage(Image image) {
        if (image == null) {
            scheduleNext();
            return;
        }
        Bitmap converted = null;
        try {
            converted = YUV_420_888_toRGBIntrinsics(image);
        } catch (Exception e) {
            if (callback != null) mainHandler.post(() -> callback.onDetectError(e));
        } finally {
            // Single point of closure — frees the ImageReader buffer slot promptly.
            try { image.close(); } catch (Exception ignored) {}
        }
        if (converted == null) {
            scheduleNext();
            return;
        }
        try {
            // NOTE(review): rotateBitmap maps rotation 0 to a 90° turn — presumably a
            // landscape sensor feeding a portrait UI; confirm on device.
            Bitmap rotated = rotateBitmap(converted, 0);
            if (rotated != converted && !converted.isRecycled()) converted.recycle();

            // Crop the square window the user actually sees. cameraTranslateY is the
            // preview's vertical translation, so -cameraTranslateY is the visible top
            // edge; clamp so the crop always stays inside the bitmap (the original
            // unclamped call threw IllegalArgumentException for out-of-range values).
            int cropSize = Math.min(rotated.getWidth(), rotated.getHeight());
            int cropTop = Math.max(0, Math.min(-cameraTranslateY, rotated.getHeight() - cropSize));
            final Bitmap finalBitmap = Bitmap.createBitmap(rotated, 0, cropTop, cropSize, cropSize);
            if (finalBitmap != rotated && !rotated.isRecycled()) rotated.recycle();

            // Bitmap is already upright, so rotationDegrees = 0.
            InputImage inputImage = InputImage.fromBitmap(finalBitmap, 0);
            faceDetector.process(inputImage)
                    .addOnSuccessListener(faces -> handleFaces(faces, finalBitmap))
                    .addOnFailureListener(e -> {
                        if (callback != null) mainHandler.post(() -> callback.onDetectError(new Exception(e)));
                        scheduleNext();
                    });
        } catch (Exception e) {
            Log.d(TAG, "processImage: err " + e);
            if (callback != null) mainHandler.post(() -> callback.onDetectError(e));
            scheduleNext();
        }
    }

    /**
     * Dispatches one ML Kit detection result. Exactly one sufficiently large face
     * triggers onFaceDetected; otherwise the frame is reported or silently skipped.
     * Always re-arms the throttle via scheduleNext().
     */
    private void handleFaces(List<Face> faces, Bitmap finalBitmap) {
        Log.d(TAG, "processImage: faces " + faces.size());
        try {
            if (faces.isEmpty()) {
                if (callback != null) mainHandler.post(callback::onNoFaceDetected);
                return;
            }
            if (faces.size() > 1) {
                if (callback != null) mainHandler.post(callback::onMultipleFacesDetected);
                return;
            }
            Face face = faces.get(0);
            Rect box = face.getBoundingBox();
            // Skip faces covering less than a quarter of the frame area (too far away).
            if (box.width() * box.height() < finalBitmap.getWidth() * finalBitmap.getHeight() / 4) {
                return;
            }
            Bitmap faceBitmap = cropFace(finalBitmap, box);
            if (callback != null) {
                final Bitmap fullOut = finalBitmap;
                final Bitmap faceOut = faceBitmap;
                mainHandler.post(() -> callback.onFaceDetected(fullOut, faceOut));
            } else {
                // Nobody will consume the bitmaps — reclaim them now.
                if (!finalBitmap.isRecycled()) finalBitmap.recycle();
                if (faceBitmap != null && !faceBitmap.isRecycled()) faceBitmap.recycle();
            }
        } finally {
            scheduleNext();
        }
    }

    /**
     * Crops the face bounding box plus 20% padding on each side, clamped to the
     * bitmap bounds. Returns null if the crop fails for any reason.
     */
    private Bitmap cropFace(Bitmap source, Rect box) {
        final float pad = 0.2f;
        int left = (int) Math.max(box.left - box.width() * pad, 0);
        int top = (int) Math.max(box.top - box.height() * pad, 0);
        int right = (int) Math.min(box.right + box.width() * pad, source.getWidth());
        int bottom = (int) Math.min(box.bottom + box.height() * pad, source.getHeight());
        int w = Math.max(1, right - left);
        int h = Math.max(1, bottom - top);
        Log.d(TAG, String.format("crop: src %sx%s box %s -> l%s t%s w%s h%s",
                source.getWidth(), source.getHeight(), box, left, top, w, h));
        try {
            return Bitmap.createBitmap(source, left, top, w, h);
        } catch (Exception e) {
            return null;
        }
    }

    /** Clears the throttle flag after DETECTION_INTERVAL_MS so the next frame can run. */
    private void scheduleNext() {
        mainHandler.postDelayed(() -> isProcessingImage = false, DETECTION_INTERVAL_MS);
    }

    /**
     * Rotates a bitmap according to a display rotation value. The mapping is offset
     * by 90° (ROTATION_0 -> 90°) — presumably compensating for a landscape-mounted
     * sensor; TODO confirm against the device's sensor orientation.
     */
    private Bitmap rotateBitmap(Bitmap bitmap, int rotation) {
        Matrix matrix = new Matrix();
        switch (rotation) {
            case Surface.ROTATION_0:
                matrix.postRotate(90);
                break;
            case Surface.ROTATION_90:
                matrix.postRotate(0);   // already upright
                break;
            case Surface.ROTATION_180:
                matrix.postRotate(-90); // i.e. 270°
                break;
            case Surface.ROTATION_270:
                matrix.postRotate(180);
                break;
        }
        return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
    }

    /**
     * Converts a YUV_420_888 Image to an ARGB_8888 Bitmap via the RenderScript
     * YUV->RGB intrinsic. Returns null if RenderScript is unavailable or conversion
     * fails. Does NOT close the image; the caller owns it.
     *
     * NOTE(review): this copies the planes as Y then V then U into one contiguous
     * buffer, which assumes the device returns tightly packed NV21-compatible planes
     * (pixelStride/rowStride are ignored) — verify on target hardware.
     */
    private Bitmap YUV_420_888_toRGBIntrinsics(Image image) {
        if (image == null) return null;

        int width = image.getWidth();
        int height = image.getHeight();

        ByteBuffer yBuf = image.getPlanes()[0].getBuffer();
        ByteBuffer uBuf = image.getPlanes()[1].getBuffer();
        ByteBuffer vBuf = image.getPlanes()[2].getBuffer();

        int ySize = yBuf.remaining();
        int uSize = uBuf.remaining();
        int vSize = vBuf.remaining();

        byte[] data = new byte[ySize + uSize + vSize];
        yBuf.get(data, 0, ySize);
        vBuf.get(data, ySize, vSize);          // V before U — NV21-style ordering
        uBuf.get(data, ySize + vSize, uSize);

        if (rs == null) {
            // No RenderScript on this device: cannot convert, skip the frame.
            return null;
        }
        try {
            ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
            Type.Builder yuvType = new Type.Builder(rs, Element.U8(rs)).setX(data.length);
            Allocation in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT);

            Type.Builder rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(width).setY(height);
            Allocation out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT);

            final Bitmap bmpout = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            in.copyFromUnchecked(data);
            yuvToRgbIntrinsic.setInput(in);
            yuvToRgbIntrinsic.forEach(out);
            out.copyTo(bmpout);

            // Release per-frame RenderScript objects eagerly.
            in.destroy();
            out.destroy();
            yuvToRgbIntrinsic.destroy();

            return bmpout;
        } catch (Exception e) {
            Log.e(TAG, "YUV->RGB failed: " + e.getMessage());
            return null;
        }
    }

    /**
     * Releases the session, device, reader, RenderScript, and executor.
     * After this call the helper must not be reused.
     */
    public void closeCamera() {
        try {
            if (cameraCaptureSession != null) {
                cameraCaptureSession.close();
                cameraCaptureSession = null;
            }
            if (cameraDevice != null) {
                cameraDevice.close();
                cameraDevice = null;
            }
            if (imageReader != null) {
                imageReader.close();
                imageReader = null;
            }
            if (rs != null) {
                try { rs.destroy(); } catch (Exception ignored) {}
                rs = null;
            }
            executor.shutdownNow();
        } catch (Exception e) {
            Log.w(TAG, "closeCamera error: " + e.getMessage());
        }
    }
}
