package com.example.tnot.api;
import androidx.annotation.NonNull;
import android.content.Context;
import android.graphics.ImageFormat;
import android.media.Image;
import android.util.Log;
import android.util.Size;

import androidx.camera.core.CameraSelector;
import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;

import com.google.common.util.concurrent.ListenableFuture;

import java.nio.ByteBuffer;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CameraUtil {
    private static final String TAG = "CameraUtil";
    // Requested preview/analysis resolution. CameraX treats this as a hint and may
    // pick the closest supported size, which is why the callback receives the
    // actual width/height read from each ImageProxy rather than these constants.
    private static final Size IMAGE_SIZE = new Size(640, 480);
    private static ExecutorService cameraExecutor;

    /**
     * Receives one camera frame converted to NV21 bytes.
     * Invoked on the background analysis executor, not the main thread.
     */
    public interface FrameCallback {
        /**
         * @param data   NV21-encoded frame (Y plane followed by interleaved VU)
         * @param width  actual frame width reported by the camera stream
         * @param height actual frame height reported by the camera stream
         */
        void onFrameData(byte[] data, int width, int height);
    }

    /**
     * Starts the camera with a Preview use case (rendered into {@code previewView})
     * and an ImageAnalysis use case that converts each frame to NV21 and delivers
     * it to {@code callback}.
     *
     * @param context     must also implement {@link LifecycleOwner} (e.g. an Activity)
     *                    so the use cases can be lifecycle-bound
     * @param previewView surface for the live preview
     * @param callback    receiver for per-frame NV21 data; may be null to skip delivery
     * @throws IllegalArgumentException if {@code context} is not a LifecycleOwner
     */
    public static void startCamera(Context context, PreviewView previewView, FrameCallback callback) {
        // Fail fast with a clear message instead of a late ClassCastException at bind time.
        if (!(context instanceof LifecycleOwner)) {
            throw new IllegalArgumentException("context must implement LifecycleOwner");
        }
        // Replace any executor left over from a previous start so its thread is not leaked.
        if (cameraExecutor != null) {
            cameraExecutor.shutdown();
        }
        cameraExecutor = Executors.newSingleThreadExecutor();
        ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(context);

        cameraProviderFuture.addListener(() -> {
            try {
                ProcessCameraProvider cameraProvider = cameraProviderFuture.get();

                // 1. Preview use case.
                Preview preview = new Preview.Builder()
                        .setTargetResolution(IMAGE_SIZE)
                        .build();
                preview.setSurfaceProvider(previewView.getSurfaceProvider());

                // 2. Image analysis use case (instead of ImageCapture) for a
                //    continuous stream of frames. KEEP_ONLY_LATEST drops stale
                //    frames when analysis is slower than the camera.
                ImageAnalysis imageAnalysis = new ImageAnalysis.Builder()
                        .setTargetResolution(IMAGE_SIZE)
                        .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
                        .build();

                // 3. Analyzer: convert each frame and hand it to the callback.
                imageAnalysis.setAnalyzer(cameraExecutor, new ImageAnalysis.Analyzer() {
                    @Override
                    @ExperimentalGetImage // required to access the underlying android.media.Image
                    public void analyze(@NonNull ImageProxy imageProxy) {
                        try {
                            byte[] data = imageToByteArray(imageProxy);
                            if (data != null && callback != null) {
                                // Report the stream's real dimensions; they may differ
                                // from IMAGE_SIZE and can be rotation-dependent.
                                callback.onFrameData(data, imageProxy.getWidth(), imageProxy.getHeight());
                            }
                        } finally {
                            // Must ALWAYS close the frame, even if conversion or the
                            // callback throws — with KEEP_ONLY_LATEST an unclosed frame
                            // stalls the pipeline and no further frames are delivered.
                            imageProxy.close();
                        }
                    }
                });

                // 4. Use the back-facing camera.
                CameraSelector cameraSelector = new CameraSelector.Builder()
                        .requireLensFacing(CameraSelector.LENS_FACING_BACK)
                        .build();

                // 5. Rebind: drop any previous use cases, then bind ours to the lifecycle.
                cameraProvider.unbindAll();
                cameraProvider.bindToLifecycle(
                        (LifecycleOwner) context,
                        cameraSelector,
                        preview,
                        imageAnalysis
                );

                Log.d(TAG, "摄像头初始化成功");

            } catch (ExecutionException e) {
                Log.e(TAG, "摄像头初始化失败: " + e.getMessage(), e);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers/executors can observe it.
                Thread.currentThread().interrupt();
                Log.e(TAG, "摄像头初始化失败: " + e.getMessage(), e);
            }
        }, ContextCompat.getMainExecutor(context));
    }

    /**
     * Converts a YUV_420_888 {@link ImageProxy} into an NV21 byte array
     * (full-resolution Y plane followed by 2x2-subsampled, V-first interleaved chroma).
     *
     * @return NV21 bytes, or null if the underlying Image is unavailable
     */
    @ExperimentalGetImage
    private static byte[] imageToByteArray(ImageProxy imageProxy) {
        Image image = imageProxy.getImage();
        if (image == null) return null;

        // YUV_420_888 exposes three planes: Y, U, V.
        Image.Plane[] planes = image.getPlanes();
        int width = image.getWidth();
        int height = image.getHeight();

        // NV21 total size = width * height * 1.5.
        int ySize = width * height;
        byte[] nv21 = new byte[ySize + ySize / 2];

        // 1. Y plane. rowStride may exceed width (row padding); pixelStride is
        //    usually 1, in which case whole rows can be bulk-copied.
        ByteBuffer yBuffer = planes[0].getBuffer();
        int yRowStride = planes[0].getRowStride();
        int yPixelStride = planes[0].getPixelStride();
        if (yPixelStride == 1) {
            // Fast path: contiguous pixels — one bulk copy per row.
            for (int row = 0; row < height; row++) {
                yBuffer.position(row * yRowStride);
                yBuffer.get(nv21, row * width, width);
            }
        } else {
            // Generic path: honor an arbitrary pixel stride.
            for (int row = 0; row < height; row++) {
                for (int col = 0; col < width; col++) {
                    nv21[ySize == 0 ? 0 : row * width + col] =
                            yBuffer.get(row * yRowStride + col * yPixelStride);
                }
            }
        }

        // 2. Chroma planes, interleaved into NV21 order (V first, then U).
        //    Absolute indexed reads handle both planar and semi-planar layouts.
        ByteBuffer uBuffer = planes[1].getBuffer();
        ByteBuffer vBuffer = planes[2].getBuffer();
        int uRowStride = planes[1].getRowStride();
        int vRowStride = planes[2].getRowStride();
        int uPixelStride = planes[1].getPixelStride();
        int vPixelStride = planes[2].getPixelStride();

        int uvWidth = width / 2;
        int uvHeight = height / 2;
        for (int row = 0; row < uvHeight; row++) {
            for (int col = 0; col < uvWidth; col++) {
                int pos = ySize + (row * uvWidth + col) * 2;
                nv21[pos] = vBuffer.get(row * vRowStride + col * vPixelStride);     // V
                nv21[pos + 1] = uBuffer.get(row * uRowStride + col * uPixelStride); // U
            }
        }

        return nv21;
    }

    /**
     * Stops frame analysis by shutting down the background executor.
     * Safe to call multiple times. Use case unbinding itself is handled by the
     * lifecycle the camera was bound to in {@link #startCamera}.
     */
    public static void stopCamera() {
        if (cameraExecutor != null) {
            cameraExecutor.shutdown();
            cameraExecutor = null; // allow a clean restart and let the executor be GC'd
        }
    }
}