package com.jrfid.smartcabinet.demo.ui.add_face_camera_x

import android.annotation.SuppressLint
import androidx.camera.camera2.interop.Camera2Interop
import androidx.camera.core.*
import androidx.camera.core.ImageAnalysis.OUTPUT_IMAGE_FORMAT_YUV_420_888
import androidx.camera.core.impl.CameraInfoInternal
import androidx.camera.core.impl.CameraInternal
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.view.PreviewView
import androidx.core.content.ContextCompat
import androidx.core.util.Preconditions
import com.arcsoft.face.ErrorInfo
import com.arcsoft.face.FaceEngine
import com.arcsoft.face.FaceInfo
import com.arcsoft.face.LivenessInfo
import com.arcsoft.face.enums.DetectFaceOrientPriority
import com.arcsoft.face.enums.DetectMode
import com.blankj.utilcode.util.LogUtils
import com.jrfid.smartcabinet.demo.R
import com.jrfid.smartcabinet.demo.arcface.DrawHelper
import com.jrfid.smartcabinet.demo.arcface.TrackUtil
import com.jrfid.smartcabinet.demo.base.BaseFragment
import com.jrfid.smartcabinet.demo.databinding.AddFaceCameraXFragmentBinding
import com.jrfid.smartcabinet.demo.model.DrawFaceInfoModel
import com.jrfid.smartcabinet.demo.utils.ImageUtil
import com.jrfid.smartcabinet.demo.utils.ImageUtils
import com.jrfid.smartcabinet.demo.view.FaceRectView
import java.util.ArrayList
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min

/**
 * Fragment that captures camera frames with CameraX and runs ArcFace face tracking
 * on each frame, drawing a rectangle over the largest detected face.
 *
 * Lifecycle: engines are created in [setData] (via [initFace]) and released in
 * [onDestroyView]; the analysis executor is created in [setData] and shut down in
 * [onDestroyView]. CameraX use cases are bound to this fragment's lifecycle, so
 * they unbind automatically.
 */
class AddFaceByCameraXFragment : BaseFragment<AddFaceCameraXFragmentBinding, AddFaceByCameraXViewModel>(AddFaceByCameraXViewModel::class.java) {

    override fun getLayoutId(): Int = R.layout.add_face_camera_x_fragment

    /** Single-threaded executor running frame analysis off the main thread. */
    private lateinit var cameraExecutor: ExecutorService
    private lateinit var viewFinder: PreviewView
    private lateinit var faceRectView: FaceRectView

    /** Maps analysis-frame face rects into preview-view coordinates; created lazily on first frame. */
    private var drawHelper: DrawHelper? = null

    /**
     * The face currently being tracked and drawn (the largest face in the frame).
     */
    private var currentFaceInfoModel: DrawFaceInfoModel? = null

    override fun setData() {
        cameraExecutor = Executors.newSingleThreadExecutor()
        initFace()
        faceRectView = getDataBinding()!!.faceView
        getDataBinding()?.cameraView?.let {
            viewFinder = it
            // Defer until the view is laid out so width/height are valid for aspect-ratio math.
            it.post {
                setupCamera()
            }
        }
    }

    private var cameraProvider: ProcessCameraProvider? = null
    private var camera: Camera? = null

    /** Obtains the process-wide camera provider, then binds the use cases on the main thread. */
    @SuppressLint("RestrictedApi")
    private fun setupCamera() {
        val cameraProviderFuture = ProcessCameraProvider.getInstance(requireContext())
        cameraProviderFuture.addListener({
            cameraProvider = cameraProviderFuture.get()
            bindCameraUseCases()
        }, ContextCompat.getMainExecutor(requireContext()))
    }

    /**
     * Builds and binds the Preview and ImageAnalysis use cases.
     *
     * The analyzer converts each frame to NV21, runs ArcFace detection, keeps the
     * largest face and posts its rect to [faceRectView].
     */
    @SuppressLint("RestrictedApi", "UnsafeOptInUsageError")
    private fun bindCameraUseCases() {
        val screenAspectRatio = aspectRatio(viewFinder.width, viewFinder.height)
        LogUtils.dTag(TAG, "Preview aspect ratio: $screenAspectRatio")

        val rotation = viewFinder.display.rotation

        // CameraProvider
        val cameraProvider = cameraProvider
            ?: throw IllegalStateException("Camera initialization failed.")

        val lensFacing = when {
            hasBackCamera() -> CameraSelector.LENS_FACING_BACK
            hasFrontCamera() -> CameraSelector.LENS_FACING_FRONT
            else -> throw IllegalStateException("Back and front camera are unavailable")
        }

        // CameraSelector: restrict to the matching lens facing AND hardware camera id "1"
        // (device-specific requirement of this cabinet hardware — TODO confirm for other devices).
        val cameraSelector = CameraSelector.Builder()
            .addCameraFilter { cameraInfos ->
                cameraInfos.filterIsInstance<CameraInfoInternal>()
                    .filter { it.lensFacing == lensFacing && it.cameraId == "1" }
                    .toMutableList<CameraInfo>()
            }.build()

        // Preview
        val preview = Preview.Builder()
            // We request aspect ratio but no resolution
            .setTargetAspectRatio(screenAspectRatio)
            // Set initial target rotation
            .setTargetRotation(rotation)
            .build()

        // ImageAnalysis
        val imageAnalyzer = ImageAnalysis.Builder()
            .setTargetAspectRatio(screenAspectRatio)
            .setTargetRotation(rotation)
            .setOutputImageFormat(OUTPUT_IMAGE_FORMAT_YUV_420_888)
            .build()
            .also { analysis ->
                analysis.setAnalyzer(cameraExecutor) { imageProxy ->
                    // close() must always run: an unclosed ImageProxy stalls the
                    // entire analysis pipeline (no further frames are delivered).
                    try {
                        analyzeFrame(imageProxy)
                    } finally {
                        imageProxy.close()
                    }
                }
            }
        // Must unbind the use-cases before rebinding them
        cameraProvider.unbindAll()

        try {
            // A variable number of use-cases can be passed here -
            // camera provides access to CameraControl & CameraInfo
            camera = cameraProvider.bindToLifecycle(this, cameraSelector, preview, imageAnalyzer)
            // Attach the viewfinder's surface provider to preview use case
            preview.setSurfaceProvider(viewFinder.surfaceProvider)
            observeCameraState(camera!!.cameraInfo)
        } catch (exc: Exception) {
            LogUtils.dTag(TAG, "Use case binding failed", exc)
        }
    }

    /**
     * Runs face detection on a single frame and updates the overlay.
     * Called on [cameraExecutor]; the caller is responsible for closing [imageProxy].
     */
    private fun analyzeFrame(imageProxy: ImageProxy) {
        faceRectView.clearFaceInfo()
        val faceInfoList = arrayListOf<FaceInfo>()
        val data = ImageUtils.yuv420ToNv21(imageProxy)
        val code = ftEngine?.detectFaces(data, imageProxy.width, imageProxy.height, FaceEngine.CP_PAF_NV21, faceInfoList)
        if (code != ErrorInfo.MOK) {
            LogUtils.dTag(TAG, "人脸识别错误：${code}")
            return
        }
        if (faceInfoList.isEmpty()) {
            LogUtils.dTag(TAG, "未识别到人脸")
            return
        }
        // Keep only the single largest face.
        TrackUtil.keepMaxFace(faceInfoList)
        val faceInfo = faceInfoList[0]
        if (drawHelper == null) {
            drawHelper =
                DrawHelper(imageProxy.width, imageProxy.height, viewFinder.width, viewFinder.height, viewFinder.display.rotation, 0, false, false, false)
        }
        if (currentFaceInfoModel?.faceId == faceInfo.faceId) {
            // Same person as last frame: only refresh the rect position.
            currentFaceInfoModel?.let {
                it.faceRect = drawHelper?.adjustRect(faceInfo.rect)
                it.faceInfo = faceInfo
            }
        } else {
            // A different face id: start tracking the new face.
            currentFaceInfoModel = DrawFaceInfoModel().apply {
                this.faceInfo = faceInfo
                this.faceRect = drawHelper?.adjustRect(faceInfo.rect)
                this.faceId = faceInfo.faceId
            }
        }
        faceRectView.setDrawFaceInfo(currentFaceInfoModel)
    }

    /** Logs CameraX state transitions and error codes for diagnostics. */
    private fun observeCameraState(cameraInfo: CameraInfo) {
        cameraInfo.cameraState.observe(viewLifecycleOwner) { cameraState ->
            when (cameraState.type) {
                CameraState.Type.PENDING_OPEN -> {
                    // Ask the user to close other camera apps
                    LogUtils.dTag(TAG, "CameraState: Pending Open")
                }
                CameraState.Type.OPENING -> {
                    // Show the Camera UI
                    LogUtils.dTag(TAG, "CameraState: Opening")
                }
                CameraState.Type.OPEN -> {
                    // Setup Camera resources and begin processing
                    LogUtils.dTag(TAG, "CameraState: Open")
                }
                CameraState.Type.CLOSING -> {
                    // Close camera UI
                    LogUtils.dTag(TAG, "CameraState: Closing")
                }
                CameraState.Type.CLOSED -> {
                    // Free camera resources
                    LogUtils.dTag(TAG, "CameraState: Closed")
                }
            }

            cameraState.error?.let { error ->
                when (error.code) {
                    // Open errors
                    CameraState.ERROR_STREAM_CONFIG -> {
                        // Make sure to setup the use cases properly
                        LogUtils.dTag(TAG, "Stream config error")
                    }
                    // Opening errors
                    CameraState.ERROR_CAMERA_IN_USE -> {
                        // Close the camera or ask user to close another camera app that's using the
                        // camera
                        LogUtils.dTag(TAG, "Camera in use")
                    }
                    CameraState.ERROR_MAX_CAMERAS_IN_USE -> {
                        // Close another open camera in the app, or ask the user to close another
                        // camera app that's using the camera
                        LogUtils.dTag(TAG, "Max cameras in use")
                    }
                    CameraState.ERROR_OTHER_RECOVERABLE_ERROR -> {
                        LogUtils.dTag(TAG, "Other recoverable error")
                    }
                    // Closing errors
                    CameraState.ERROR_CAMERA_DISABLED -> {
                        // Ask the user to enable the device's cameras
                        LogUtils.dTag(TAG, "Camera disabled")
                    }
                    CameraState.ERROR_CAMERA_FATAL_ERROR -> {
                        // Ask the user to reboot the device to restore camera function
                        LogUtils.dTag(TAG, "Fatal error")
                    }
                    // Closed errors
                    CameraState.ERROR_DO_NOT_DISTURB_MODE_ENABLED -> {
                        // Ask the user to disable the "Do Not Disturb" mode, then reopen the camera
                        LogUtils.dTag(TAG, "Do not disturb mode enabled")
                    }
                }
            }
        }
    }

    /**
     * Picks the CameraX [AspectRatio] constant (4:3 or 16:9) closest to the
     * given view dimensions.
     */
    private fun aspectRatio(width: Int, height: Int): Int {
        val previewRatio = max(width, height).toDouble() / min(width, height)
        return if (abs(previewRatio - RATIO_4_3_VALUE) <= abs(previewRatio - RATIO_16_9_VALUE)) {
            AspectRatio.RATIO_4_3
        } else {
            AspectRatio.RATIO_16_9
        }
    }

    /** Returns true if the device has an available back camera. False otherwise */
    private fun hasBackCamera(): Boolean {
        return cameraProvider?.hasCamera(CameraSelector.DEFAULT_BACK_CAMERA) ?: false
    }

    /** Returns true if the device has an available front camera. False otherwise */
    private fun hasFrontCamera(): Boolean {
        return cameraProvider?.hasCamera(CameraSelector.DEFAULT_FRONT_CAMERA) ?: false
    }

    /**
     * VIDEO-mode face detection engine, used for face tracking on preview frames.
     */
    private var ftEngine: FaceEngine? = null

    /**
     * Engine used for face feature extraction.
     */
    private var frEngine: FaceEngine? = null

    /**
     * IMAGE-mode liveness engine, used for liveness detection on preview frames.
     */
    private var flEngine: FaceEngine? = null

    /**
     * Initializes the three ArcFace engines. An engine whose init fails is set
     * back to null so it is neither used nor unInit-ed later.
     */
    private fun initFace() {
        ftEngine = FaceEngine()
        var code = ftEngine?.init(requireContext(), DetectMode.ASF_DETECT_MODE_VIDEO, DetectFaceOrientPriority.ASF_OP_ALL_OUT, 16, 10, FaceEngine.ASF_FACE_DETECT)
        if (code != ErrorInfo.MOK) {
            ftEngine = null
            LogUtils.dTag(TAG, "人脸识别引擎初始化失败")
        }
        frEngine = FaceEngine()
        code = frEngine?.init(requireContext(), DetectMode.ASF_DETECT_MODE_IMAGE, DetectFaceOrientPriority.ASF_OP_0_ONLY, 16, 10, FaceEngine.ASF_FACE_RECOGNITION)
        if (code != ErrorInfo.MOK) {
            // Drop the failed engine so destroyEngine() won't unInit an uninitialized engine.
            frEngine = null
            LogUtils.dTag(TAG, "frEngine 初始化错误，${code}")
        }
        flEngine = FaceEngine()
        code = flEngine?.init(requireContext(), DetectMode.ASF_DETECT_MODE_IMAGE, DetectFaceOrientPriority.ASF_OP_0_ONLY, 16, 10, FaceEngine.ASF_LIVENESS)
        if (code != ErrorInfo.MOK) {
            // Drop the failed engine so destroyEngine() won't unInit an uninitialized engine.
            flEngine = null
            LogUtils.dTag(TAG, "flEngine 初始化错误，${code}")
        }
    }

    /** Releases all ArcFace engines; nulls each field so a double call is harmless. */
    private fun destroyEngine() {
        ftEngine?.let {
            synchronized(it) {
                val code = it.unInit()
                LogUtils.dTag(TAG, "ftEngine 回收，${code}")
            }
        }
        ftEngine = null
        frEngine?.let {
            synchronized(it) {
                val code = it.unInit()
                LogUtils.dTag(TAG, "frEngine 回收，${code}")
            }
        }
        frEngine = null
        flEngine?.let {
            synchronized(it) {
                val code = it.unInit()
                LogUtils.dTag(TAG, "flEngine 回收，${code}")
            }
        }
        flEngine = null
    }

    override fun onDestroyView() {
        destroyEngine()
        // Stop the analysis thread; without this the executor's thread leaks on
        // every view re-creation. setData may not have run if the view failed early.
        if (::cameraExecutor.isInitialized) {
            cameraExecutor.shutdown()
        }
        super.onDestroyView()
    }

    companion object {
        private const val RATIO_4_3_VALUE = 4.0 / 3.0
        private const val RATIO_16_9_VALUE = 16.0 / 9.0
    }

}