package com.newlink.building.userinfo.activity

import android.Manifest
import android.app.ProgressDialog
import android.content.ContentResolver
import android.content.Intent
import android.content.pm.PackageManager
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.Matrix
import android.graphics.Rect
import android.net.Uri
import android.provider.MediaStore
import android.provider.Settings
import android.util.Base64
import android.view.View
import android.widget.TextView
import androidx.activity.result.ActivityResultLauncher
import androidx.activity.result.contract.ActivityResultContracts
import androidx.activity.result.contract.ActivityResultContracts.OpenDocument
import androidx.core.app.ActivityCompat
import androidx.exifinterface.media.ExifInterface
import androidx.lifecycle.lifecycleScope
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.face.Face
import com.google.mlkit.vision.face.FaceDetection
import com.google.mlkit.vision.face.FaceDetector
import com.google.mlkit.vision.face.FaceDetectorOptions
import com.newlink.building.common_base.base.NL_BaseMvpActivity
import com.newlink.building.common_base.ext.showToast
import com.newlink.building.common_base.model.bean.AuthKeyTypeFace
import com.newlink.building.common_base.model.bean.AuthRequestBody
import com.newlink.building.common_base.model.bean.AuthResponseBody
import com.newlink.building.common_base.model.bean.DeleteFaceDataRequest
import com.newlink.building.common_base.model.bean.DeleteFaceDataRequestBody
import com.newlink.building.common_base.model.bean.LoadAuthDataRequestBody
import com.newlink.building.common_base.model.bean.LoadFaceListDataRequest
import com.newlink.building.common_base.model.bean.UploadFaceDataRequest
import com.newlink.building.common_base.utils.DialogUtil
import com.newlink.building.common_base.utils.NLog
import com.newlink.building.common_base.utils.GridSpaceDecoration
import com.newlink.building.common_base.utils.PermissionHelper
import com.newlink.building.common_base.utils.RomUtils
import com.newlink.building.common_base.utils.ToastUtils
import com.newlink.building.common_base.widget.CustomDialog
import com.newlink.building.common_base.widget.CustomMessageDialog
import com.newlink.building.userinfo.R
import com.newlink.building.userinfo.adapter.Module_UserInfo_FaceDataAdapter
import com.newlink.building.userinfo.databinding.ActivityAuthFaceBinding
import com.newlink.building.userinfo.helper.Module_UserInfo_FaceDetectUtil
import com.newlink.building.userinfo.helper.Module_UserInfo_PermissionUtils
import com.newlink.building.userinfo.mvp.Module_UserInfo_AuthFacePresenter
import com.newlink.building.userinfo.mvp.contract.Module_UserInfo_AuthFaceContract
import com.newlink.building.userinfo.utils.getRotationDegrees
import com.newlink.building.obfuscation.Module_Obfuscation_ObfuscationHelper
import com.newlink.building.obfuscation.Module_Obfuscation_ObfuscationInvoker
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.MainScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import java.io.ByteArrayOutputStream
import java.io.File

/**
 * @Author: Jake
 * @Date: 2023-12-14
 * @Description:
 */
class Module_UserInfo_AuthFaceActivityNL : NL_BaseMvpActivity<Module_UserInfo_AuthFaceContract.View, Module_UserInfo_AuthFaceContract.Presenter>(),
    Module_UserInfo_AuthFaceContract.View {

    // Wait dialog shown while detectionFaceImage() runs.
    private var mDialog: ProgressDialog? = null
    // Adapter backing the enrolled-face RecyclerView (faceDataList).
    private var mAdapter: Module_UserInfo_FaceDataAdapter? = null
    // Server-provided list of enrolled faces rendered by mAdapter.
    private var mFaceList: MutableList<AuthResponseBody> = mutableListOf()
    // Base64 payload of the most recently captured/selected face image.
    private var mBase64: String? = null
    // Parameters returned by the AuthParam screen (request code 1_000_003).
    private var mParamName: String? = null
    private var mParamUserId: String? = null
    private var mParamArea: Array<String>? = null
    // Dialog managed by showWaitingDialog()/hideWaitingDialog().
    private var mWaitProgressDialog: ProgressDialog? = null
    // Permission helper; also shows the denial-explanation dialog.
    private var mPermissionUtils: Module_UserInfo_PermissionUtils? = null
    // ML Kit detector built in initFaceKit(), used by runFaceDetection2().
    private lateinit var mFaceDetector: FaceDetector
    // OpenDocument launcher created in initData() via activityResultLauncher().
    private lateinit var mDocumentLauncher: ActivityResultLauncher<Array<String>>

    /** Supplies the MVP presenter instance for this screen. */
    override fun createPresenter(): Module_UserInfo_AuthFaceContract.Presenter {
        return Module_UserInfo_AuthFacePresenter()
    }

    // View binding for activity_auth_face; assigned once in attachLayoutRes().
    private lateinit var mBinding: ActivityAuthFaceBinding

    /** Inflates the binding and hands its root view to the base activity. */
    override fun attachLayoutRes(): View =
        ActivityAuthFaceBinding.inflate(layoutInflater).also { mBinding = it }.root

    override fun initData() {
        // Obfuscation hooks fired when the face-auth page initializes.
        Module_Obfuscation_ObfuscationHelper.onMethodEnter("AuthFaceActivity", "initData")
        Module_Obfuscation_ObfuscationInvoker.invokeRandom("face_auth")

        // Build the ML Kit detector before any detection can be requested.
        initFaceKit()
        mPermissionUtils = Module_UserInfo_PermissionUtils.getInstance().with(this)
        // ---- data -----

        mDocumentLauncher = activityResultLauncher()
        // ---- init widgets ----
        mBinding.btnAuthFaceGallery.setOnClickListener {
            // Obfuscation hook fired when a photo is picked for recognition.
            Module_Obfuscation_ObfuscationInvoker.invokeSpecific(3, "FACE_GALLERY")
            // NOTE(review): no runtime read permission is requested before the
            // picker — presumably ACTION_PICK does not need one; confirm on
            // the minimum supported API level.
            startPickPhoto()
            // Legacy permission-gated gallery flow, kept for reference:
//            if (PermissionUtils.isGranted(
//                    Manifest.permission.READ_EXTERNAL_STORAGE, this@AuthFaceActivity
//                )
//            ) {
//                NLog.e("[AuthFaceActivity] open gallery")
//                openGallery()
//            } else {
//                NLog.e("[AuthFaceActivity] open gallery request permission")
//                initPermission()
//            }
        }

        // Title bar caption.
        findViewById<View>(R.id.title_bar).findViewById<TextView>(R.id.tv_title_bar_center).text =
            getString(R.string.auth_face)

        mBinding.btnAuthFaceCamera.setOnClickListener {

            // Camera capture path: require CAMERA permission, then open the
            // in-app face-detection screen (result code 1_000_002).
            if (!PermissionHelper.required(this).hasPermission(Manifest.permission.CAMERA)) {
                checkCameraPermission()
            } else {
                startActivityForResult(
                    Intent(this@Module_UserInfo_AuthFaceActivityNL, Module_UserInfo_FaceDetectionActivityNL::class.java), 1_000_002
                )
            }
        }

        // List of already-enrolled faces with per-item delete support.
        mAdapter = Module_UserInfo_FaceDataAdapter(mFaceList).apply {
            setOnDeleteClickListener(object : Module_UserInfo_FaceDataAdapter.OnDeleteClickListener {
                override fun onDeleteClick(item: AuthResponseBody) {
                    removeFaceData(item)
                }
            })
        }

        mBinding.faceDataList.apply {
            adapter = mAdapter
            // Item animations disabled — presumably to avoid refresh flicker.
            itemAnimator = null
            addItemDecoration(
                GridSpaceDecoration(
                    resources.getDimension(com.newlink.building.common_base.R.dimen.dp_8).toInt()
                )
            )
        }
    }

    /**
     * Creates the shared ML Kit face detector (accurate mode, all landmarks
     * and classifications). Xiaomi devices get a larger minimum face size.
     */
    private fun initFaceKit() {
        val minFaceSize = if (RomUtils.isXiaoMi()) 0.5f else 0.3f
        mFaceDetector = FaceDetection.getClient(
            FaceDetectorOptions.Builder()
                .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
                .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
                .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
                .setMinFaceSize(minFaceSize)
                .build()
        )
    }


//    private fun initPermission() {
//        // Android 13 (API 33) 及以上
//        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
//            if (ContextCompat.checkSelfPermission(
//                    this, Manifest.permission.READ_MEDIA_IMAGES
//                ) != PackageManager.PERMISSION_GRANTED
//            ) {
//                requestPermissionLauncher.launch(Manifest.permission.READ_MEDIA_IMAGES)
//            } else {
//                // 权限已被授予，可以读取照片
//                openGallery()
//            }
//        } else { // Android 6.0 到 Android 12 (API 级别 23-32)
//            if (ContextCompat.checkSelfPermission(
//                    this, Manifest.permission.READ_EXTERNAL_STORAGE
//                ) != PackageManager.PERMISSION_GRANTED
//            ) {
//                requestPermissionLauncher.launch(Manifest.permission.READ_EXTERNAL_STORAGE)
//            } else {
//                // 权限已被授予，可以读取照片
//                openGallery()
//            }
//        }
//
//    }

    // Single-permission launcher: opens the gallery on grant, otherwise shows
    // the permission-explanation dialog.
    private val requestPermissionLauncher = registerForActivityResult(
        ActivityResultContracts.RequestPermission()
    ) { granted ->
        if (!granted) {
            mPermissionUtils?.showDialog(this@Module_UserInfo_AuthFaceActivityNL)
        } else {
            openGallery()
        }
    }

    override fun onResume() {
        super.onResume()
        // Re-fetch the enrolled face list whenever the screen returns to the
        // foreground (e.g. after an upload or delete round-trip).
        loadData()
    }

    /**
     * Opens the system image picker. The historical OS-version / OEM
     * branching was retired; all devices now use the ACTION_PICK flow.
     */
    private fun openGallery() = startPickPhoto()

    /**
     * Launches the system picker restricted to images; the result arrives in
     * onActivityResult under request code 1_000_001.
     */
    private fun startPickPhoto() {
        val intent = Intent(Intent.ACTION_PICK).apply {
            setDataAndType(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, "image/*")
        }
        startActivityForResult(intent, 1_000_001)
    }

    /**
     * Registers the OpenDocument contract used to pick an image for face
     * enrollment. The picked image is decoded at reduced resolution, rotated
     * per its EXIF data on Xiaomi/Samsung devices, then handed to detection.
     *
     * Fix: previously the result of [rotateBitmap] was force-unwrapped with
     * `!!`; we now fall back to the un-rotated bitmap instead of crashing.
     *
     * @return launcher accepting an array of MIME type filters
     */
    private fun activityResultLauncher(): ActivityResultLauncher<Array<String>> {
        return registerForActivityResult(OpenDocument()) { resultUri ->
            if (resultUri == null) {
                return@registerForActivityResult
            }
            val bmp = decodeBitmapWithHalfSize(contentResolver, resultUri)
                ?: return@registerForActivityResult
            if (RomUtils.isXiaoMi() || RomUtils.isSamsung()) {
                NLog.e("[authface] >>>> 小米")
                // EXIF-based rotation; fall back to the original on failure.
                runFaceDetection2(rotateBitmap(bmp, resultUri) ?: bmp)
            } else {
                NLog.e("[authface] >>>> other")
                runFaceDetection2(bmp)
            }
        }
    }

    /**
     * Detects the most prominent face in [bitmap], crops it with padding,
     * resizes it to 224x224 and logs the result.
     *
     * NOTE(review): the upload/Base64 path at the end is commented out, so
     * this function currently has no lasting effect beyond logs and toasts;
     * kept for parity with [runFaceDetection2].
     *
     * Fixes vs. previous version:
     *  - the `bestFace?.let { ... } ?: { toast }` fallback was an un-invoked
     *    lambda and never ran; the empty-list case is handled explicitly now;
     *  - `path?.let` on the non-null [path] parameter was redundant;
     *  - `croppedFace!!` was unnecessary (the rotate helper is non-null).
     *
     * @param path optional file path used to read EXIF rotation; empty means
     *             no rotation is applied.
     */
    private fun runFaceDetection(bitmap: Bitmap, path: String = "") {
        NLog.e("[authface] >>>> origin bmp width ${bitmap.width}")
        NLog.e("[authface] >>>> origin bmp height ${bitmap.height}")

        // Downscale very large inputs before handing them to ML Kit.
        val scaledBitmap = scaleBitmap(bitmap)
        NLog.e("[authface] >>>> detected bmp width ${scaledBitmap.width}")
        NLog.e("[authface] >>>> detected bmp height ${scaledBitmap.height}")

        val image = InputImage.fromBitmap(scaledBitmap, 0)

        // Local high-accuracy detector with a small minimum face size.
        val highAccuracyOpts = FaceDetectorOptions.Builder()
            .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
            .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
            .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
            .setMinFaceSize(0.1f)
            .build()
        val detector = FaceDetection.getClient(highAccuracyOpts)

        detector.process(image).addOnSuccessListener { faces ->
            if (faces.isEmpty()) {
                NLog.e("[authface] >>> No faces detected")
                ToastUtils.showTextToast(
                    this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.face_detection_failed)
                )
                return@addOnSuccessListener
            }

            NLog.e("[authface] >>>  faces detected size = ${faces.size}")
            val bestFace = faces[0]
            val boundingBox = bestFace.boundingBox

            NLog.e("[authface] >>>> detected faces in width ${boundingBox.width()}")
            NLog.e("[authface] >>>> detected faces in height ${boundingBox.height()}")

            // Grow the detected box by 80% of its own size on each side so
            // the crop keeps context around the face.
            val paddingFactor = 0.8f
            val extendedBoundingBox = extendBoundingBox(
                boundingBox, bitmap.width, bitmap.height, paddingFactor
            )
            NLog.e("[authface] >>>> extendedBoundingBox width ${extendedBoundingBox.width()}")
            NLog.e("[authface] >>>> extendedBoundingBox height ${extendedBoundingBox.height()}")

            // Read EXIF rotation only when a real file path was supplied.
            val rotationDegrees = if (path.isNotEmpty()) getRotationDegrees(path) else 0

            val croppedFace = rotateBitmap(
                cropBitmapByRect(bitmap, extendedBoundingBox), rotationDegrees.toFloat()
            )

            // Scale the acquired face to the 224x224 input the model expects.
            val scaled: Bitmap =
                Module_UserInfo_FaceDetectUtil.getResizedBitmap(croppedFace, 224, 224)
            NLog.e("[authface]  ======= cropped width ========== ${scaled.width}")
            NLog.e("[authface]  ======= cropped height ========== ${scaled.height}")
        }.addOnFailureListener { e ->
            NLog.e("[authface]  Face detection failed: ${e.message}")
            ToastUtils.showTextToast(
                this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.face_detection_failed)
            )
        }
    }


    /**
     * Runs ML Kit face detection on [bitmap] and, on success, converts the
     * face image to a Base64 payload (stored in [mBase64]) before opening the
     * parameter screen via [setFaceParam].
     *
     * Xiaomi/Samsung devices upload a compressed full-portrait image; other
     * devices upload a padded crop of the detected face.
     *
     * Fix: the previous `bestFace?.let { ... } ?: { toast }` fallback was an
     * un-invoked lambda and never ran; the empty-list case is now handled by
     * an explicit early return (faces[0] cannot be missing after that check).
     */
    private fun runFaceDetection2(bitmap: Bitmap) {
        NLog.e("[authface] >>>> origin bmp width ${bitmap.width}")
        NLog.e("[authface] >>>> origin bmp height ${bitmap.height}")

        // Only Xiaomi input is downscaled here; other OEMs are detected at
        // full resolution.
        val scaledBitmap = if (RomUtils.isXiaoMi()) scaleBitmap(bitmap) else bitmap

        NLog.e("[authface] >>>> detected bmp width ${scaledBitmap.width}")
        NLog.e("[authface] >>>> detected bmp height ${scaledBitmap.height}")

        val image = InputImage.fromBitmap(scaledBitmap, 0)

        mFaceDetector.process(image).addOnSuccessListener { faces ->
            if (faces.isEmpty()) {
                NLog.e("[authface] >>> No faces detected")
                ToastUtils.showTextToast(
                    this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.face_detection_failed)
                )
                return@addOnSuccessListener
            }

            NLog.e("[authface] >>>  faces detected size = ${faces.size}")
            val bestFace = faces[0]
            val boundingBox = bestFace.boundingBox

            NLog.e("[authface] >>>> detected faces in width ${boundingBox.width()}")
            NLog.e("[authface] >>>> detected faces in height ${boundingBox.height()}")

            // Grow the detected box by 80% of its own size on each side.
            val paddingFactor = 0.8f
            val extendedBoundingBox = extendBoundingBox(
                boundingBox, bitmap.width, bitmap.height, paddingFactor
            )
            NLog.e("[authface] >>>> extendedBoundingBox width ${extendedBoundingBox.width()}")
            NLog.e("[authface] >>>> extendedBoundingBox height ${extendedBoundingBox.height()}")

            if (RomUtils.isXiaoMi() || RomUtils.isSamsung()) {
                lifecycleScope.launch {
                    // Resize + JPEG-compress the whole portrait, then encode.
                    val scaled: Bitmap =
                        Module_UserInfo_FaceDetectUtil.getResizedBitmap(bitmap, 240, 270)
                    val tempBmp = compressAndEncodeImage2(scaled)

                    MainScope().launch {
                        val faceBase64 =
                            Module_UserInfo_FaceDetectUtil.byte2Base64(Module_UserInfo_FaceDetectUtil.bitmap2Byte(tempBmp))
                        mBase64 = faceBase64
                        NLog.e("[authface] >>upload bitmap>> size = ${tempBmp.byteCount / 1024} mBase64= ${mBase64}")
                        delay(100)
                        setFaceParam()
                    }
                }
            } else {
                val croppedFace =
                    rotateBitmap(cropBitmapByRect(bitmap, extendedBoundingBox), 0f)
                // Scale the acquired face to the model's expected input size.
                val scaled: Bitmap =
                    Module_UserInfo_FaceDetectUtil.getResizedBitmap(croppedFace, 320, 320)
                NLog.e("[authface]  ======= cropped width ========== ${scaled.width}")
                NLog.e("[authface]  ======= cropped height ========== ${scaled.height}")
                val faceBase64 =
                    Module_UserInfo_FaceDetectUtil.byte2Base64(Module_UserInfo_FaceDetectUtil.bitmap2Byte(scaled))
                NLog.e("[authface]  ====人脸信息 base64 ===== ${faceBase64?.trim()}")
                faceBase64?.let { face ->
                    ToastUtils.showTextToast(
                        this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.face_added_successfully)
                    )
                    mBase64 = face
                    MainScope().launch {
                        delay(400)
                        setFaceParam()
                    }
                }
                croppedFace.recycle()
            }
        }.addOnFailureListener { e ->
            NLog.e("[authface]  Face detection failed: ${e.message}")
            ToastUtils.showTextToast(
                this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.face_detection_failed)
            )
        }
    }

    /**
     * Shrinks oversized bitmaps before detection: anything over 5100px on a
     * side is reduced to a third, over 3600px to a half; smaller bitmaps are
     * returned untouched.
     */
    private fun scaleBitmap(bitmap: Bitmap): Bitmap {
        val divisor = when {
            bitmap.width > 5100 || bitmap.height > 5100 -> 3
            bitmap.width > 3600 || bitmap.height > 3600 -> 2
            else -> return bitmap
        }
        return Bitmap.createScaledBitmap(
            bitmap, bitmap.width / divisor, bitmap.height / divisor, false
        )
    }

    /**
     * Rotates [bitmap] according to the EXIF orientation stored at [imageUri].
     *
     * Fixes vs. previous version: the content stream is now closed via
     * `use { }` (it previously leaked), and the unused `File` plus the
     * `path?.let` on a non-null String were removed.
     *
     * @return the rotated bitmap, or [bitmap] itself when no rotation applies
     *         or the EXIF data cannot be read. (Nullable return type kept for
     *         caller compatibility.)
     */
    private fun rotateBitmap(bitmap: Bitmap, imageUri: Uri): Bitmap? {
        val path = imageUri.toString()
        NLog.e("[authface]  >>>>>>>>>>>>>>>>>>>path:$path")
        val orientation = contentResolver.openInputStream(imageUri)?.use { stream ->
            ExifInterface(stream).getAttributeInt(
                ExifInterface.TAG_ORIENTATION,
                ExifInterface.ORIENTATION_UNDEFINED
            )
        }
        val rotatedBitmap = when (orientation) {
            ExifInterface.ORIENTATION_ROTATE_90 -> rotateImage(bitmap, 90f)
            ExifInterface.ORIENTATION_ROTATE_180 -> rotateImage(bitmap, 180f)
            ExifInterface.ORIENTATION_ROTATE_270 -> rotateImage(bitmap, 270f)
            else -> bitmap
        }
        NLog.e("[authface]  >>>>>>>>>>>>>>>>>>>rotatedBitmap:$rotatedBitmap")
        return rotatedBitmap
    }

    /** Returns [source] transformed by a rotation of [angle] degrees. */
    private fun rotateImage(source: Bitmap, angle: Float): Bitmap =
        Bitmap.createBitmap(
            source, 0, 0, source.width, source.height,
            Matrix().apply { postRotate(angle) }, true
        )


    /** Crops [bitmap] to the exact face [boundingBox] (no padding). */
    private fun cropFace(bitmap: Bitmap, boundingBox: Rect): Bitmap =
        Bitmap.createBitmap(
            bitmap,
            boundingBox.left,
            boundingBox.top,
            boundingBox.width(),
            boundingBox.height()
        )

    /** Applies a [rotationDegrees] rotation matrix to [bitmap]. */
    private fun rotateBitmap(bitmap: Bitmap, rotationDegrees: Float): Bitmap {
        val transform = Matrix().apply { postRotate(rotationDegrees) }
        return Bitmap.createBitmap(
            bitmap, 0, 0, bitmap.width, bitmap.height, transform, true
        )
    }

    /**
     * Expands [box] by [paddingFactor] of its own width/height on every side,
     * clamping the result to the [imageWidth] x [imageHeight] bounds.
     */
    private fun extendBoundingBox(
        box: Rect, imageWidth: Int, imageHeight: Int, paddingFactor: Float
    ): Rect {
        val padX = (box.width() * paddingFactor).toInt()
        val padY = (box.height() * paddingFactor).toInt()
        return Rect(
            (box.left - padX).coerceAtLeast(0),
            (box.top - padY).coerceAtLeast(0),
            (box.right + padX).coerceAtMost(imageWidth),
            (box.bottom + padY).coerceAtMost(imageHeight)
        )
    }

    /** Crops [bitmap] to [rect]; the rect must lie inside the bitmap. */
    private fun cropBitmapByRect(bitmap: Bitmap, rect: Rect): Bitmap =
        Bitmap.createBitmap(bitmap, rect.left, rect.top, rect.width(), rect.height())

    /**
     * JPEG-compresses [bitmap] down to at most 100 KB (dropping quality in
     * steps of 5, floor 0) and returns the result Base64-encoded.
     *
     * Fix: the loop condition previously called `toByteArray()` — a full
     * array copy — on every iteration and allocated a fresh stream each pass;
     * `size()` and `reset()` avoid both.
     */
    private fun compressAndEncodeImage(bitmap: Bitmap): String {
        var quality = 100
        val buffer = ByteArrayOutputStream()
        bitmap.compress(Bitmap.CompressFormat.JPEG, quality, buffer)

        while (buffer.size() > 100 * 1024 && quality > 0) {
            quality -= 5
            buffer.reset()
            bitmap.compress(Bitmap.CompressFormat.JPEG, quality, buffer)
        }

        return Base64.encodeToString(buffer.toByteArray(), Base64.DEFAULT)
    }

    /**
     * JPEG-compresses [bitmap] down to at most 100 KB (dropping quality in
     * steps of 5, floor 0) and decodes the compressed bytes back into a
     * Bitmap (i.e. a lossy re-encode, not Base64).
     *
     * Fix: same as [compressAndEncodeImage] — `size()`/`reset()` replace the
     * per-iteration `toByteArray()` copy and stream reallocation.
     */
    private fun compressAndEncodeImage2(bitmap: Bitmap): Bitmap {
        var quality = 100
        val buffer = ByteArrayOutputStream()
        bitmap.compress(Bitmap.CompressFormat.JPEG, quality, buffer)

        while (buffer.size() > 100 * 1024 && quality > 0) {
            quality -= 5
            buffer.reset()
            bitmap.compress(Bitmap.CompressFormat.JPEG, quality, buffer)
        }

        val compressedImage = buffer.toByteArray()
        return BitmapFactory.decodeByteArray(compressedImage, 0, compressedImage.size)
    }


    /**
     * Returns the detected face whose bounding-box center is closest
     * (Euclidean distance) to the image center, or null when [faces] is
     * empty.
     *
     * Idiom fix: replaces the manual min-tracking loop and
     * `Math.pow(x, 2.0)` squaring with `minByOrNull` + `Math.hypot`.
     */
    private fun getCenterMostFace(faces: List<Face>, imageWidth: Int, imageHeight: Int): Face? {
        val imageCenterX = imageWidth / 2.0
        val imageCenterY = imageHeight / 2.0
        return faces.minByOrNull { face ->
            val dx = face.boundingBox.centerX() - imageCenterX
            val dy = face.boundingBox.centerY() - imageCenterY
            Math.hypot(dx, dy)
        }
    }

    /**
     * Asks the user to confirm, then deletes the given enrolled face on the
     * server via the presenter.
     *
     * @param item the face entry to delete; its id is sent as the keyId
     */
    private fun removeFaceData(item: AuthResponseBody) {
        CustomDialog(this).setMessage(getString(R.string.confirm_delete_face_info))
            .setPositiveButton(object : CustomDialog.OnClickListener {
                override fun onClick(dialog: CustomDialog) {
                    NLog.e("[authface] ........ delete face")
                    // Show the loading spinner before issuing the request.
                    showLoading()
                    val request = DeleteFaceDataRequest(
                        accessToken = accessToken,
                        uId = "$userId",
                        data = DeleteFaceDataRequestBody(
                            keyId = arrayListOf("${item.id}")
                        )
                    )
                    mPresenter?.removeFaceData(request)
                    dialog.dismiss()
                }
            }).setNegativeButton(object : CustomDialog.OnClickListener {
                override fun onClick(dialog: CustomDialog) {
                    // Cancel: close the dialog without any server call.
                    dialog.dismiss()
                }
            }).show()
    }

    /**
     * Decodes a bitmap from [uri], returning null (and logging) on failure.
     *
     * Fix: the content stream is now closed via `use { }` — it previously
     * leaked.
     */
    private fun uriToBitmap(uri: Uri): Bitmap? {
        return try {
            contentResolver.openInputStream(uri)?.use { stream ->
                BitmapFactory.decodeStream(stream)
            }
        } catch (e: Exception) {
            NLog.e("Failed to decode bitmap from URI: ${e.message}")
            null
        }
    }

    /**
     * Decodes [uri] into a bitmap, downsampling large images to save memory:
     * images over 3600px on a side are decoded at 1/4 size on Xiaomi and at
     * 1/2 size elsewhere.
     *
     * BUG FIX: with `inJustDecodeBounds = true`, `decodeStream` returns null
     * and only fills `options.outWidth`/`outHeight`, so the previous code
     * (which inspected the returned bitmap) never enabled downsampling. The
     * bounds are now read from the options object as the API intends.
     */
    private fun decodeBitmapWithHalfSize(contentResolver: ContentResolver, uri: Uri): Bitmap? {
        // Pass 1: read only the image dimensions (no pixel data loaded).
        val options = BitmapFactory.Options().apply {
            inJustDecodeBounds = true
        }
        contentResolver.openInputStream(uri)?.use { inputStream ->
            BitmapFactory.decodeStream(inputStream, null, options)
        }

        if (options.outWidth > 3600 || options.outHeight > 3600) {
            options.inSampleSize = if (RomUtils.isXiaoMi()) 4 else 2
        }

        // Pass 2: decode the pixels at the chosen sample size.
        options.inJustDecodeBounds = false
        return contentResolver.openInputStream(uri)?.use { inputStream ->
            BitmapFactory.decodeStream(inputStream, null, options)
        }
    }

    /**
     * Fetches the enrolled face list from the server, showing the loading
     * spinner while the presenter request is in flight.
     */
    private fun loadData() {
        NLog.e("[authface]......加载人脸数据.... ")
        showLoading()
        val request = LoadFaceListDataRequest(
            accessToken = accessToken,
            uId = "$userId",
            data = LoadAuthDataRequestBody(keyType = AuthKeyTypeFace)
        )
        mPresenter?.getFaceData(request)
    }

    /**
     * Uploads a captured face to the KCP server.
     *
     * @param keyName display name for the face key
     * @param userId  target user id; note this parameter shadows the
     *                activity-level userId property, so both `uId` and
     *                `data.userId` below use the parameter value
     * @param base64  Base64-encoded face image; an empty payload aborts
     * @param macs    areas / device MACs the face applies to
     */
    private fun uploadFaceData(
        keyName: String = "", userId: String = "", base64: String, macs: Array<String> = arrayOf()
    ) {
        // FIX: validate the actual parameter rather than the mBase64 field —
        // the old check passed whenever the field happened to be set, even if
        // an empty payload was supplied to this call.
        if (base64.isEmpty()) {
            return
        }
        showLoading()
        NLog.e("[authface]......上传人脸数据到KCP服务器.... ")
        val request = UploadFaceDataRequest(
            accessToken = accessToken, uId = "$userId", data = AuthRequestBody(
                userId = userId,
                keyName = keyName,
                keyType = AuthKeyTypeFace,
                keyInfo = base64,
                macs = macs
            )
        )
        mPresenter?.uploadFaceData(request)
    }

    /** Opens the parameter screen (name / user id / areas) for the captured face. */
    private fun setFaceParam() {
        val intent = Intent(
            this@Module_UserInfo_AuthFaceActivityNL,
            Module_UserInfo_AuthParamActivityNL::class.java
        )
        startActivityForResult(intent, 1_000_003)
    }

    // ---------- Activity 系统回调 -------------------

    override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
        super.onActivityResult(requestCode, resultCode, data)
        // 1_000_001: an image was picked from the system gallery.
        if (requestCode == 1_000_001 && resultCode == RESULT_OK) {
            if (data != null) {
                val imageUri = data.data
                if (imageUri != null) {
                    // Prefer RGB_565 to reduce decode memory.
                    val options = BitmapFactory.Options().apply {
                        inPreferredConfig = Bitmap.Config.RGB_565
                    }
                    // NOTE(review): the input stream opened here is never
                    // closed, and a zero-sized Rect is passed as the
                    // outPadding argument — confirm both are acceptable.
                    val bmp = BitmapFactory.decodeStream(
                        contentResolver.openInputStream(imageUri),
                        Rect(0, 0, 0, 0),
                        options
                    )
//                    runFaceDetection2(bmp!!)
//                    detectionFaceImage(bmp!!)

                    // Xiaomi/Samsung gallery output gets EXIF-based rotation
                    // before detection; other OEMs are used as-is.
                    if (RomUtils.isXiaoMi() || RomUtils.isSamsung()) {
                        NLog.e("[authface] >>>> 小米")
                        val rotateBmp = rotateBitmap(bmp!!, imageUri)
                        runFaceDetection2(rotateBmp!!)
                    } else {
                        NLog.e("[authface] >>>> other")
                        runFaceDetection2(bmp!!)
                    }


                } else {
                    ToastUtils.showTextToast(
                        this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.face_detection_failed)
                    )
                }

            }
        } else if (requestCode == 1_000_002) {
            // 1_000_002: a face was captured by the in-app camera screen.
            // NOTE(review): resultCode is not checked for this branch (or the
            // next one) — confirm that is intended.
            val result = data?.getStringExtra("face_data")
            result?.let {
                NLog.e("[authface] Camera get face data is $it")
                mBase64 = it
                setFaceParam()
            }
        } else if (requestCode == 1_000_003) {
            // 1_000_003: upload parameters chosen on the AuthParam screen.
            mParamName = data?.getStringExtra("face_name")
            mParamUserId = data?.getStringExtra("face_user_id")
            mParamArea = data?.getStringArrayExtra("face_area")
//            showWaitingDialog("正在服务器验证")
//            checkFaceValid()
            // Upload only when every parameter and the face payload are set.
            if (mParamName?.isNotEmpty() == true
                && mParamUserId?.isNotEmpty() == true
                && mBase64?.isNotEmpty() == true
                && mParamArea?.isNotEmpty() == true
            ) {
                NLog.e("[authface] > face > $mBase64 ")
                uploadFaceData(mParamName!!, mParamUserId!!, mBase64!!, mParamArea!!)
                NLog.e("[authface]......上传人脸数据到KCP服务器.... ")
            } else {
                // Incomplete parameters: drop the spinner and abort quietly.
                hideLoading()
                NLog.e("[authface]......上传人脸参数不完整.... ")
            }
        }
    }

    /**
     * Runs on-device face detection over [bmp], showing a wait dialog while working.
     * On success the Base64 result is stored in [mBase64] and the parameter flow starts;
     * on failure a toast is shown. Detection itself runs on the IO dispatcher.
     */
    private fun detectionFaceImage(bmp: Bitmap) {
        mDialog =
            DialogUtil.getWaitDialog(this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.face_detecting))
        mDialog?.show()
        NLog.e("[authface] >>> import bmp size : ${bmp.byteCount}")
        // Down-scale very large images (> ~3 MB) before detection; 1/10 per dimension.
        val bitmap = if (bmp.byteCount / 1024f / 1024f > 3) {
            Bitmap.createScaledBitmap(bmp, bmp.width / 10, bmp.height / 10, true)
        } else {
            bmp
        }
        NLog.e("[authface] >>> scale bmp size : ${bitmap.byteCount}")

        lifecycleScope.launch(Dispatchers.IO) {
            Module_UserInfo_FaceDetectUtil.detectFace(bitmap) { encoded ->
                // The callback fires on the IO dispatcher; dialogs and toasts are UI work,
                // so hop to the main thread. Using lifecycleScope (not MainScope()) keeps
                // the continuation tied to the activity lifecycle and avoids a scope leak.
                lifecycleScope.launch(Dispatchers.Main) {
                    mDialog?.dismiss()
                    if (encoded.isNotEmpty()) {
                        NLog.e("[authface] .. face detection success > $encoded")
                        mBase64 = encoded
                        setFaceParam()
                    } else {
                        NLog.e("[authface] .. face detection failed ")
                        ToastUtils.showTextToast(
                            this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.face_detection_failed)
                        )
                    }
                }
            }
        }
    }


    /** Shows a spinner-style wait dialog with [msg], replacing any dialog already on screen. */
    private fun showWaitingDialog(msg: String) {
        hideWaitingDialog()
        mWaitProgressDialog = DialogUtil.getWaitDialog(this@Module_UserInfo_AuthFaceActivityNL, msg)
        mWaitProgressDialog?.apply {
            setProgressStyle(ProgressDialog.STYLE_SPINNER)
            show()
        }
    }

    /** Dismisses the wait dialog if it is currently visible and drops the reference. */
    private fun hideWaitingDialog() {
        val dialog = mWaitProgressDialog ?: return
        if (dialog.isShowing) {
            dialog.dismiss()
            mWaitProgressDialog = null
        }
    }

    /** Asks the presenter to validate the captured face data; no-op when none is available. */
    private fun checkFaceValid() {
        val face = mBase64
        if (face.isNullOrEmpty()) return
        NLog.e("[authface]......check face valid from http.... $face")
        mPresenter?.checkFaceValid(accessToken, face)
    }

    /**
     * Server-side validation succeeded — upload the face data.
     * Guards every parameter instead of crashing on `!!` (mirrors the completeness
     * check used in the 1_000_003 branch of onActivityResult).
     */
    override fun onCheckFaceValidSuccess() {
        NLog.e("[authface]......check face success.... ")
        val name = mParamName
        val userId = mParamUserId
        val base64 = mBase64
        val area = mParamArea
        if (name.isNullOrEmpty() || userId.isNullOrEmpty() || base64.isNullOrEmpty() || area.isNullOrEmpty()) {
            // Incomplete state — abort the upload and clear any progress UI.
            hideWaitingDialog()
            hideLoading()
            NLog.e("[authface]......上传人脸参数不完整.... ")
            return
        }
        showWaitingDialog("开始上传人脸信息")
        uploadFaceData(name, userId, base64, area)
    }

    /** Server-side validation failed — log it and clear any progress UI. */
    override fun onCheckFaceValidFail() {
        NLog.e("[authface]......check face failed.... ")
        hideWaitingDialog()
        hideLoading()
    }

    /** Presenter error hook: delegate to the base handler, log, then clear progress UI. */
    override fun showDefaultMsg(msg: String) {
        super.showDefaultMsg(msg)
        NLog.e("[authface]......showDefaultMsg.... $msg")
        hideWaitingDialog()
        hideLoading()
    }

    override fun onDestroy() {
        super.onDestroy()
        // Release the permission helper so it does not hold on to this activity.
        mPermissionUtils?.destroy()
    }

    // ------------- KCP request callbacks ------------------------

    /** Upload succeeded: dismiss the wait dialog, confirm to the user, and reload the list. */
    override fun onUploadFaceDataSuccess() {
        NLog.e("[authface] ........ upload face success")
        hideWaitingDialog()
        ToastUtils.showTextToast(this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.default_upload_success))
        // Refresh so the newly uploaded face record appears immediately.
        loadData()
    }

    /** Upload failed: show the server message, or the generic failure text when absent. */
    override fun onUploadFaceDataFailed(msg: String?) {
        hideLoading()
        NLog.e("[authface] ........ upload face failed")
        val error = msg?.takeIf { it.isNotEmpty() } ?: getString(R.string.default_upload_failed)
        ToastUtils.showTextToast(this@Module_UserInfo_AuthFaceActivityNL, error)
    }

    /** Delete succeeded: confirm to the user and reload the face list from the server. */
    override fun onDeleteFaceDataSuccess() {
        hideLoading()
        ToastUtils.showTextToast(this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.default_delete_success))
        NLog.e("[authface]......删除完毕重新加载人脸数据.... ")
        loadData()
    }

    /** Delete failed: clear progress UI and notify the user. */
    override fun onDeleteFaceDataFailed() {
        hideLoading()
        NLog.e("[authface] ........ 人脸删除失败")
        ToastUtils.showTextToast(this@Module_UserInfo_AuthFaceActivityNL, getString(R.string.default_delete_fail))
    }

    /**
     * Face-list response: replace the cached list and refresh the adapter/empty state.
     * Payloads whose first entry is not face-type data are ignored entirely
     * (adapter and empty-state are left untouched in that case).
     */
    override fun onFaceListDataSuccess(data: MutableList<AuthResponseBody>?) {
        NLog.e("[authface]......onFaceListDataSuccess.... $data")
        hideLoading()

        when {
            data == null -> {
                mFaceList.clear()
                NLog.e("[authface]........ face list data is null... $mFaceList")
            }
            data.isEmpty() -> {
                mFaceList.clear()
                NLog.e("[authface]........ face list data ... $mFaceList")
            }
            else -> {
                // Only the first entry's keyType is inspected — NOTE(review): presumably
                // the server never mixes key types within one payload; confirm upstream.
                if (data.first().keyType != AuthKeyTypeFace) return
                mFaceList = data
                NLog.e("[authface]........ face list data ... $mFaceList")
            }
        }
        mAdapter?.setList(mFaceList)
        updateStatusLayout(mFaceList.isEmpty())
    }

    /** Shows the empty-state placeholder only when no face records exist. */
    private fun updateStatusLayout(isEmpty: Boolean) {
        mBinding.recordFaceEmptyLayout.visibility = if (isEmpty) View.VISIBLE else View.GONE
    }

    /**
     * Shows the camera-permission rationale dialog. "Cancel" dismisses and warns via
     * toast; the positive button re-requests the CAMERA permission (requestCode 0).
     */
    private fun checkCameraPermission() {
        val rationale =
            getString(R.string.common_permission_camera_message, getString(R.string.app_name_smart))

        CustomMessageDialog(this)
            .setTitle(getString(R.string.common_permission_title))
            .setMessage(rationale)
            .setNegativeButton(getString(R.string.cancel_dialog_custom)) { d ->
                d.dismiss()
                showToast(getString(R.string.common_permission_cancel_alert))
            }
            .setPositiveButton(getString(R.string.common_permission_to_setting)) { d ->
                requestPermissions(arrayOf(Manifest.permission.CAMERA), 0)
                d.dismiss()
            }
            .show()
    }


    /**
     * Handles the CAMERA permission result (requestCode 0). Granted → launch the face
     * detection activity; denied with rationale → show our rationale dialog; denied
     * permanently → send the user to the app's system settings page.
     */
    override fun onRequestPermissionsResult(
        requestCode: Int,
        permissions: Array<out String>,
        grantResults: IntArray
    ) {
        if (requestCode == 0) {
            permissions.forEachIndexed { index, permission ->
                if (permission != Manifest.permission.CAMERA) return@forEachIndexed
                // Per the Android docs, grantResults may be empty when the request is
                // cancelled — guard the index before reading it.
                val granted = index < grantResults.size &&
                    grantResults[index] == PackageManager.PERMISSION_GRANTED
                if (granted) {
                    startActivityForResult(
                        Intent(this@Module_UserInfo_AuthFaceActivityNL, Module_UserInfo_FaceDetectionActivityNL::class.java),
                        1_000_002
                    )
                } else if (ActivityCompat.shouldShowRequestPermissionRationale(
                        this,
                        Manifest.permission.CAMERA
                    )
                ) {
                    // User denied but we may still explain why the permission is needed.
                    checkCameraPermission()
                } else {
                    // "Don't ask again" — only system settings can re-enable it now.
                    openAppSettings()
                }
            }
        }
        super.onRequestPermissionsResult(requestCode, permissions, grantResults)
    }

    /** Deep-links into this app's system settings page so the user can grant permissions manually. */
    private fun openAppSettings() {
        val detailsIntent = Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS)
        detailsIntent.data = Uri.fromParts("package", packageName, null)
        startActivity(detailsIntent)
    }
}