package com.xhkj.testpose

import android.content.Context
import android.graphics.Bitmap
import android.graphics.Matrix
import android.os.Bundle
import android.util.Log
import android.view.Surface
import android.widget.TextView
import androidx.appcompat.app.AppCompatActivity
import androidx.camera.core.AspectRatio
import androidx.camera.core.CameraSelector
import androidx.camera.core.ImageAnalysis
import androidx.camera.core.ImageCapture
import androidx.camera.core.ImageProxy
import androidx.camera.core.Preview
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.view.PreviewView
import androidx.core.content.ContextCompat
import com.google.mlkit.vision.face.FaceDetectorOptions
import com.jeremyliao.liveeventbus.LiveEventBus
import com.xhkj.mylibrary.mlkit.GraphicOverlay
import com.xhkj.mylibrary.mlkit.facedetector.FaceDetectorProcessor
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.MainScope
import kotlinx.coroutines.cancel
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min

/**
 * Estimates viewing distance from the eyes detected in a face
 * (ML Kit face detection on CameraX analyzer frames).
 */
class FaceActivity : AppCompatActivity() {

    private var backgroundExecutor: ExecutorService? = null
    private var imageCapture: ImageCapture? = null
    private var preview: Preview? = null
    private var cameraProvider: ProcessCameraProvider? = null
    private var mPreview: PreviewView? = null
    private var overlay: GraphicOverlay? = null
    private var tvNum: TextView? = null

    // Activity-scoped coroutine scope; cancelled in onDestroy.
    private val scope = MainScope()

    // Most recent camera frame converted to a Bitmap; recycled when replaced.
    private var imageBitmap: Bitmap? = null

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_face)
        mPreview = findViewById(R.id.face_preview)
        overlay = findViewById(R.id.face_overlay)
        tvNum = findViewById(R.id.tvNum)
        startCamera()
        initListener()
    }

    /** Subscribes to the face-count events published by the detector processor. */
    private fun initListener() {
        LiveEventBus.get<Int>("FACE_NUM").observe(this) {
            tvNum?.text = "$it"
        }
        LiveEventBus.get<Int>("FACE_IS_EMPTY").observe(this) {
            tvNum?.text = "没有检测到人脸"
        }
    }

    /**
     * Configures ML Kit face detection and binds the preview / analysis / capture
     * use cases to this Activity's lifecycle.
     *
     * Safe to call again after onPause has unbound the camera: existing use cases
     * are unbound first, and the background executor is reused rather than leaked.
     */
    private fun startCamera() {
        val options = FaceDetectorOptions.Builder()
            .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_NONE)
            .setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
            .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_NONE)
            .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_FAST)
            .setMinFaceSize(0.1f)
            .build()
        val imageProcessor = FaceDetectorProcessor(this@FaceActivity, options)

        // Reuse the executor when rebinding (e.g. from onResume) instead of
        // leaking the previous single-thread executor.
        backgroundExecutor = backgroundExecutor ?: Executors.newSingleThreadExecutor()

        val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
        cameraProviderFuture.addListener({
            cameraProvider = cameraProviderFuture.get()

            // Query the screen size once; the original called getWindowSize four times.
            val windowSize = getWindowSize(this)
            val targetRatio = aspectRatio(windowSize[0], windowSize[1])

            preview = Preview.Builder().build().also {
                it.setSurfaceProvider(mPreview?.surfaceProvider)
            }
            imageCapture = ImageCapture.Builder()
                .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
                .setTargetAspectRatio(targetRatio)
                .build()
            val imageAnalyzer = ImageAnalysis.Builder()
                .setTargetAspectRatio(targetRatio)
                .setTargetRotation(Surface.ROTATION_0)
                .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
                // RGBA output lets the frame be copied straight into an ARGB_8888 Bitmap.
                .setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_RGBA_8888)
                .build()
                .also { analysis ->
                    analysis.setAnalyzer(backgroundExecutor!!) { image ->
                        // Hand the frame to ML Kit for face detection.
                        detectHand(image, imageProcessor)
                    }
                }
            try {
                // Release any previously bound use cases before rebinding.
                cameraProvider?.unbindAll()
                cameraProvider?.bindToLifecycle(
                    this, CameraSelector.DEFAULT_FRONT_CAMERA,
                    preview, imageAnalyzer, imageCapture
                )
            } catch (e: Exception) {
                e.printStackTrace()
                Log.e("课程学习", "startCamera: 此相机绑定失败，说明没有被其他相机调用 ${e.message}")
            }
        }, ContextCompat.getMainExecutor(this))
    }

    /**
     * Converts an analyzer frame to a rotated Bitmap and passes it to ML Kit.
     *
     * Fixes over the original implementation:
     *  - the rotation is read BEFORE the ImageProxy is closed (it was read after,
     *    and the proxy was closed twice: once by use {} and once explicitly);
     *  - the proxy is now closed in a finally block, so cancellation during the
     *    throttling delay can no longer stall STRATEGY_KEEP_ONLY_LATEST;
     *  - the intermediate unrotated buffer is recycled instead of leaking.
     *
     * @param imageProxy     camera frame (RGBA_8888, see startCamera's output format)
     * @param imageProcessor ML Kit face detector wrapper
     *                       mlkit: https://developers.google.cn/ml-kit/vision/face-detection?hl=en
     */
    private fun detectHand(imageProxy: ImageProxy, imageProcessor: FaceDetectorProcessor) {
        scope.launch(Dispatchers.IO) {  // background thread only so we can throttle
            try {
                delay(500)  // throttle: analyze at most ~2 frames per second

                // Read frame metadata before the proxy is released.
                val rotationDegrees = imageProxy.imageInfo.rotationDegrees.toFloat()
                val bitmapBuffer = Bitmap.createBitmap(
                    imageProxy.width, imageProxy.height, Bitmap.Config.ARGB_8888
                )
                bitmapBuffer.copyPixelsFromBuffer(imageProxy.planes[0].buffer)

                val rotated = Bitmap.createBitmap(
                    bitmapBuffer, 0, 0, bitmapBuffer.width, bitmapBuffer.height,
                    Matrix().apply { postRotate(rotationDegrees) }, true
                )
                // createBitmap may return the source itself when no transform applies.
                if (rotated !== bitmapBuffer) {
                    bitmapBuffer.recycle()
                }

                // NOTE(review): recycling the previous frame assumes the processor has
                // finished with it (the 500 ms throttle makes this likely) — confirm
                // against FaceDetectorProcessor's threading.
                imageBitmap?.recycle()
                imageBitmap = rotated

                // Start face detection on the converted frame.
                overlay?.let { ov ->
                    ov.clear()
                    ov.setImageSourceInfo(rotated.width, rotated.height, false)
                    imageProcessor.processBitmap(rotated, ov)
                }
            } finally {
                // Always release the frame — even on cancellation — otherwise the
                // analyzer stops receiving new frames.
                imageProxy.close()
            }
        }
    }

    /** Chooses RATIO_4_3 or RATIO_16_9, whichever is closer to the given dimensions. */
    private fun aspectRatio(width: Int, height: Int): Int {
        val ratio43 = 4.0 / 3.0
        val ratio169 = 16.0 / 9.0
        val previewRatio = max(width, height).toDouble() / min(width, height)
        return if (abs(previewRatio - ratio43) <= abs(previewRatio - ratio169)) {
            AspectRatio.RATIO_4_3
        } else {
            AspectRatio.RATIO_16_9
        }
    }

    /** Returns the screen resolution as [widthPixels, heightPixels]. */
    private fun getWindowSize(context: Context): Array<Int> {
        val displayMetrics = context.resources.displayMetrics
        return arrayOf(displayMetrics.widthPixels, displayMetrics.heightPixels)
    }

    override fun onResume() {
        super.onResume()
        // onPause unbinds all use cases; rebind when returning to the foreground.
        // (The original only submitted an empty no-op task to the executor here,
        // so the camera never restarted after a pause/resume cycle.)
        if (cameraProvider != null) {
            startCamera()
        }
    }

    override fun onPause() {
        super.onPause()
        // Stop the camera preview.
        preview?.setSurfaceProvider(null)

        // Release the camera use cases.
        cameraProvider?.unbindAll()
    }

    override fun onDestroy() {
        super.onDestroy()
        // Cancel in-flight detection coroutines before tearing down resources.
        scope.cancel()
        // shutdown() without awaitTermination: the original awaited with
        // Long.MAX_VALUE on the main thread, which can block/ANR indefinitely.
        backgroundExecutor?.shutdown()
        imageBitmap?.recycle()
        imageBitmap = null
    }
}