package com.jackykeke.opengldemo.utils

import android.content.Context
import android.graphics.Bitmap
import android.net.Uri
import android.util.DisplayMetrics
import android.util.Log
import androidx.camera.core.*
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.view.PreviewView
import androidx.core.content.ContextCompat
import androidx.lifecycle.LifecycleOwner
import com.google.common.util.concurrent.ListenableFuture
//import com.qr.util.SettingManager
//import com.qr.util.qr.QrLog
import java.io.File
import java.nio.ByteBuffer
import java.text.SimpleDateFormat
import java.util.*
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min


/** Callback invoked with the average luminance (0-255) of an analyzed camera frame. */
typealias LumaListener = (luma: Double) -> Unit

/**
 * Thin wrapper around CameraX that wires up preview, still capture and image
 * analysis, bound to a caller-supplied lifecycle.
 *
 * Usage: call [initCamera] first (everything else is a no-op or returns early
 * until it has run), then [takePhoto], [focus], [switchCamera], etc.
 * Call [shutdown] when the camera is no longer needed to release the
 * background executors.
 *
 * Not thread-safe: all public methods are expected to be called from the main
 * thread (CameraX itself posts results back on the executors supplied here).
 */
class CameraUtil() {

    companion object {
        const val TAG = "CameraUtil"

        // SimpleDateFormat pattern used to build unique photo file names.
        const val FILENAME = "yyyy-MM-dd-HH-mm-ss-SSS"
        const val PHOTO_EXTENSION = ".jpg"

        // Reference values used by aspectRatio() to pick the closest CameraX AspectRatio.
        const val RATIO_4_3_VALUE = 4.0 / 3.0
        const val RATIO_16_9_VALUE = 16.0 / 9.0
    }

    private var cameraSelector: CameraSelector? = null

    // Which lens to use; defaults to the back-facing camera.
    private var lensFacing = CameraSelector.LENS_FACING_BACK
    private var imageCapture: ImageCapture? = null
    private var cameraExecutor: ExecutorService? = null
    private var camera: Camera? = null
    private var outputDirectory: File? = null

    private var cameraProvider: ProcessCameraProvider? = null
    private var preview: Preview? = null
    private lateinit var previewView: PreviewView
    private var imageAnalyzer: ImageAnalysis? = null
    private var imageAnalyzerExecutor: ExecutorService? = null

    /**
     * Initializes the camera and binds the use cases (preview, still capture,
     * image analysis) to [lifecycleOwner]. Must be called before any other
     * method on this class; used together with a [PreviewView].
     *
     * @param view the PreviewView that will display the camera feed; it must
     *        already be attached to a display (its `display` is read here).
     * @param context used to obtain the camera provider and output directory.
     * @param lifecycleOwner the activity/fragment whose lifecycle the camera
     *        use cases are bound to.
     * @param mLensFacing [CameraSelector.LENS_FACING_BACK] (default) or
     *        [CameraSelector.LENS_FACING_FRONT].
     */
    fun initCamera(
        view: PreviewView,
        context: Context,
        lifecycleOwner: LifecycleOwner,
        mLensFacing: Int = CameraSelector.LENS_FACING_BACK
    ) {
        lensFacing = mLensFacing
        previewView = view
        outputDirectory = getOutputDirectory(context)
        cameraExecutor = Executors.newSingleThreadExecutor()

        // Use the real display metrics so the chosen aspect ratio matches the screen.
        val metrics = DisplayMetrics().also {
            previewView.display.getRealMetrics(it)
        }
        Log.d(TAG, "Screen metrics: ${metrics.widthPixels} x ${metrics.heightPixels}")
        val screenAspectRatio = aspectRatio(metrics.widthPixels, metrics.heightPixels)
        Log.d(TAG, "Preview aspect ratio: $screenAspectRatio")
        val rotation = view.display.rotation

        cameraSelector = CameraSelector.Builder().requireLensFacing(lensFacing).build()
        val cameraProviderFuture = ProcessCameraProvider.getInstance(context)
        cameraProviderFuture.addListener(Runnable {
            cameraProvider = cameraProviderFuture.get()

            imageCapture = ImageCapture.Builder()
                .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
                .setTargetAspectRatio(screenAspectRatio)
                .setTargetRotation(rotation)
                .setFlashMode(ImageCapture.FLASH_MODE_AUTO)
                .build()

            preview = Preview.Builder()
                .setTargetAspectRatio(screenAspectRatio)
                .setTargetRotation(rotation)
                .build()

            imageAnalyzerExecutor = Executors.newSingleThreadExecutor()
            // No analyzer is attached yet; see preview() below, which is
            // currently disabled. The use case is still bound so an analyzer
            // can be attached later without rebinding.
            imageAnalyzer = ImageAnalysis.Builder()
                .setTargetAspectRatio(screenAspectRatio)
                .setImageQueueDepth(1)
                .setTargetRotation(rotation)
                .build()

            // Rebinding (e.g. from switchCamera) requires dropping the old use cases.
            cameraProvider?.unbindAll()
            try {
                camera = cameraProvider?.bindToLifecycle(
                    lifecycleOwner,
                    cameraSelector!!,
                    preview,
                    imageCapture,
                    imageAnalyzer
                )
                camera?.cameraControl?.apply {
                    // NOTE(review): the view may not be laid out yet here, so
                    // width/height can still be 0. See also the note in focus()
                    // about the metering-point coordinates. TODO confirm intent.
                    val width = previewView.width
                    val height = previewView.height
                    focus(width.toFloat(), height.toFloat())
                }
                preview?.setSurfaceProvider(previewView.createSurfaceProvider())

            } catch (exc: Exception) {
                Log.e(TAG, "Use case binding failed", exc)
            }

        }, ContextCompat.getMainExecutor(context))
    }

    /**
     * Captures a still photo to a timestamped file in the output directory.
     * The result (or error) is reported through the callback registered with
     * [setPhotoCallBack]. Safe to call before [initCamera]; it simply logs and
     * returns instead of crashing on the previously-used `!!` operators.
     */
    fun takePhoto() {
        val capture = imageCapture ?: return
        val directory = outputDirectory
        val executor = cameraExecutor
        if (directory == null || executor == null) {
            // initCamera() has not run/completed yet.
            Log.w(TAG, "takePhoto called before initCamera; ignoring")
            return
        }

        val photoFile = createFile(directory, FILENAME, PHOTO_EXTENSION)
        val metadata = ImageCapture.Metadata().apply {
            // Mirror front-camera shots so the saved image matches what the user saw.
            isReversedHorizontal = lensFacing == CameraSelector.LENS_FACING_FRONT
        }

        val outputFileOptions = ImageCapture.OutputFileOptions.Builder(photoFile)
            .setMetadata(metadata)
            .build()
        capture.takePicture(
            outputFileOptions,
            executor,
            object : ImageCapture.OnImageSavedCallback {
                override fun onImageSaved(outputFileResults: ImageCapture.OutputFileResults) {
                    val savedUri = outputFileResults.savedUri ?: Uri.fromFile(photoFile)
                    callback?.onPhotoCallBack(savedUri)
                    Log.i(TAG, savedUri.toString())
                }

                override fun onError(exception: ImageCaptureException) {
                    Log.i(TAG, exception.toString())
                }
            })
    }

    /** Detaches the preview surface, freezing the on-screen preview. */
    fun forceStopPreview() {
        // Guard: previewView is lateinit, and a safe call does NOT protect
        // against an uninitialized lateinit property.
        if (!::previewView.isInitialized) return
        previewView.post {
            preview?.setSurfaceProvider(null)
        }
    }

    /** Reattaches the preview surface after a 1-second delay. */
    fun forceRestartPreview() {
        if (!::previewView.isInitialized) return
        previewView.postDelayed({
            preview?.setSurfaceProvider(previewView.createSurfaceProvider())
        }, 1000)
    }

    /**
     * Turns the torch on/off. Does not work on some Huawei models.
     *
     * @param boolean true to enable the torch, false to disable it.
     */
    fun setOpenTorch(boolean: Boolean) {
        Log.i(TAG, "setOpenTorch---hasFlashUnit" + camera?.cameraInfo?.hasFlashUnit())
        camera?.cameraControl?.enableTorch(boolean)
    }

    /**
     * Toggles between front and back cameras and re-initializes the camera
     * with the new lens facing.
     */
    fun switchCamera(view: PreviewView, context: Context, lifecycleOwner: LifecycleOwner) {
        lensFacing = if (CameraSelector.LENS_FACING_FRONT == lensFacing) {
            CameraSelector.LENS_FACING_BACK
        } else {
            CameraSelector.LENS_FACING_FRONT
        }
        initCamera(view, context, lifecycleOwner, lensFacing)
    }

    /**
     * Shuts down the background executors. Call when the camera is no longer
     * needed (e.g. from onDestroy).
     */
    fun shutdown() {
        cameraExecutor?.shutdown()
        // Also release the analyzer executor, which was previously leaked.
        imageAnalyzerExecutor?.shutdown()
    }

    /** Misspelled legacy name kept for source compatibility with existing callers. */
    @Deprecated("Misspelled; use shutdown()", ReplaceWith("shutdown()"))
    fun shautdown() = shutdown()

    /**
     * Maps a screen size to the closest supported CameraX aspect ratio
     * ([AspectRatio.RATIO_4_3] or [AspectRatio.RATIO_16_9]).
     */
    private fun aspectRatio(width: Int, height: Int): Int {
        val previewRatio = max(width, height).toDouble() / min(width, height)
        if (abs(previewRatio - RATIO_4_3_VALUE) <= abs(previewRatio - RATIO_16_9_VALUE)) {
            return AspectRatio.RATIO_4_3
        }
        return AspectRatio.RATIO_16_9
    }

    /** Builds a timestamped file (e.g. "2023-01-01-12-00-00-000.jpg") in [baseFolder]. */
    private fun createFile(baseFolder: File, format: String, extension: String) =
        File(
            baseFolder, SimpleDateFormat(format, Locale.US)
                .format(System.currentTimeMillis()) + extension
        )

    /**
     * Returns the directory photos are saved to: the first external media dir
     * (under a "CameraUtil" subfolder) when available, otherwise the app's
     * internal files dir.
     */
    private fun getOutputDirectory(context: Context): File {
        val appContext = context.applicationContext
        val mediaDir = context.externalMediaDirs.firstOrNull()?.let {
            File(it, TAG).apply { mkdirs() }
        }
        return if (mediaDir != null && mediaDir.exists())
            mediaDir else appContext.filesDir
    }

    /** Callback notified with the saved photo's Uri after [takePhoto] succeeds. */
    interface onPhotoCallBack {
        fun onPhotoCallBack(uri: Uri)
    }

    var callback: onPhotoCallBack? = null

    /** Registers the callback invoked when a photo has been saved. */
    fun setPhotoCallBack(mCallback: onPhotoCallBack) {
        callback = mCallback
    }

    /** Callback for preview-frame bitmaps (currently unused; see [preview]). */
    interface OnPreviewCallback {
        fun bitmapCallBack(bm: Bitmap)
    }

    /**
     * Preview-frame hook. Currently a no-op: the frame-analyzer pipeline that
     * would feed it is disabled, so the callback is never invoked.
     */
    fun preview(onPreviewCallback: OnPreviewCallback) {
        // Intentionally empty until a frame analyzer is attached to imageAnalyzer.
    }

    /**
     * Manual focus: triggers a focus/metering pass that auto-cancels after 6s.
     *
     * NOTE(review): the factory is sized with (x, y) AND the point is created
     * at (x, y), which always resolves to the normalized corner (1, 1) no
     * matter where the user tapped. For true tap-to-focus the factory should
     * probably be sized to the preview dimensions instead — confirm the
     * intended behavior before changing.
     */
    fun focus(x: Float, y: Float) {

        val factory: MeteringPointFactory = SurfaceOrientedMeteringPointFactory(x, y)
        val point = factory.createPoint(x, y)
        val action = FocusMeteringAction.Builder(
            point,
            FocusMeteringAction.FLAG_AF
        ) // automatically cancels focus-and-metering after the duration below
            .setAutoCancelDuration(6, TimeUnit.SECONDS)
            .build()

        val future: ListenableFuture<FocusMeteringResult>? =
            camera?.cameraControl?.startFocusAndMetering(action)
        // Guard: addListener rejects a null executor, and cameraExecutor is
        // null until initCamera() has run.
        val executor = cameraExecutor ?: return
        future?.addListener(Runnable {
            try {
                val result = future.get()
                if (result.isFocusSuccessful) {
                    Log.i(TAG, "focus  Success")
                } else {
                    Log.i(TAG, "focus  Failed")
                }
            } catch (e: Exception) {
                // Previously swallowed silently; at least record the failure.
                Log.w(TAG, "focus result unavailable", e)
            }
        }, executor)
    }

    /**
     * Our custom image analysis class.
     *
     * <p>All we need to do is override the function `analyze` with our desired operations. Here,
     * we compute the average luminosity of the image by looking at the Y plane of the YUV frame.
     */
    private class LuminosityAnalyzer(listener: LumaListener? = null) : ImageAnalysis.Analyzer {
        private val frameRateWindow = 8
        // java.util.ArrayDeque (via the file's java.util.* import): newest
        // timestamp is pushed to the front.
        private val frameTimestamps = ArrayDeque<Long>(5)
        private val listeners = ArrayList<LumaListener>().apply { listener?.let { add(it) } }
        private var lastAnalyzedTimestamp = 0L

        // Moving-average FPS over the last `frameRateWindow` frames; -1.0 until computed.
        var framesPerSecond: Double = -1.0
            private set

        /**
         * Used to add listeners that will be called with each luma computed
         */
        fun onFrameAnalyzed(listener: LumaListener) = listeners.add(listener)

        /**
         * Helper extension function used to extract a byte array from an image plane buffer
         */
        private fun ByteBuffer.toByteArray(): ByteArray {
            rewind()    // Rewind the buffer to zero
            val data = ByteArray(remaining())
            get(data)   // Copy the buffer into a byte array
            return data // Return the byte array
        }

        /**
         * Analyzes an image to produce a result.
         *
         * <p>The caller is responsible for ensuring this analysis method can be executed quickly
         * enough to prevent stalls in the image acquisition pipeline. Otherwise, newly available
         * images will not be acquired and analyzed.
         *
         * <p>The image passed to this method becomes invalid after this method returns. The caller
         * should not store external references to this image, as these references will become
         * invalid.
         *
         * @param image image being analyzed VERY IMPORTANT: Analyzer method implementation must
         * call image.close() on received images when finished using them. Otherwise, new images
         * may not be received or the camera may stall, depending on back pressure setting.
         *
         */
        override fun analyze(image: ImageProxy) {
            // If there are no listeners attached, we don't need to perform analysis
            if (listeners.isEmpty()) {
                image.close()
                return
            }

            // Keep track of frames analyzed
            val currentTime = System.currentTimeMillis()
            frameTimestamps.push(currentTime)

            // Compute the FPS using a moving average
            while (frameTimestamps.size >= frameRateWindow) frameTimestamps.removeLast()
            val timestampFirst = frameTimestamps.peekFirst() ?: currentTime
            val timestampLast = frameTimestamps.peekLast() ?: currentTime
            framesPerSecond = 1.0 / ((timestampFirst - timestampLast) /
                    frameTimestamps.size.coerceAtLeast(1).toDouble()) * 1000.0

            // Analysis could take an arbitrarily long amount of time
            // Since we are running in a different thread, it won't stall other use cases

            lastAnalyzedTimestamp = frameTimestamps.first

            // Since format in ImageAnalysis is YUV, image.planes[0] contains the luminance plane
            val buffer = image.planes[0].buffer

            // Extract image data from callback object
            val data = buffer.toByteArray()

            // Convert the data into an array of pixel values ranging 0-255
            val pixels = data.map { it.toInt() and 0xFF }

            // Compute average luminance for the image
            val luma = pixels.average()

            // Call all listeners with new value
            listeners.forEach { it(luma) }

            image.close()
        }
    }


}

