package com.andova.camera

import android.content.Context
import android.graphics.*
import android.opengl.GLES30
import android.opengl.GLUtils
import android.os.Build
import android.os.Bundle
import android.os.Environment
import android.view.View
import androidx.annotation.RequiresApi
import androidx.appcompat.app.AppCompatActivity
import com.CpuUtils
import com.FaceNeuralNetwork
import com.andova.ext.e
import com.andova.ext.getAsset
import com.andova.ext.i
import com.andova.glcamera.FRAME_ACTION_LAZY_NORMAL
import com.andova.glcamera.FRAME_ACTION_LAZY_PREVIEW
import com.andova.glcamera.StateCallback
import com.andova.glcamera.device.*
import com.andova.glcamera.texture.TextureFrameCallback
import com.andova.image.glesInitialize
import com.andova.opengles.R
import com.andova.opengles.databinding.AcCameraPreviewBinding
import com.face.Data
import com.google.gson.Gson
import com.google.gson.JsonSyntaxException
import io.reactivex.Observable
import io.reactivex.Observer
import io.reactivex.android.schedulers.AndroidSchedulers
import io.reactivex.disposables.Disposable
import io.reactivex.schedulers.Schedulers
import java.io.ByteArrayOutputStream
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.file.Files
import java.nio.file.Paths
import java.util.*
import java.util.concurrent.TimeUnit
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min

// Rotation (degrees) passed to FaceNeuralNetwork.FaceDetect/GetFaceFeautre for every
// frame — presumably the sensor-to-display rotation; TODO confirm against native docs.
const val ANGLE = 90
// Divisor applied to preview/face coordinates before drawing the overlay (1f = no scaling).
const val SCALE = 1f

/**
 * Demo activity that drives two GL camera previews (back + front) without the
 * image-reader pipeline. The native face library is activated in [onCreate];
 * both cameras are started in [onResume] and stopped in [onPause].
 */
class GLCameraWithoutImageActivity : AppCompatActivity() {
    private lateinit var mBinding: AcCameraPreviewBinding

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Activate and configure the native face-recognition library before any
        // camera frame can reach it. The argument is the vendor license key.
        FaceNeuralNetwork.ActiveLibrary(
            this,
            "FA3075E245143FAF68BF5601CE577EB42A5B8E12FE5A3EF6E48212B1B748E9AC01834C2DAA8D8F4C3B38041435DEEE19942F11B0F088E3678FB0A699583EF826A9CB29AEDE77347EA8A62E6A4699DCFADBF09CC8160FD023C37FA57805A9AA82F19F5A301C62D389032B36A97A699006FE396BED0B12A7012EC9DC108277CCA969C36EAF076A3A09A7D7D6CB5B227012C25970A379736C09366B8D9A7D75ABF1F99DB71C6E4D63EA7E075319190C93127BE43A1DF6368BE9F115B0D0050CA10D62DB41F455490C11E3DB22667595D44232E2574449B09FE7A50BB2B3078C1D7D40280E8870AF063DA6B6F1DD627A34EF1B0EBEA72B8D4FFD845E4766C05D0600"
        )
        FaceNeuralNetwork.SetThreadCount(2)
        FaceNeuralNetwork.SetUseEnable(FaceNeuralNetwork.EnableQuality + FaceNeuralNetwork.EnableFaceLandarks + FaceNeuralNetwork.EnableMonocularLiving)
        mBinding = AcCameraPreviewBinding.inflate(layoutInflater)
        setContentView(mBinding.root)
    }

    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    override fun onResume() {
        super.onResume()
        // Back camera: renders through TextureFrame (face detect + rect overlay),
        // smallest available 4:3 preview/picture sizes, preview callback enabled.
        mBinding.backcamera.useTextureRender(
            TextureFrame(this, SCALE),
            object : CameraBuilder() {
                override fun stateCallback(): StateCallback? = null
                override fun previewSize(sizes: SortedSet<Size>?): Size? = sizes?.first()
                override fun pictureSize(sizes: SortedSet<Size>?): Size? = sizes?.first()
            }.cameraId(CAMERA_ID_0).aspectRatio(ASPECT_RATIO_4_3).usePreviewCallback(true)
        )
            .lazyCallback(10L)
            // NOTE(review): the second displayOrientation call appears to override the
            // first — confirm whether 270 is intentional leftover debug code.
            .displayOrientation(270)
            .displayOrientation(0)
            .action(FRAME_ACTION_LAZY_NORMAL)
            .start()
        // Front camera: plain preview (no frame callback object), lazier cadence.
        mBinding.frontcamera.useTextureRender(
            null,
            object : CameraBuilder() {
                override fun stateCallback(): StateCallback? = null
                override fun previewSize(sizes: SortedSet<Size>?): Size? = sizes?.first()
                override fun pictureSize(sizes: SortedSet<Size>?): Size? = sizes?.first()
            }.cameraId(CAMERA_ID_1).aspectRatio(ASPECT_RATIO_4_3).usePreviewCallback(false)
        )
            .lazyCallback(200L)
            .displayOrientation(270)
            .displayOrientation(0)
            .action(FRAME_ACTION_LAZY_PREVIEW)
//                .pause()
            .start()
//        backcamera.useBufferRender(BufferFrame(this), ACameraDeviceBuilder().cameraDirection(CAMERA_DIRECTION_BACK))
    }

    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    override fun onPause() {
        super.onPause()
        mBinding.backcamera.stop()
        mBinding.frontcamera.stop()
    }

    /**
     * Debug helper: logs per-core CPU frequencies once per second.
     * NOTE(review): the subscription is only disposed from onComplete, which an
     * infinite interval never emits — if this is ever wired up, tie the
     * Disposable to the activity lifecycle.
     */
    private fun intervalCpuFrequency() {
        Observable.interval(0L, 1L, TimeUnit.SECONDS)
            .subscribeOn(Schedulers.io())
            .observeOn(AndroidSchedulers.mainThread())
            .subscribe(object : Observer<Long> {
                private var d: Disposable? = null
                override fun onComplete(): Unit = run { d?.dispose() }
                override fun onSubscribe(d: Disposable) = run { this.d = d }
                override fun onError(ex: Throwable): Unit = run { e("task interval error, $ex") }
                override fun onNext(t: Long) {
                    val list = CpuUtils.getCpuCurFreq()
                    val sb = StringBuilder()
                    for (v in list) {
                        sb.append(v)
                        sb.append(',')
                    }
                    i("scaling_cur_freq -> $sb")
                }
            })
    }

    /**
     * Debug helper: reads a raw YUV420SP dump named "1" from external storage,
     * decodes it, then runs landmark detection and feature extraction on it.
     * Dimensions are hard-coded to match the dumped frame.
     */
    @RequiresApi(Build.VERSION_CODES.O)
    private fun readYuvFile() {
//        val width = 480
//        val height = 640
        val width = 80
        val height = 142
        val yuv = try {
            Files.readAllBytes(Paths.get("${Environment.getExternalStorageDirectory().path}/1"))
        } catch (e: java.nio.file.NoSuchFileException) {
            // Bug fix: Files.readAllBytes throws java.nio.file.NoSuchFileException;
            // the unqualified name resolved to kotlin.io.NoSuchFileException, which
            // is a different type, so a missing file crashed instead of returning.
            return
        }
        decodeToOriginalBitmap(yuv, width, height)
        // clone() below: the native calls may consume/modify the buffer in place.
        val points = faceDetectPoint(yuv.clone(), width, height)
        i("points -> ${points?.toList()}")
        val feature = FaceNeuralNetwork.GetFaceFeautre(
            yuv.clone(), width * height * 3 / 2,
            width, height,
            FaceNeuralNetwork.ImageYuv420SP, ANGLE, points ?: return
        )
        i("feature -> $feature")
    }

    override fun onWindowFocusChanged(hasFocus: Boolean) {
        super.onWindowFocusChanged(hasFocus)
        if (!hasFocus) return
        // Sticky-immersive fullscreen whenever the window regains focus.
        window.decorView.systemUiVisibility = (View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                or View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                or View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
                or View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
                or View.SYSTEM_UI_FLAG_FULLSCREEN
                or View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY)
        val dm = resources.displayMetrics
        val widthPx = dm.widthPixels
        val heightPx = dm.heightPixels
        i("device width pixel $widthPx, device height pixel $heightPx")
    }
}

/**
 * Per-frame task/render callback for the back camera: runs native face
 * detection on each NV21 frame, remembers the largest detected face, and
 * draws a rectangle texture over it via [FaceRectTexture].
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
class TextureFrame(context: Context, private val scale: Float) : TextureFrameCallback {
    private val gson = Gson()
    // Largest face found in the most recent task frame; consumed on the GL thread.
    private var faceData: Data? = null
    private val texture = FaceRectTexture(context, scale)

    /**
     * Converts a packed BGR888 buffer to RGB888 by swapping the B and R bytes
     * of each pixel. Returns a new array; the input is left untouched.
     */
    private fun bgrToRgb(bgr: ByteArray): ByteArray {
        val ret = ByteArray(bgr.size)
        for (i in 0 until bgr.size / 3) {
            // Bug fix: the original indexed ret[i]/ret[i+1]/ret[i+2] while i
            // advanced by 1, so successive pixels overwrote each other. Each
            // 3-byte pixel must start at offset 3*i.
            val base = i * 3
            ret[base] = bgr[base + 2]
            ret[base + 1] = bgr[base + 1]
            ret[base + 2] = bgr[base]
        }
        return ret
    }

    /**
     * Background-thread frame hook. `type` is a hard-coded debug switch
     * selecting one of several experiments; only branch 3 (detect + feature
     * extraction + overlay) is live. Always returns null.
     */
    override fun onTaskFrame(imageWidth: Int, imageHeight: Int, nv21: ByteArray, handlerResult: Any?): Any? {
//        printCounter()
//        decodeToOriginalBitmap(imageBytes,imageWidth, imageHeight)
//        return

        val type = 3
        val write = false
        val decode = false
        when (type) {
            0 -> {
                // Experiment: raw buffer -> RGB_565 bitmap round trip.
                val bitmap = Bitmap.createBitmap(imageWidth, imageHeight, Bitmap.Config.RGB_565)
                bitmap.copyPixelsFromBuffer(ByteBuffer.wrap(nv21))
                bitmap.recycle()
            }
            1 -> {
                if (decode) decodeToOriginalBitmap(nv21, imageWidth, imageHeight)
                texture.onTaskFrame(imageWidth, imageHeight, nv21, null)
            }
            2 -> {
                // Experiment: detection in RGB888 mode.
                // NOTE(review): nv21 is YUV and its size is w*h*3/2, not w*h*3 —
                // this debug branch looks inconsistent; confirm before enabling.
                try {
                    val sss = FaceNeuralNetwork.FaceDetect(
                        nv21,
                        imageWidth * imageHeight * 3,
                        imageWidth, imageHeight,
                        FaceNeuralNetwork.ImageRGB888, ANGLE
                    )
                    println(sss)
                } catch (ignore: Exception) {
                    return null
                }
            }
            3 -> {
                if (write) writeToFile(nv21)
                if (decode) decodeToOriginalBitmap(nv21, imageWidth, imageHeight)
                val points = faceDetectPoint(nv21, imageWidth, imageHeight)
//                points?.apply {
//                    act.backcamera.action(FRAME_ACTION_NORMAL)
//                    act.frontcamera.resume()
//                } ?: apply {
//                    act.backcamera.action(FRAME_ACTION_LAZY_BUFFER)
//                    act.frontcamera.pause()
//                }
                val feature = FaceNeuralNetwork.GetFaceFeautre(
                    nv21, imageWidth * imageHeight * 3 / 2,
                    imageWidth, imageHeight,
                    FaceNeuralNetwork.ImageYuv420SP, ANGLE, points ?: return null
                )
                i("feature -> $feature")
                texture.onTaskFrame(imageWidth, imageHeight, nv21, null)
            }
        }
        return null
    }

    /**
     * Runs detection over the frame, keeps the largest face (by area), caches
     * it in [faceData] for the render pass and returns its landmark points as
     * floats, or null when no valid face is present.
     */
    private fun faceDetectPoint(bytes: ByteArray, previewWidth: Int, previewHeight: Int, draw: Boolean = true): FloatArray? {
        val dataBean = facePoint(bytes, previewWidth, previewHeight) ?: return null
        dataBean.data?.apply { if (size <= 0) return null } ?: return null
        // Pick the face with the largest area; the first one wins on ties.
        var maxData = dataBean.data?.get(0) ?: return null
        for (data in dataBean.data ?: return null) {
            if (data.area <= maxData.area) continue
            maxData = data
        }
        if (!verifyPointData(maxData)) return null
        try {
//            val yaw = maxData.yaw.toFloat()
//            recBean(abs(maxData.x1 - maxData.x2), yaw, maxData.point)
//            if (maxData.Quality.toFloat() < MIN_RECOGNITION_QUALITY) {
//                v("this flame's quality is so low -> ${maxData.Quality}, ignore it.")
//                mIgnore = true
//            }
//            if (yaw > MAX_RECOGNITION_YAW || yaw < MIN_RECOGNITION_YAW) {
//                v("this flame's yaw is so large -> ${maxData.yaw}, ignore it.")
//                mIgnore = true
//            }
//            mask = hasMask(maxData.Mask.toInt() == 1)
        } catch (e: NumberFormatException) {
            // Quality/yaw filtering above is disabled; kept for reference.
        }
//        if (draw) drawFaceRect(maxData)
        faceData = maxData
        val points = maxData.point
        val pointArray = FloatArray(points.size)
        for (i in points.indices) pointArray[i] = points[i].toFloat()
        return pointArray
    }

    /**
     * Calls the native detector and parses its JSON payload. Returns null on
     * native failure, an implausibly short result, or malformed JSON.
     */
    private fun facePoint(bytes: ByteArray, width: Int, height: Int): com.face.FaceDTO? {
        val sss: String
        try {
            sss = FaceNeuralNetwork.FaceDetect(
                bytes, bytes.size, width, height,
                FaceNeuralNetwork.ImageYuv420SP, ANGLE
            )
        } catch (ignore: Exception) {
            return null
        }
        // Parens for clarity only: Kotlin's elvis already binds tighter than `<`.
        if ((sss?.length ?: 0) < 20) return null
        val pointBean: com.face.FaceDTO
        try {
            pointBean = gson.fromJson(sss, com.face.FaceDTO::class.java)
        } catch (ignore: JsonSyntaxException) {
            return null
        }
        return pointBean
    }

    private fun verifyPointData(dataBean: Data): Boolean {
        // TODO: distance validation disabled — product reported that recognition
        // became too insensitive with it on.
        return true
    }

    override fun onSurfaceAvailable(width: Int, height: Int) {
        i("${Thread.currentThread()} BufferFrame onSurfaceAvailable -> width:$width,height:$height")
        texture.onSurfaceAvailable(width, height)
    }

    override fun onTextureFrame(surfaceTexture: SurfaceTexture?, taskResult: Any?) {
        // Only draw when the task thread has produced a face for this frame.
        texture.onDrawFrame(faceData ?: return)
    }

    override fun onSurfaceSizeChanged(width: Int, height: Int) {
        i("${Thread.currentThread()} BufferFrame onSurfaceSizeChanged -> width:$width,height:$height")
        texture.onSurfaceSizeChanged(width, height)
    }

    override fun onSurfaceDestroyed(surfaceTexture: SurfaceTexture?) {
        i("${Thread.currentThread()} BufferFrame onSurfaceDestroyed")
        texture.onSurfaceDestroyed(surfaceTexture)
    }
}

/**
 * Dumps [bytes] into external storage, naming the file after the current
 * epoch-millisecond timestamp.
 */
fun writeToFile(bytes: ByteArray) {
    val targetDir: File = Environment.getExternalStorageDirectory()
    val fileName = System.currentTimeMillis().toString()
    createFile(File(targetDir, fileName), bytes)
}

/**
 * Writes [bytes] to [file], creating the file first when it does not exist.
 *
 * This is a best-effort debug dump helper: IO failures are printed and
 * swallowed rather than propagated.
 */
fun createFile(file: File, bytes: ByteArray) {
    try {
        // Give up silently when the file is absent and cannot be created.
        if (!file.exists() && !file.createNewFile()) return
        // Bug fix: the original closed the stream outside any finally block, so
        // a failing write() leaked the descriptor. use {} always closes it.
        FileOutputStream(file).use { fos ->
            fos.write(bytes, 0, bytes.size)
            fos.flush()
        }
    } catch (e: IOException) {
        e.printStackTrace()
    }
}

// Axis-aligned rectangle in pixel coordinates, identified by its four edges.
data class RectC(val left: Int, val top: Int, val right: Int, val bottom: Int)
// GL viewport description: origin (x, y) plus width/height, all in pixels.
data class ViewportC(val x: Int, val y: Int, val width: Int, val height: Int)
/**
 * GLES30 renderer that draws a pre-baked face-rectangle image
 * (R.raw.img_face_rect_v2) over the detected face. [onTaskFrame] records the
 * device/preview dimensions; [onDrawFrame] maps the face rect into a viewport
 * and blits the texture there with premultiplied-alpha blending.
 */
class FaceRectTexture(private val context: Context, private val scale: Float) : TextureFrameCallback {
    private var deviceWidth = 0
    private var deviceHeight = 0
    // Preview frame size after dividing by [scale]; used for rect mapping.
    private var imageWidth = 0
    private var imageHeight = 0

    private var texProgram = 0
    private var textureIds = IntArray(1)

    override fun onTaskFrame(imageWidth: Int, imageHeight: Int, nv21: ByteArray, handlerResult: Any?) {
        val dm = context.resources.displayMetrics
        this.deviceWidth = dm.widthPixels
        this.deviceHeight = dm.heightPixels
        this.imageWidth = (imageWidth / scale).toInt()
        this.imageHeight = (imageHeight / scale).toInt()
    }

    override fun onTextureFrame(surfaceTexture: SurfaceTexture?, taskResult: Any?) {}
    override fun onSurfaceSizeChanged(width: Int, height: Int) {}

    /**
     * Compiles the shader program and uploads the rectangle bitmap into a
     * 2D texture. Must run on the GL thread.
     */
    override fun onSurfaceAvailable(width: Int, height: Int) {
        texProgram = glesInitialize(getAsset(context, "glsl/vert_face_rect.glsl"), getAsset(context, "glsl/frag_face_rect.glsl"))
        GLES30.glGenTextures(textureIds.size, textureIds, 0)
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textureIds[0])
        GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_NEAREST)
        GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR)
        GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE)
        GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE)
        val instr = context.resources.openRawResource(R.raw.img_face_rect_v2)
        val bmp = try {
            BitmapFactory.decodeStream(instr)
        } catch (e: IOException) {
            null
        } finally {
            try {
                instr.close()
            } catch (t: IOException) {
                // Closing a raw-resource stream failing is harmless; ignore.
            }
        }
        // Decode failure: leave the texture empty and unbind.
        bmp ?: return GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, GLES30.GL_NONE)
        GLUtils.texImage2D(GLES30.GL_TEXTURE_2D, 0, bmp, 0)
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, GLES30.GL_NONE)
    }

    override fun onSurfaceDestroyed(surfaceTexture: SurfaceTexture?) {
        GLES30.glDeleteTextures(textureIds.size, textureIds, 0)
    }

    /**
     * Draws the rectangle texture over the face described by [data].
     * Must run on the GL thread after [onSurfaceAvailable].
     */
    fun onDrawFrame(data: Data) {
        // Full-quad positions (triangle fan) and matching texture coordinates.
        val vertices = floatArrayOf(
            -1f, -1f, 1f,
            1f, -1f, 1f,
            1f, 1f, 1f,
            -1f, 1f, 1f
        )
        // Fix: size float buffers with Float.SIZE_BYTES — the original used
        // Int.SIZE_BYTES, which is only correct because both happen to be 4.
        val vertexBuffer = ByteBuffer.allocateDirect(vertices.size * Float.SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer().put(vertices).position(0)
        val textures = floatArrayOf(
            0f, 1f,
            1f, 1f,
            1f, 0f,
            0f, 0f
        )
        val textureBuffer = ByteBuffer.allocateDirect(textures.size * Float.SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer().put(textures).position(0)

        GLES30.glUseProgram(texProgram)

        // Normalize the face rect so (x1, y1) is top-left, then downscale.
        val x1 = (min(data.x1, data.x2) / scale).toInt()
        val x2 = (max(data.x1, data.x2) / scale).toInt()
        val y1 = (min(data.y1, data.y2) / scale).toInt()
        val y2 = (max(data.y1, data.y2) / scale).toInt()
        val vc = reviseRectToViewport(deviceWidth, deviceHeight, imageWidth, imageHeight, RectC(x1, y1, x2, y2))
        GLES30.glViewport(vc.x, vc.y, vc.width, vc.height)

        val attrPos = GLES30.glGetAttribLocation(texProgram, "aPosition")
        val attrTex = GLES30.glGetAttribLocation(texProgram, "aTexCoord")
        GLES30.glVertexAttribPointer(attrPos, 3, GLES30.GL_FLOAT, false, 3 * Float.SIZE_BYTES, vertexBuffer)
        GLES30.glVertexAttribPointer(attrTex, 2, GLES30.GL_FLOAT, false, 2 * Float.SIZE_BYTES, textureBuffer)
        GLES30.glEnableVertexAttribArray(attrPos)
        GLES30.glEnableVertexAttribArray(attrTex)
        GLES30.glEnable(GLES30.GL_BLEND)
        // Premultiplied-alpha blend (GLUtils.texImage2D uploads premultiplied).
        GLES30.glBlendFunc(GLES30.GL_ONE, GLES30.GL_ONE_MINUS_SRC_ALPHA)
//        GLES30.glBlendFunc(GLES30.GL_SRC_ALPHA, GLES30.GL_ONE_MINUS_SRC_ALPHA)
        GLES30.glActiveTexture(GLES30.GL_TEXTURE0)
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textureIds[0])
        GLES30.glDrawArrays(GLES30.GL_TRIANGLE_FAN, 0, 4)

        // Restore GL state so later passes are unaffected.
        GLES30.glDisable(GLES30.GL_BLEND)
        GLES30.glDisableVertexAttribArray(attrPos)
        GLES30.glDisableVertexAttribArray(attrTex)
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, GLES30.GL_NONE)
    }

    /**
     * Maps a face rect given in (scaled) preview coordinates into surface
     * viewport coordinates.
     * NOTE(review): width/height are always rescaled but x/y only when the
     * device is larger than the preview — confirm this asymmetry is intended;
     * GL viewport y also grows upward, which is not compensated here.
     */
    private fun reviseRectToViewport(minSurfaceWidth: Int, maxSurfaceHeight: Int, minPreviewWidth: Int, maxPreviewHeight: Int, data: RectC): ViewportC {
        val widthRatio = minPreviewWidth / minSurfaceWidth.toFloat()
        val heightRatio = maxPreviewHeight / maxSurfaceHeight.toFloat()
        val width = abs(data.right - data.left) * widthRatio
        val height = abs(data.bottom - data.top) * heightRatio
        val x = if (deviceWidth > imageWidth) (data.left * widthRatio).toInt() else data.left
        val y = if (deviceHeight > imageHeight) (data.top * heightRatio).toInt() else data.top
        return ViewportC(x, y, width.toInt(), height.toInt())
    }
}

/**
 * Smoke-test decode: compresses an NV21 frame to an in-memory JPEG, decodes it
 * back to a Bitmap, then recycles it. Useful only to verify that the camera
 * buffer really contains a well-formed NV21 image of the given dimensions.
 */
fun decodeToOriginalBitmap(data: ByteArray, imageWidth: Int, imageHeight: Int) {
    i("decode -> imageWidth:$imageWidth,imageHeight:$imageHeight")
    var bitmap: Bitmap? = null
    var stream: ByteArrayOutputStream? = null
    try {
        // The clone here matters: it guarantees the caller's buffer is not
        // mutated, since it is reused for subsequent frames.
        val image = YuvImage(data.clone(), ImageFormat.NV21, imageWidth, imageHeight, null)
        // (An unreachable `if (false)` branch that wrote the JPEG to external
        // storage and slept 3 s was removed from here.)
        stream = ByteArrayOutputStream()
        image.compressToJpeg(Rect(0, 0, imageWidth, imageHeight), 100, stream)
        bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size())
    } catch (e: Exception) {
        // Best-effort diagnostic: a failed decode must never crash the preview
        // pipeline, so failures are deliberately swallowed.
    } finally {
        try {
            stream?.close()
        } catch (ignored: IOException) {
        }
    }
    bitmap?.recycle()
}

/**
 * Runs native face detection over a cloned NV21 frame and parses the JSON
 * result into a [FaceDTO]. Returns null when the native result is too short
 * to be valid, the JSON is malformed, or no face was found; the first face's
 * rectangle is logged on success.
 */
fun faceDetect(imageWidth: Int, imageHeight: Int, yuv: ByteArray): FaceDTO? {
    i("face detect -> previewWidth:$imageWidth,previewHeight:$imageHeight")
    val json = FaceNeuralNetwork.FaceDetect(
        yuv.clone(),
        imageWidth * imageHeight * 3 / 2,
        imageWidth,
        imageHeight,
        FaceNeuralNetwork.ImageYuv420SP, ANGLE
    )
    i("parse data -> $json")
    // Anything shorter than 20 chars cannot be a real detection payload.
    if ((json?.length ?: 0) < 20) {
        e("face detect -> $json")
        return null
    }
    val dto = try {
        Gson().fromJson(json, FaceDTO::class.java)
    } catch (ex: JsonSyntaxException) {
        e("face detect -> $ex")
        return null
    }
    if ((dto.data?.size ?: 0) <= 0) {
        e("face detect -> face size is 0")
        return null
    }
    val first = dto.data?.get(0)
    i("face detect -> x1:${first?.x1 ?: 0},y1:${first?.y1 ?: 0},x2:${first?.x2 ?: 0},y2:${first?.y2 ?: 0}")
    return dto
}

/**
 * Detects faces in an NV21 frame and returns the landmark points of the
 * largest face (by area) as a FloatArray, or null when nothing was detected.
 */
private fun faceDetectPoint(bytes: ByteArray, imageWidth: Int, imageHeight: Int): FloatArray? {
    val faces = faceDetect(imageWidth, imageHeight, bytes)?.data ?: return null
    if (faces.isEmpty()) return null
    // maxByOrNull keeps the first maximal element, matching the original
    // strict-greater-than scan.
    val largest = faces.maxByOrNull { it.area } ?: return null
    return FloatArray(largest.point.size) { idx -> largest.point[idx].toFloat() }
}