package com.csw.android.ffmpegtest.ui.section8

import android.Manifest
import android.graphics.ImageFormat
import android.hardware.camera2.CameraCharacteristics
import android.media.Image
import android.media.ImageReader
import android.opengl.GLSurfaceView
import android.os.Bundle
import android.os.Handler
import android.os.HandlerThread
import android.view.View
import com.csw.android.dev_utils.utils.ToastUtils
import com.csw.android.ffmpegtest.R
import com.csw.android.ffmpegtest.camera.CameraController
import com.csw.android.ffmpegtest.camera.CameraSelector
import com.csw.android.ffmpegtest.databinding.LayoutSection801Binding
import com.csw.android.ffmpegtest.gl.Section601Renderer
import com.csw.android.ffmpegtest.ui.base.DataBindingFragment
import com.tbruyelle.rxpermissions3.RxPermissions

/**
 * 学习 Android 图形图像架构，能够使用 GLSurfaceviw 绘制 Camera 预览画面
 *
 * YUV与RBG一样都是表示颜色的一种数据格式，与RGB将颜色分成红绿蓝三中颜色通道的混合不同，YUV将颜色分成亮度Y,
 * 色度U，浓度V，当UV分量为128（0~255）时，图片即变为灰度图，U决定颜色，V决定饱和度。YUV格式常用于图象特效。
 * YUV颜色采样，由于人眼对UV分量不是很敏感，可以相邻的四个像素点组成一个宏像素，YUV420即代表其采样率，4个Y
 * 共用同一个UV值。YUV444则表示全采样。
 *
 * 每个分量用一个byte表示，四个像素点，ARGB需要4x4=16byte，YUV444需要12byte，YUV420则只需要4+1+1 = 6byte
 *  YUV420占用的带宽只有ARGB的3/8,YUV444的1/2，是一种很好的带宽压缩思路，YUV的出现也是为了兼容早期的黑白电视
 */
class Section801 : DataBindingFragment<LayoutSection801Binding>() {
    private var handlerThread: HandlerThread? = null
    private val imageFormat = ImageFormat.YUV_420_888
    private var renderer: Section601Renderer? = null
    private var cameraController: CameraController? = null
    private var previewCameraId: String? = null
    private var imageReader: ImageReader? = null

    override fun getContentViewID(): Int {
        return R.layout.layout_section_801
    }

    override fun initView(rootView: View, savedInstanceState: Bundle?) {
        super.initView(rootView, savedInstanceState)
        dataBinding?.glSurfaceView?.run {
            setEGLContextClientVersion(3)
            setRenderer(Section601Renderer().apply {
                renderer = this
            })
            renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
        }
    }

    override fun initListener() {
        super.initListener()
        handlerThread?.quit()
        handlerThread = HandlerThread("图片接收处理线程").apply {
            start()
        }
        imageReader = ImageReader.newInstance(
            1080,
            1920,
            imageFormat,
            2
        ).apply {
            setOnImageAvailableListener({ reader ->
                reader?.run {
                    acquireLatestImage()?.run {
                        try {
                            if (format == imageFormat) {
                                //yuv420采样，返回的数据是yuv三个平面的数据(4:1:1)
                                //但这里是4:2:2，uv平面的数据是间隔着来的。。。
                                //还要我自己转化
                                val yPlane = planes[0]//像素步长1
                                val uPlane = planes[1]//像素步长2
                                val vPlane = planes[2]

                                //这里先手动换算成rbg试试显示效果
                                val yBuffer = yPlane.buffer
                                val uBuffer = uPlane.buffer
                                val vBuffer = vPlane.buffer
                                val pixels = yBuffer.limit()
                                val width = yPlane.rowStride / yPlane.pixelStride//一行多少像素
                                val height = pixels / width
                                val rgb = ByteArray(width * height * 3)
                                var pIndex: Int
                                var y: Int
                                var u: Int
                                var v: Int
                                try {
                                    //rgb都取y分量，就变成灰度图
                                    for (w in 0 until width) {
                                        for (h in 0 until height) {
                                            pIndex = h * width + w
                                            y = 0xFF and yBuffer.get(pIndex * yPlane.pixelStride)
                                                .toInt()
                                            u = 0xFF and
                                                    uBuffer.get(h / 2 * uPlane.rowStride + w / 2 * uPlane.pixelStride)
                                                        .toInt()
                                            v = 0xFF and
                                                    vBuffer.get(h / 2 * vPlane.rowStride + w / 2 * vPlane.pixelStride)
                                                        .toInt()
                                            rgb[pIndex * 3 + 0] =
                                                (y + 1.4075 * (v - 128)).toInt().toByte()
                                            rgb[pIndex * 3 + 1] =
                                                (y - 0.3455 * (u - 128) - 0.7169 * (v - 128)).toInt()
                                                    .toByte()
                                            rgb[pIndex * 3 + 2] =
                                                (y + 1.779 * (u - 128)).toInt().toByte()
                                        }
                                    }
                                } catch (e: Exception) {
                                    e.printStackTrace()
                                }
                                renderer?.updatePicture(width, height, rgb)
                                dataBinding?.glSurfaceView?.requestRender()
                            }
                        } catch (e: Exception) {
                            e.printStackTrace()
                        } finally {
                            close()
                        }
                    }
                }
            }, Handler(handlerThread!!.looper))
        }
    }

    override fun initData() {
        super.initData()
        RxPermissions(this).request(
            Manifest.permission.CAMERA,
            Manifest.permission.RECORD_AUDIO
        ).subscribe {
            if (it) {
                dataBinding?.root?.visibility = View.VISIBLE
                initCamera()
            } else {
                activity?.finish()
            }
        }
    }

    private fun initCamera() {
        val surface = imageReader?.surface ?: return
        activity?.let { context ->
            CameraSelector().apply {
                imageFormat = this@Section801.imageFormat
                facing = CameraCharacteristics.LENS_FACING_FRONT
            }.selectCamera()?.let { cameraId ->
                previewCameraId = cameraId
                cameraController = CameraController().apply {
                    connectToCamera(context, cameraId)
                    addSurface(
                        cameraId,
                        surface
                    )
                }
                return
            }
            ToastUtils.showShort("无匹配摄像头")
        }
    }

    override fun onResume() {
        super.onResume()
        dataBinding?.glSurfaceView?.onResume()
    }

    override fun onPause() {
        dataBinding?.glSurfaceView?.onPause()
        super.onPause()
    }

    override fun onDestroyView() {
        cameraController?.release()
        handlerThread?.quitSafely()
        renderer = null
        super.onDestroyView()
    }

}