package com.guoji.cameracall.helper

import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.util.Log
import com.guoji.cameracall.socket.ISocketLive
import java.nio.ByteBuffer


/**
 * create by zhongZihao on 2021/9/27
 * Push-stream encoder: converts NV21 camera frames to NV12, rotates them,
 * encodes them to H.265 (HEVC) with [MediaCodec], and pushes the resulting
 * NAL units out through [socketLive].
 *
 * Usage: call [startEncode] once, then [encodeCameraFrame] per camera frame,
 * and [stopEncode] when done to release the codec.
 *
 * Not thread-safe: all methods are expected to be called from a single thread
 * (typically the camera preview callback thread).
 */
class EncodePushH265(
    private val socketLive: ISocketLive,
    private val width: Int,
    private val height: Int
) {
    private var mediaCodec: MediaCodec? = null

    // Rotated YUV frame, reused every frame to avoid a per-frame allocation.
    private val yuv = ByteArray(width * height * 3 / 2)

    // Frame sequence number; drives monotonically increasing timestamps.
    var frameIndex = 0L

    // First VPS/SPS/PPS (parameter set) frame produced by the encoder.
    // It is prepended to every I frame so a decoder can join mid-stream.
    private lateinit var vps_sps_pps_buf: ByteArray

    // Latest I frame (with VPS/SPS/PPS prepended); a newly connected client
    // can be sent this immediately instead of waiting for the next IDR.
    lateinit var curIFrame: ByteArray

    /**
     * Creates, configures and starts the HEVC encoder.
     * Must be called before [encodeCameraFrame]. On failure the codec is
     * released and [mediaCodec] stays null, so later calls are safe no-ops.
     */
    fun startEncode() {
        try {
            mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC)
            // Width and height are swapped on purpose: the portrait camera
            // frame is rotated 90° by YuvUtil.portraitData2Raw before encoding.
            val format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, height, width)
                .apply {
                    setInteger(MediaFormat.KEY_BIT_RATE, width * height)
                    setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE)
                    setInteger(
                        MediaFormat.KEY_COLOR_FORMAT,
                        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible
                    )
                    // IDR refresh interval, in seconds.
                    setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5)
                }
            mediaCodec!!.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
            mediaCodec!!.start()
        } catch (e: Exception) {
            Log.e(TAG, "startEncode failed", e)
            // Don't leak a created-but-unusable codec.
            mediaCodec?.release()
            mediaCodec = null
        }
    }

    /**
     * Stops and releases the encoder. Safe to call multiple times; after this,
     * [encodeCameraFrame] becomes a no-op until [startEncode] is called again.
     */
    fun stopEncode() {
        try {
            mediaCodec?.stop()
            mediaCodec?.release()
        } catch (e: Exception) {
            Log.e(TAG, "stopEncode failed", e)
        } finally {
            mediaCodec = null
        }
    }

    /**
     * Feeds one NV21 camera frame into the encoder and drains all encoded
     * output that is currently available, forwarding it via [dealFrame].
     * Null input or an unstarted codec is silently ignored.
     */
    fun encodeCameraFrame(input: ByteArray?) {
        input ?: return
        val codec = mediaCodec ?: return
        // Android cameras deliver NV21; convert to the widely supported NV12.
        val nv12 = YuvUtil.nv21ToNv12(input)
        // Rotate the portrait frame into the encoder's expected orientation.
        YuvUtil.portraitData2Raw(nv12, yuv, width, height)
        val inIndex = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US)
        if (inIndex >= 0) {
            codec.getInputBuffer(inIndex)?.apply {
                clear()
                put(yuv)
            }
            codec.queueInputBuffer(inIndex, 0, yuv.size, computePresentationTime(frameIndex), 0)
            frameIndex++
        }
        // Drain every encoded frame that is ready; only the first dequeue waits.
        val bufferInfo = MediaCodec.BufferInfo()
        var outIndex = codec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US)
        while (outIndex >= 0) {
            codec.getOutputBuffer(outIndex)?.let { dealFrame(it, bufferInfo) }
            codec.releaseOutputBuffer(outIndex, false)
            outIndex = codec.dequeueOutputBuffer(bufferInfo, 0)
        }
    }

    /**
     * Inspects one encoded NAL unit and pushes it to [socketLive]:
     * VPS/SPS/PPS frames are cached, I frames are sent with the cached
     * parameter sets prepended, everything else is sent as-is.
     */
    private fun dealFrame(buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo) {
        // Start code is either 00 00 01 (3 bytes) or 00 00 00 01 (4 bytes);
        // the NAL header is the first byte after it.
        // NOTE(review): absolute indexing assumes bufferInfo.offset == 0,
        // which matches current MediaCodec output buffers — confirm if ported.
        val startCodeLen = if (buffer.get(2).toInt() == 0x01) 3 else 4
        // H.265 NAL unit type lives in bits 1..6 of the first header byte:
        // mask with 0x7E then shift right by 1.
        val type = (buffer.get(startCodeLen).toInt() and 0x7E) shr 1
        val frame = ByteArray(bufferInfo.size).apply { buffer.get(this) }
        when (type) {
            NAL_VPS -> {
                // Cache the parameter sets; the encoder only emits them once,
                // so they must be re-attached to every I frame we send.
                vps_sps_pps_buf = frame
                Log.v(TAG, "VPS-SPS-PPS帧数据  ${vps_sps_pps_buf.contentToString()}")
            }
            NAL_I -> {
                if (!::vps_sps_pps_buf.isInitialized) {
                    // No parameter sets seen yet — a decoder could not use this
                    // I frame anyway, so drop it instead of crashing on lateinit.
                    Log.w(TAG, "I frame before VPS/SPS/PPS, dropped")
                    return
                }
                // Prepend VPS/SPS/PPS so any client can start decoding here.
                val newIFrame = vps_sps_pps_buf + frame
                curIFrame = newIFrame
                socketLive.sendData(newIFrame)
                Log.v(TAG, "I帧数据  ${newIFrame.contentToString()}")
            }
            else -> {
                // P/B frames are forwarded untouched.
                socketLive.sendData(frame)
                Log.v(TAG, "PB帧数据  ${frame.contentToString()}")
            }
        }
    }

    /**
     * Presentation timestamp in microseconds for [frameIndex], assuming a
     * constant [FRAME_RATE]; guarantees strictly increasing ordering.
     */
    private fun computePresentationTime(frameIndex: Long): Long =
        132 + frameIndex * 1_000_000 / FRAME_RATE

    companion object {
        private const val TAG = "TAG_Encode"

        // Encoder frame rate; also used for timestamp computation so the two
        // can never drift apart.
        private const val FRAME_RATE = 15

        // Timeout (µs) for the blocking input/first-output dequeue calls.
        private const val DEQUEUE_TIMEOUT_US = 100_000L

        // H.265 NAL unit type of an IDR (first I) frame.
        private const val NAL_I = 19

        // H.265 NAL unit type of a VPS (parameter set) frame.
        private const val NAL_VPS = 32
    }
}