package com.example.audio_test.utils

import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaCodecList
import android.media.MediaFormat
import com.example.audio_test.bean.EncoderParams
import java.lang.ref.WeakReference
import kotlin.experimental.and

/**
 * Encodes raw NV21 (YUV420SP) camera frames to H.264 ("video/avc") on a
 * background thread and hands the encoded stream to a [MediaMuxerUtil].
 *
 * Usage: `start()` the thread, attach a muxer via [setTmpuMuxer], then push
 * frames with [addData]; call [exit] to stop and release the codec.
 *
 * @author : Kun
 * @date : 2021/6/29
 * @time : 16:03
 */
class H264EncoderConsumer : Thread() {

    /** Hardware encoder instance; null until [startCodec] succeeds and after [stopCodec]. */
    private var mVideoEncodec: MediaCodec? = null

    /** Color format negotiated with the codec in [startCodec]. */
    private var colorFormat = 0

    /** Set by [exit]; makes [run] leave its loop and release the codec. */
    private var isExit = false

    /** True while the codec is configured and started; [addData] drops frames otherwise. */
    private var isEncoderStart = false

    /** Frames arriving before the first key frame are dropped so the stream starts on an IDR. */
    private var isAddKeyFrame = false

    private var mParamsRef: WeakReference<EncoderParams>? = null

    /** Output format reported by the codec; registered with any muxer attached later. */
    private var newFormat: MediaFormat? = null
    private var mMuxerRef: WeakReference<MediaMuxerUtil>? = null

    /**
     * Bit-rate quality tier.
     */
    enum class Quality {
        LOW, MIDDLE, HIGH
    }

    /**
     * Supported frame rates.
     */
    enum class FrameRate {
        FPS_20, FPS_25, FPS_30
    }

    /**
     * Attaches the muxer and encoder parameters. If the codec has already
     * reported its output format, the video track is registered immediately.
     */
    fun setTmpuMuxer(muxerUtil: MediaMuxerUtil, params: EncoderParams) {
        mMuxerRef = WeakReference(muxerUtil)
        mParamsRef = WeakReference(params)

        val format = newFormat
        if (format != null) {
            mMuxerRef?.get()?.addTrack(format, true)
        }
    }

    // Pacing interval between pushed frames, in milliseconds.
    // NOTE(review): hard-coded for 20 fps even though getFrameRate() may return
    // 25/30 — confirm this mismatch is intended.
    val millisPreFrame = 1000L / 20

    /** Wall-clock time of the previous pushed frame; 0 before the first frame. */
    var lastPush = 0L

    /**
     * Pushes one NV21 frame. The buffer is converted to NV12 in place and
     * queued to the encoder; calls are throttled to roughly [millisPreFrame].
     *
     * Assumes [yuvData] holds width * height * 3 / 2 bytes matching the
     * attached [EncoderParams] — TODO confirm against the camera callback.
     */
    fun addData(yuvData: ByteArray) {
        if (!isEncoderStart) {
            return
        }
        // FIX: guard against a cleared WeakReference instead of params!! (NPE).
        val params = mParamsRef?.get() ?: return
        try {
            if (lastPush == 0L) {
                lastPush = System.currentTimeMillis()
            }
            var wait = System.currentTimeMillis() - lastPush
            if (wait >= 0) {
                wait = millisPreFrame - wait
                if (wait > 0) {
                    // Split the wait around the encode call to smooth pacing.
                    sleep(wait / 2)
                }
            }

            nv21ToNV12(yuvData, params.frameWidth, params.frameHeight)
            feedMediaCodecData(yuvData)

            if (wait > 0) {
                sleep(wait / 2)
            }
            lastPush = System.currentTimeMillis()
        } catch (e: Exception) {
            e.printStackTrace()
        }
    }

    /**
     * Queues one converted NV12 frame into the encoder's input buffer.
     * The frame is silently dropped if no input buffer frees up within
     * [TIMES_OUT] microseconds.
     */
    private fun feedMediaCodecData(yuvData: ByteArray) {
        val codec = mVideoEncodec ?: return
        val inputIndex = codec.dequeueInputBuffer(TIMES_OUT)
        if (inputIndex >= 0) {
            val inputBuffer = codec.getInputBuffer(inputIndex) ?: return
            inputBuffer.clear()
            inputBuffer.put(yuvData)
            // FIX: flags must be 0 for raw input frames. The original passed
            // BUFFER_FLAG_KEY_FRAME, which is an output-side flag; key-frame
            // cadence is controlled by KEY_I_FRAME_INTERVAL instead.
            codec.queueInputBuffer(
                inputIndex,
                0,
                yuvData.size,
                System.nanoTime() / 1000,
                0
            )
        }
    }

    /**
     * Requests the encoder thread to stop; [run] releases the codec on exit.
     */
    fun exit() {
        isExit = true
    }

    override fun run() {
        if (!isEncoderStart) {
            try {
                // Give the caller a moment to attach the muxer/params first.
                sleep(200)
            } catch (e: Exception) {
                e.printStackTrace()
            }
            startCodec()
        }

        while (!isExit) {
            // FIX: startCodec() may fail and leave mVideoEncodec null; the
            // original dereferenced it with !! and would crash here.
            val codec = mVideoEncodec ?: break
            val bufferInfo = MediaCodec.BufferInfo()
            var outputIndex = codec.dequeueOutputBuffer(bufferInfo, TIMES_OUT)
            // FIX: negative return values are status codes. The original
            // checked INFO_TRY_AGAIN_LATER / INFO_OUTPUT_BUFFERS_CHANGED /
            // INFO_OUTPUT_FORMAT_CHANGED inside a `>= 0` loop, making those
            // branches unreachable, and re-added the track on every frame.
            while (outputIndex >= 0 || outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // Reported once, before the first encoded buffer:
                    // remember the format and register the video track.
                    synchronized(this) {
                        newFormat = codec.outputFormat
                        mMuxerRef?.get()?.addTrack(newFormat!!, true)
                    }
                } else {
                    val outBuffer = codec.getOutputBuffer(outputIndex)!!
                    // NALU type from the 5th byte — assumes a 4-byte Annex-B
                    // start code (00 00 00 01); TODO confirm the codec emits
                    // that form on all devices.
                    val naluType = outBuffer.get(4).toInt() and 0x1F
                    when {
                        naluType == 7 || naluType == 8 -> {
                            // SPS/PPS carry configuration, not picture data;
                            // the muxer receives them via the MediaFormat.
                            bufferInfo.size = 0
                        }
                        naluType == 5 -> {
                            // IDR (key) frame: safe point to start writing.
                            // Without this gate the first second of video
                            // could reference frames the muxer never saw.
                            mMuxerRef?.get()?.pumpStream(outBuffer, bufferInfo, true)
                            isAddKeyFrame = true
                        }
                        else -> {
                            // Non-key frames are only valid after an IDR.
                            if (isAddKeyFrame) {
                                mMuxerRef?.get()?.pumpStream(outBuffer, bufferInfo, true)
                            }
                        }
                    }
                    codec.releaseOutputBuffer(outputIndex, false)
                }
                // FIX: was AACEncodeConsumer.TIMES_OUT — use this class's own constant.
                outputIndex = codec.dequeueOutputBuffer(bufferInfo, TIMES_OUT)
            }
        }
        stopCodec()
    }

    /**
     * Stops and releases the encoder, resetting state so the consumer can be
     * restarted with a fresh codec.
     */
    private fun stopCodec() {
        val codec = mVideoEncodec ?: return
        codec.stop()
        codec.release()
        mVideoEncodec = null
        isAddKeyFrame = false
        // FIX: the original set this to true, which let addData() keep feeding
        // a released encoder.
        isEncoderStart = false
    }

    /**
     * Creates and starts the H.264 encoder using the attached [EncoderParams].
     * Width and height are swapped for vertical (portrait) capture.
     */
    private fun startCodec() {
        isExit = false
        // FIX: guard the WeakReference instead of dereferencing params!! later.
        val params = mParamsRef?.get() ?: return
        try {
            val codecInfo = selectSupportCodec(MIME_TYPE) ?: return
            colorFormat = selectSupportColorFormat(codecInfo, MIME_TYPE)
            mVideoEncodec = MediaCodec.createByCodecName(codecInfo.name)
        } catch (e: Exception) {
            e.printStackTrace()
        }
        val codec = mVideoEncodec ?: return

        val format = if (params.isVertical) {
            MediaFormat.createVideoFormat(MIME_TYPE, params.frameHeight, params.frameWidth)
        } else {
            MediaFormat.createVideoFormat(MIME_TYPE, params.frameWidth, params.frameHeight)
        }
        format.setInteger(MediaFormat.KEY_BIT_RATE, getBitRate())
        format.setInteger(MediaFormat.KEY_FRAME_RATE, getFrameRate())
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat)
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, FRAME_INTERVAL)

        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        codec.start()
        isEncoderStart = true
    }

    /**
     * Frame rate in fps from the attached params; -1 if params are missing.
     */
    private fun getFrameRate(): Int {
        val params = mParamsRef?.get() ?: return -1
        return when (params.frameRateDegree) {
            FrameRate.FPS_20 -> 20
            FrameRate.FPS_25 -> 25
            else -> 30
        }
    }

    /**
     * Computes a target bit rate from resolution and quality tier.
     * Base formula: width * height * 20 * 2 * 0.07, then scaled per tier.
     * Returns -1 if params are missing.
     */
    private fun getBitRate(): Int {
        val params = mParamsRef?.get() ?: return -1
        val width = params.frameWidth
        val height = params.frameHeight
        var bitRate = width * height * 20 * 2 * 0.07f
        // FIX: every HIGH branch originally computed `bitRate * x` and
        // discarded the result, so HIGH behaved like no scaling at all.
        if (width >= 1920 || height >= 1920) {
            when (params.bitRateQuality) {
                Quality.LOW -> bitRate *= 0.75f
                Quality.MIDDLE -> bitRate *= 1.1f
                Quality.HIGH -> bitRate *= 1.5f
            }
        } else if (width >= 1280 || height >= 1280) {
            when (params.bitRateQuality) {
                Quality.LOW -> bitRate *= 1.0f
                Quality.MIDDLE -> bitRate *= 1.4f
                Quality.HIGH -> bitRate *= 1.9f
            }
        } else if (width >= 640 || height >= 640) {
            when (params.bitRateQuality) {
                Quality.LOW -> bitRate *= 1.4f
                Quality.MIDDLE -> bitRate *= 2.1f
                Quality.HIGH -> bitRate *= 3f
            }
        }

        return bitRate.toInt()
    }

    /**
     * Returns the first codec that can encode [mimeType] (case-insensitive),
     * or null when no encoder supports it.
     */
    private fun selectSupportCodec(mimeType: String): MediaCodecInfo? {
        val mediaCodecList = MediaCodecList(MediaCodecList.REGULAR_CODECS)
        return mediaCodecList.codecInfos
            .filter { it.isEncoder }
            .firstOrNull { info -> info.supportedTypes.any { it.equals(mimeType, true) } }
    }

    /**
     * Picks the first color format supported by [codeInfo] for [mimeType]
     * that this class recognizes; 0 when none match.
     */
    private fun selectSupportColorFormat(codeInfo: MediaCodecInfo, mimeType: String): Int {
        val capabilities = codeInfo.getCapabilitiesForType(mimeType)
        return capabilities.colorFormats.firstOrNull { isCodecRecognizedFormat(it) } ?: 0
    }

    /** Only the flexible YUV420 format is accepted. */
    private fun isCodecRecognizedFormat(colorFormat: Int): Boolean =
        colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible

    /**
     * Converts an NV21 frame to NV12 in place by swapping each V/U byte pair
     * in the interleaved chroma plane (the Y plane is identical in both).
     *
     * NV12 layout: YYYY... UVUV...   (YUV420SP)
     * NV21 layout: YYYY... VUVU...   (YUV420SP)
     *
     * In 4:2:0 sampling the buffer is width*height Y bytes followed by
     * width*height/2 interleaved chroma bytes.
     *
     * FIX: the original loop (`for (i in 2 until frameSize / 2 step 2)`)
     * stopped one pair early, leaving the last V/U pair unswapped; the bound
     * is also clamped to input.size to avoid an out-of-range read.
     *
     * More background: https://www.cnblogs.com/azraelly/archive/2013/01/01/2841269.html
     */
    private fun nv21ToNV12(input: ByteArray?, width: Int, height: Int) {
        if (input == null) {
            return
        }

        val frameSize = width * height
        val end = minOf(input.size, frameSize + frameSize / 2)
        var i = frameSize
        while (i + 1 < end) {
            val v = input[i]
            input[i] = input[i + 1]
            input[i + 1] = v
            i += 2
        }
    }

    companion object {
        /** H.264 / AVC MIME type. */
        const val MIME_TYPE = "video/avc"

        /** Key-frame (IDR) interval in seconds. */
        const val FRAME_INTERVAL = 1

        /** Buffer dequeue timeout in microseconds. */
        const val TIMES_OUT = 10000L
    }
}