package com.ligq.h265toupinpush.call

import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.util.Log
import com.ligq.common.constants.FrameType
import com.ligq.common.utils.FileUtils
import com.ligq.common.utils.YuvUtils

// Frame rate the encoder is configured for; also drives presentation-timestamp math.
const val VALUE_FRAME_RATE = 30
// Shared logcat tag for this module.
const val TAG = "ligq_call"

/**
 * Encodes raw NV21 camera frames to H.264 via [MediaCodec] and pushes the
 * encoded NAL units out through [socketLive].
 *
 * Width and height are swapped when configuring the codec because incoming
 * landscape frames are rotated to portrait before encoding (see [encodeFrame]).
 *
 * @param width  width of the raw camera frame in pixels (landscape orientation)
 * @param height height of the raw camera frame in pixels (landscape orientation)
 * @param socketLive transport used to deliver the encoded stream
 */
class EncodeH264LivePush(
    private val width: Int,
    private val height: Int,
    private val socketLive: SocketLive
) {
    private lateinit var mediaCodec: MediaCodec

    // Number of frames queued so far; used to compute each frame's PTS.
    private var index = 0

    // Cached SPS configuration buffer emitted by the encoder (typically the
    // SPS+PPS codec-config data); re-sent before every I frame so a receiver
    // can start decoding mid-stream.
    private lateinit var spsData: ByteArray

    init {
        socketLive.call()
    }

    /**
     * Creates, configures and starts the H.264 encoder.
     * Failures are logged; [mediaCodec] then stays uninitialized and
     * [encodeFrame] will log (not crash) on the resulting exception.
     */
    fun startLive() {
        try {
            // Height/width are intentionally swapped: frames are rotated to
            // portrait in encodeFrame() before being queued.
            val format =
                MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, height, width)
            // Use the shared constant rather than a duplicated magic number,
            // so the configured rate and the PTS math can never disagree.
            format.setInteger(MediaFormat.KEY_FRAME_RATE, VALUE_FRAME_RATE)
            format.setInteger(MediaFormat.KEY_BIT_RATE, width * height)
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10)
            format.setInteger(
                MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible
            )
            mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
            mediaCodec.start()
        } catch (e: Exception) {
            Log.e(TAG, "startLive: ", e)
        }
    }

    /**
     * Encodes one raw camera frame and sends every encoded buffer that is
     * ready, prepending SPS data to I frames (see [dealFrame]).
     *
     * @param input raw NV21 frame data from the camera (landscape)
     */
    fun encodeFrame(input: ByteArray) {
        // Convert NV21 -> NV12 (the chroma ordering the encoder consumes).
        val nv12 = YuvUtils.nv21ToNv12(input)
        // Rotate the landscape frame to portrait.
        val portraitData = YuvUtils.portraitDataRawFront2(nv12, width, height)
        try {
            // Hand the raw frame to the codec (DSP).
            val inputIndex = mediaCodec.dequeueInputBuffer(10000)
            if (inputIndex >= 0) {
                val inputBuffer = mediaCodec.getInputBuffer(inputIndex)
                inputBuffer?.clear()
                inputBuffer?.put(portraitData)
                mediaCodec.queueInputBuffer(
                    inputIndex,
                    0,
                    portraitData.size,
                    compPts(),
                    0
                )
                index++
            }

            // Drain ALL pending output buffers, not just one: the encoder may
            // produce several buffers per input (e.g. the codec-config buffer
            // plus a frame), and draining only one per call slowly backs up
            // and desynchronizes the output stream.
            val bufferInfo = MediaCodec.BufferInfo()
            var outIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 10000)
            while (outIndex >= 0) {
                val outputBuffer = mediaCodec.getOutputBuffer(outIndex)
                val outData = ByteArray(bufferInfo.size)
                outputBuffer?.get(outData)
                // Transmit; SPS data is prepended to each I frame in dealFrame().
                dealFrame(outData)
                mediaCodec.releaseOutputBuffer(outIndex, false)
                // Further buffers are either ready now or not; don't block again.
                outIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0)
            }
        } catch (e: Exception) {
            Log.e(TAG, "encodeFrame: ", e)
        }
    }

    /**
     * Classifies the NAL unit in [src] and sends it through [socketLive],
     * caching SPS buffers and prepending the cached SPS data before every
     * I frame so receivers can resynchronize mid-stream.
     */
    private fun dealFrame(src: ByteArray) {
        // Annex-B start code is either 00 00 00 01 (offset 4) or 00 00 01 (offset 3).
        var offset = 4
        if (src[2].toInt() == 0x01) {
            offset = 3
        }
        // The low 5 bits of the byte after the start code are the NAL unit type.
        val outData: ByteArray = when (src[offset].toInt() and 0x1F) {
            FrameType.TYPE_H264_SPS.frameType -> {
                // Cache the configuration buffer for later I frames.
                spsData = src
                spsData
            }
            FrameType.TYPE_H264_I.frameType ->
                if (::spsData.isInitialized) {
                    // SPS data + I frame concatenated into a single buffer.
                    ByteArray(spsData.size + src.size).also { merged ->
                        System.arraycopy(spsData, 0, merged, 0, spsData.size)
                        System.arraycopy(src, 0, merged, spsData.size, src.size)
                    }
                } else {
                    // No SPS seen yet; send the frame as-is instead of
                    // throwing UninitializedPropertyAccessException.
                    src
                }
            else -> src
        }
        Log.d(TAG, "dealFrame: send data=${outData.contentToString()}")
        // Push the encoded data to the peer.
        socketLive.sendData(1, outData)
        if (SAVE_DATA) {
            FileUtils.writeBytes("sendData.h264", outData)
        }
    }

    /**
     * Presentation timestamp in microseconds for the next queued frame.
     * Computed in Long: the previous Int expression (1000000 * index)
     * overflowed once index reached ~2148, i.e. after ~71 s at 30 fps.
     */
    private fun compPts(): Long = 1_000_000L * index / VALUE_FRAME_RATE

}