package com.xunua.myaudioandvideolearning

import android.media.MediaCodec
import android.media.MediaFormat
import android.util.Log
import android.view.Surface
import java.io.*
import java.lang.Exception

/**
 * @author: linxunyou
 * @description: Simple H.264 player — hardware-decodes a raw .h264 file via MediaCodec and renders to a Surface.
 * @date: 2022/5/5 20:01
 **/
private const val TAG = "H264player"

class H264player(
    var path: String,    // data source: path to a raw Annex-B .h264 file
    var surface: Surface // render target: frames are released directly onto this surface
) : Thread() {
    // Hardware decoder; stays null if the device cannot create/configure an AVC decoder.
    var mediaCodec: MediaCodec? = null

    init {
        // Probe hardware support via try/catch: if AVC decoding is unsupported,
        // creation/configuration throws and mediaCodec remains null.
        try {
            mediaCodec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
            // The width/height given here are only hints; the decoder normally takes
            // the real dimensions from the SPS inside the stream.
            val mediaFormat =
                MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 364, 368)
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15)
            mediaCodec?.configure(mediaFormat, surface, null, 0)
        } catch (e: Exception) {
            Log.e(TAG, "不支持")
            e.printStackTrace()
        }
    }

    /**
     * Starts the decoder and launches the decode loop on this thread.
     * Must be called at most once (Thread.start() cannot be reused).
     */
    fun play() {
        mediaCodec?.start()
        // This class IS a Thread — start it directly instead of wrapping
        // it in yet another Thread(this) (the old double-wrapping was redundant).
        start()
    }

    override fun run() {
        super.run()
        try {
            decodeH264()
        } catch (e: Exception) {
            // Keep the thread from crashing the process, but don't swallow silently.
            Log.e(TAG, "decodeH264 failed", e)
        }
    }

    /**
     * Feeds the raw H.264 byte stream to the decoder one NAL unit at a time
     * (split on the 0x00 0x00 0x00 0x01 start code) and releases decoded
     * frames onto [surface]. Returns when the whole file has been consumed.
     */
    private fun decodeH264() {
        var bytes: ByteArray? = null
        try {
            bytes = getBytes(path)
        } catch (e: Exception) {
            e.printStackTrace()
        }
        if (bytes == null || bytes.isEmpty()) return

        var startIndex = 0
        val bufferInfo = MediaCodec.BufferInfo()
        // Terminates once every byte has been queued (the old `while (true)` never
        // exited and crashed with a negative length after the last frame).
        while (startIndex < bytes.size) {
            // +2 steps past the current start code so we locate the NEXT one,
            // not the one we are standing on.
            val nextFrameStartIndex = findNextFrameIndex(bytes, startIndex + 2, bytes.size)
            // -1 means there is no further start code: the current frame runs
            // to the end of the file. The original code missed this case.
            val frameEnd = if (nextFrameStartIndex < 0) bytes.size else nextFrameStartIndex
            // dequeueInputBuffer(timeoutUs): ask the dsp for an empty input container.
            val inIndex = mediaCodec?.dequeueInputBuffer(1000 * 10) ?: -1
            Log.e(TAG, "decodeH264: 拿到了${inIndex}号容器     nextFrameStartIndex:${nextFrameStartIndex}")
            if (inIndex >= 0) {
                val byteBuffer = mediaCodec?.getInputBuffer(inIndex)
                val currentFrameLength = frameEnd - startIndex
                // Copy exactly one frame into the container supplied by the dsp.
                byteBuffer?.put(bytes, startIndex, currentFrameLength)
                // pts = 0: unknown here; the decoder derives timing from the stream.
                mediaCodec?.queueInputBuffer(inIndex, 0, currentFrameLength, 0, 0)
                startIndex = frameEnd
            }
            val outIndex = mediaCodec?.dequeueOutputBuffer(bufferInfo, 1000 * 10) ?: -10086
            Log.e(TAG, "decodeH264: 输出了${outIndex}号容器")
            if (outIndex >= 0) {
                // Crude ~30fps pacing; a real player would schedule by PTS instead.
                try {
                    sleep(33)
                } catch (e: Exception) {
                    // Interrupted sleep is harmless here; keep rendering.
                }
                // true -> render this buffer to the configured surface.
                mediaCodec?.releaseOutputBuffer(outIndex, true)
            }
        }
    }

    /**
     * Returns the index of the next Annex-B start code (0x00 0x00 0x00 0x01)
     * at or after [startIndex], or -1 if none exists before [totalSize].
     */
    private fun findNextFrameIndex(bytes: ByteArray, startIndex: Int, totalSize: Int): Int {
        for (i in startIndex..(totalSize - 4)) {
            if (bytes[i] == 0x00.toByte() && bytes[i + 1] == 0x00.toByte() &&
                bytes[i + 2] == 0x00.toByte() && bytes[i + 3] == 0x01.toByte()
            ) {
                return i
            }
        }
        return -1
    }

    /**
     * Reads the whole file at [path] into memory.
     * Closes the stream even on failure (the original leaked it).
     * @throws IOException if the file cannot be opened or read.
     */
    @Throws(IOException::class)
    private fun getBytes(path: String): ByteArray {
        FileInputStream(path).use { input ->
            val bos = ByteArrayOutputStream()
            val buf = ByteArray(1024)
            var len: Int
            while (input.read(buf, 0, buf.size).also { len = it } != -1) {
                bos.write(buf, 0, len)
            }
            return bos.toByteArray()
        }
    }
}