package com.shenhangye.pushlive.codec

import android.content.Context
import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.media.MediaMuxer
import android.opengl.EGLContext
import android.os.Handler
import android.os.HandlerThread
import android.util.Log
import android.view.Surface
import com.shenhangye.pushlive.gles.EGLBase
import com.shenhangye.pushlive.util.FileUtils

/**
 * Records OpenGL-rendered frames into an MP4 file.
 *
 * Pipeline: frames are drawn (via a shared [EGLContext]) onto the encoder's
 * input [Surface]; the H.264 [MediaCodec] output is drained and written to a
 * [MediaMuxer]. All codec/EGL work happens on a dedicated [HandlerThread].
 *
 * Usage: [start] -> repeated [encodeFrame] -> [stop].
 *
 * @param context    used to create the EGL environment
 * @param path       output .mp4 file path
 * @param mWidth     encoded video width in pixels
 * @param mHeight    encoded video height in pixels
 * @param eglContext GL context shared with the render thread, so its textures
 *                   are visible on the codec thread
 */
class MediaRecorder(var context: Context,var path:String,
                    var mWidth:Int,var mHeight:Int,var eglContext: EGLContext ) {

    private var mEglBase: EGLBase? = null
    private var mMediaCodec: MediaCodec? = null
    private var mInputSurface: Surface? = null

    private var mMediaMuxer: MediaMuxer? = null
    private var mHandler: Handler? = null
    // Kept so stop() can quit the looper; previously this thread leaked.
    private var mHandlerThread: HandlerThread? = null

    // Muxer track index, valid only after INFO_OUTPUT_FORMAT_CHANGED.
    private var dataIndex = 0

    // Set on the codec thread in start(), read from the caller's thread in
    // encodeFrame() and stop() -> must be @Volatile for cross-thread visibility.
    @Volatile
    private var isStart = false

    // MediaMuxer.stop() throws IllegalStateException unless start() ran first
    // (i.e. at least one output format was seen); guard tear-down with this.
    private var mMuxerStarted = false

    private val TAG = "MediaRecorder"

    /**
     * Configures the H.264 surface encoder and MP4 muxer, spins up the codec
     * thread, and creates the EGL environment on it.
     *
     * Must be called before [encodeFrame].
     */
    fun start() {
        Log.e(TAG, "start")
        val mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, mWidth, mHeight)
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 1500000)
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20)
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 20)
        // COLOR_FormatSurface: the encoder's input is a Surface we render into,
        // not YUV buffers we queue manually.
        mediaFormat.setInteger(
            MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
        )

        // Create and configure the encoder; grab its input Surface for GL.
        val codec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
        codec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        mInputSurface = codec.createInputSurface()
        mMediaCodec = codec
        mMediaMuxer = MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)

        val handlerThread = HandlerThread("VideoCodec")
        handlerThread.start()
        mHandlerThread = handlerThread
        mHandler = Handler(handlerThread.looper)

        mHandler?.post {
            // The EGL environment must be created on the codec thread so the
            // shared GL context becomes current on that thread.
            mEglBase = EGLBase(context, mWidth, mHeight, mInputSurface, eglContext)
            codec.start()
            isStart = true
        }
    }

    /**
     * Submits one frame for encoding. Cheap to call from the render thread;
     * the draw + drain is posted to the codec thread.
     *
     * Frames arriving before [start] has finished initialising are dropped.
     *
     * @param textureId GL texture containing the frame
     * @param timestamp presentation timestamp passed through to the EGL draw
     */
    fun encodeFrame(textureId: Int, timestamp: Long) {
        if (!isStart) {
            return
        }
        mHandler?.post {
            mEglBase?.draw(textureId, timestamp)
            getCodec(false)
        }
    }

    /**
     * Drains all pending encoder output buffers and writes them to the muxer.
     * Runs on the codec thread.
     *
     * @param endOfStream when true, signals EOS to the encoder and keeps
     *                    draining until the BUFFER_FLAG_END_OF_STREAM buffer
     *                    is seen; when false, returns on the first timeout.
     */
    fun getCodec(endOfStream: Boolean) {
        val codec = mMediaCodec ?: return
        if (endOfStream) {
            codec.signalEndOfInputStream()
        }

        val bufferInfo = MediaCodec.BufferInfo()

        while (true) {
            val index = codec.dequeueOutputBuffer(bufferInfo, 10000)
            when {
                index == MediaCodec.INFO_TRY_AGAIN_LATER -> {
                    // Dequeue timed out. While draining to EOS keep looping;
                    // otherwise give up and let the next frame drain again.
                    if (!endOfStream) {
                        return
                    }
                }
                index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
                    // Reported exactly once when encoding starts: register the
                    // video track with the muxer and start it.
                    dataIndex = mMediaMuxer!!.addTrack(codec.outputFormat)
                    mMediaMuxer?.start()
                    mMuxerStarted = true
                }
                index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED -> {
                    // Deprecated signal on API 21+; nothing to do.
                }
                index >= 0 -> {
                    // A valid output buffer was dequeued.
                    val outputBuffer = codec.getOutputBuffer(index)

                    // Codec-config data (SPS/PPS) is already carried in the
                    // track format added above; never write it to the mp4.
                    if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                        bufferInfo.size = 0
                    }
                    if (bufferInfo.size != 0 && outputBuffer != null) {
                        // Restrict the buffer to the valid payload range.
                        outputBuffer.position(bufferInfo.offset)
                        outputBuffer.limit(bufferInfo.offset + bufferInfo.size)
                        mMediaMuxer?.writeSampleData(dataIndex, outputBuffer, bufferInfo)
                    }

                    // Hand the buffer back to the codec for reuse.
                    codec.releaseOutputBuffer(index, false)

                    if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        return
                    }
                }
            }
        }
    }

    /**
     * Stops recording: drains the encoder to end-of-stream, finalises the MP4,
     * releases codec/EGL resources and shuts down the codec thread.
     *
     * Safe to call even if no frame was ever encoded.
     */
    fun stop() {
        isStart = false
        // Safe-call: stop() before start() is a no-op instead of an NPE.
        mHandler?.post {
            getCodec(true)
            mMediaCodec?.stop()
            mMediaCodec?.release()
            mMediaCodec = null
            // Only stop the muxer if it was actually started, otherwise
            // MediaMuxer.stop() throws IllegalStateException.
            if (mMuxerStarted) {
                mMediaMuxer?.stop()
            }
            mMediaMuxer?.release()
            mMediaMuxer = null
            mEglBase?.release()
            mEglBase = null
            mInputSurface = null
            // Quit the looper so the codec thread can terminate (was leaked).
            mHandlerThread?.quitSafely()
            mHandlerThread = null
            mHandler = null
        }
    }

}