package com.zysdev.opengldouyin1demo.filter

import android.content.Context
import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.media.MediaMuxer
import android.opengl.EGLContext
import android.os.Handler
import android.os.HandlerThread
import android.util.Log
import android.view.Surface
import com.zysdev.opengldouyin1demo.utils.FileUtils
import java.lang.Exception

/**
 * Records OpenGL texture frames into an H.264 video wrapped in an MP4 container.
 *
 * Pipeline: the caller renders into this recorder via [fireFrame]; on a dedicated
 * HandlerThread an [EGLEnv] draws the texture onto the MediaCodec input [Surface],
 * the encoder produces H.264, and a [MediaMuxer] writes the MP4 file at [mPath].
 *
 * Fixes over the original implementation:
 *  - The muxer is now actually started (on INFO_OUTPUT_FORMAT_CHANGED) and fed via
 *    writeSampleData(); previously MediaMuxer.stop() threw IllegalStateException
 *    because start() was never called and the output file stayed empty.
 *  - Encoder output is drained in a loop instead of one buffer per frame.
 *  - The recording speed passed to [start] is applied to presentation timestamps.
 */
class MediaRecorderH264(
    context: Context,
    path: String,
    glContext: EGLContext,
    width: Int,
    height: Int
) {

    private val mContext: Context = context.applicationContext
    private val mPath: String = path
    private val mWidth: Int = width
    private val mHeight: Int = height
    private val mGlContext: EGLContext = glContext

    // Playback-speed factor; encoded timestamps are divided by it. 1f = real time.
    private var mSpeed = 1f

    // Muxer that packages the raw H.264 stream into an MP4 container.
    private var mMuxer: MediaMuxer? = null
    // Video track index inside the muxer; -1 until the encoder reports its format.
    private var mTrack = -1
    // True once MediaMuxer.start() has run; guards writeSampleData() and stop().
    private var mMuxerStarted = false

    private var mMediaCodec: MediaCodec? = null
    private var mSurface: Surface? = null

    // Written on the recording thread, read on the caller's thread.
    @Volatile
    private var isStart = false
    private var mHandler: Handler? = null
    private var eglEnv: EGLEnv? = null

    /**
     * Configures the H.264 encoder and muxer, spins up the recording thread and
     * creates the off-screen EGL environment bound to the encoder's input Surface.
     *
     * @param speed playback-speed factor: >1 makes the recorded clip play faster,
     *              <1 slower. Non-positive values fall back to 1f (real time).
     */
    fun start(speed: Float) {
        mSpeed = if (speed > 0f) speed else 1f

        try {
            val format = MediaFormat.createVideoFormat(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                mWidth, mHeight
            )
            // Pixel data arrives through the input Surface, not through byte[] buffers.
            format.setInteger(
                MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
            )
            // Bit rate (bits per second).
            format.setInteger(MediaFormat.KEY_BIT_RATE, 1500000)
            // Frame rate.
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 25)
            // Key-frame interval in seconds.
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 15)

            mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC).apply {
                configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
                // Input comes from the GPU: EGLEnv renders onto this Surface.
                mSurface = createInputSurface()
                start()
            }

            // The muxer is started lazily in deCodec(): addTrack() needs the encoder's
            // real output format (containing csd-0/csd-1), which only becomes available
            // after the first INFO_OUTPUT_FORMAT_CHANGED.
            mMuxer = MediaMuxer(mPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
            mTrack = -1
            mMuxerStarted = false

            // All recording GL work must run on one dedicated thread, because the
            // EGL context created below is bound to that thread.
            val handlerThread = HandlerThread("codec-gl")
            handlerThread.start()
            mHandler = Handler(handlerThread.looper)
            mHandler!!.post {
                eglEnv = EGLEnv(mContext, mGlContext, mSurface!!, mWidth, mHeight)
                isStart = true
            }
        } catch (e: Exception) {
            Log.e(TAG, "start() failed", e)
        }
    }

    /**
     * Submits one frame for encoding.
     *
     * Must be cheap on the caller's thread: the actual GL draw and encoder drain are
     * posted to the recording thread, where the recording EGL context is current.
     *
     * @param textureId OpenGL texture holding the frame (typically an FBO texture).
     * @param timestamp frame timestamp in nanoseconds, forwarded to [EGLEnv.draw].
     */
    fun fireFrame(textureId: Int, timestamp: Long) {
        // Ignore frames until the EGL environment is ready (and after stop()).
        if (!isStart) {
            return
        }
        mHandler!!.post {
            // Rendering here pushes the texture onto the encoder's input Surface.
            eglEnv!!.draw(textureId, timestamp)
            // Pull whatever the encoder has produced so far.
            deCodec(false)
        }
    }

    /**
     * Drains all available output buffers from the encoder into the muxer (and the
     * raw-H.264 debug dump via [FileUtils]). Must run on the recording thread.
     *
     * @param endOfStream when true, signals end-of-input first and keeps draining
     *                    until the encoder emits BUFFER_FLAG_END_OF_STREAM.
     */
    private fun deCodec(endOfStream: Boolean) {
        val codec = mMediaCodec ?: return
        if (endOfStream) {
            // No more frames will arrive through the input Surface.
            codec.signalEndOfInputStream()
        }
        val bufferInfo = MediaCodec.BufferInfo()
        while (true) {
            val index = codec.dequeueOutputBuffer(bufferInfo, 10_000)
            when {
                index == MediaCodec.INFO_TRY_AGAIN_LATER -> {
                    // Nothing ready yet. Keep waiting only while flushing at EOS.
                    if (!endOfStream) return
                }
                index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
                    // Happens once, before the first encoded buffer: the format now
                    // carries csd-0/csd-1, so the muxer can be configured and started.
                    check(!mMuxerStarted) { "output format changed twice" }
                    mTrack = mMuxer!!.addTrack(codec.outputFormat)
                    mMuxer!!.start()
                    mMuxerStarted = true
                }
                index >= 0 -> {
                    val buffer = codec.getOutputBuffer(index)!!
                    if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
                        // SPS/PPS already reached the muxer via the output format; skip.
                        bufferInfo.size = 0
                    }
                    if (bufferInfo.size > 0) {
                        // Apply the speed factor: dividing timestamps compresses
                        // (speed > 1) or stretches (speed < 1) playback.
                        bufferInfo.presentationTimeUs =
                            (bufferInfo.presentationTimeUs / mSpeed).toLong()
                        buffer.position(bufferInfo.offset)
                        buffer.limit(bufferInfo.offset + bufferInfo.size)
                        if (mMuxerStarted) {
                            mMuxer!!.writeSampleData(mTrack, buffer, bufferInfo)
                        }
                        // Also dump the raw H.264 stream, as the original code did.
                        buffer.position(bufferInfo.offset)
                        val outData = ByteArray(bufferInfo.size)
                        buffer.get(outData)
                        FileUtils.writeContent(outData)
                        FileUtils.writeBytes(outData)
                    }
                    codec.releaseOutputBuffer(index, false)
                    if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
                        return
                    }
                }
            }
        }
    }

    /**
     * Stops recording: flushes the encoder, finalizes the MP4, releases the codec,
     * muxer and EGL environment, and quits the recording thread. Safe to call once
     * after [start]; frames submitted after this call are dropped by [fireFrame].
     */
    fun stop() {
        isStart = false
        val handler = mHandler ?: return
        handler.post {
            try {
                // Drain remaining frames and write the EOS marker.
                deCodec(true)
                mMediaCodec?.stop()
                mMediaCodec?.release()
                // stop() on a never-started muxer throws IllegalStateException.
                if (mMuxerStarted) {
                    mMuxer?.stop()
                }
                mMuxer?.release()
            } catch (e: Exception) {
                Log.e(TAG, "stop() failed", e)
            }
            mMediaCodec = null
            mMuxer = null
            mMuxerStarted = false
            mTrack = -1
            eglEnv?.release()
            eglEnv = null
            mSurface = null
            handler.looper.quitSafely()
            mHandler = null
        }
    }

    companion object {
        private const val TAG = "MediaRecorderH264"
    }
}