package com.zhg.videorecord.encoder.impl

import android.annotation.SuppressLint
import android.graphics.SurfaceTexture
import android.hardware.Camera
import android.media.MediaCodec
import android.media.MediaFormat
import android.os.Build
import androidx.annotation.RequiresApi
import com.lmy.codec.pipeline.Pipeline
import com.lmy.codec.pipeline.impl.EventPipeline
import com.lmy.codec.pipeline.impl.GLEventPipeline
import com.lmy.codec.wrapper.CodecTextureWrapper
import com.zhg.videorecord.encoder.Encoder
import com.zhg.videorecord.entity.CodecContext
import com.zhg.videorecord.entity.PresentationTimer
import com.zhg.videorecord.helper.CodecHelper
import com.zhg.videorecord.helper.CodectUtil
import com.zhg.videorecord.helper.TLog
import com.zhg.videorecord.helper.YuvConver

/**
 * MediaCodec-based video encoder fed with raw NV21 preview frames from
 * [Camera.PreviewCallback] (instead of a codec input surface). Each frame is
 * converted to NV12, queued into the codec's input buffers on [mPipeline],
 * drained on [mDequeuePipeline], and the encoded samples are forwarded to the
 * registered [Encoder.OnSampleListener].
 */
@RequiresApi(Build.VERSION_CODES.JELLY_BEAN)
class VideoHardSurfaceEncoderImpl (var context: CodecContext,
                                   override var onPreparedListener: Encoder.OnPreparedListener? = null,
                                   asyn: Boolean = false)
    : Encoder {

    override var onRecordListener: Encoder.OnRecordListener? = null
    private var onSampleListener: Encoder.OnSampleListener? = null
    private var pTimer: PresentationTimer = PresentationTimer(context.video.fps)
    // Media format negotiated for this encoder (mime, size, bitrate, ...).
    private lateinit var format: MediaFormat
    // Underlying Android hardware codec; stays null if creation failed.
    private var codec: MediaCodec? = null
    private var codecWrapper: CodecTextureWrapper? = null
    // Scratch buffer info used only while waiting for the output format.
    private val bufferInfo = MediaCodec.BufferInfo()
    // Monitor used by getOutputFormat() to block its caller until the codec
    // reports INFO_OUTPUT_FORMAT_CHANGED on the pipeline thread.
    private val outputFormatLock = Object()
    // Set once the output format is available. Guards the wait() loop against
    // spurious wake-ups and against the notify racing ahead of the wait.
    private var outputFormatReady = false
    // Guards mEncoding and serializes encode() against start()/pause().
    private val mEncodingSyn = Any()
    // Timestamp handed in via setPresentationTime(). Currently never read by
    // the encode path (pTimer drives the timestamps instead).
    private var nsecs: Long = Long.MIN_VALUE
    private var mEncoding = false
    // Number of encoded frames emitted so far; stop() uses it to decide
    // whether the output queue still needs draining.
    private var mFrameCount = 0
    private var mBufferInfo: MediaCodec.BufferInfo = MediaCodec.BufferInfo()
    // Luma plane size (width * height) of one frame, cached in init().
    private var frameSize = 0
    // Pipeline that feeds the codec: a private thread when asyn, otherwise
    // the shared GL event pipeline.
    private var mPipeline: Pipeline = if (asyn) {
        EventPipeline.create("VideoHardEncoderImpl")
    } else {
        GLEventPipeline.INSTANCE
    }
    // Pipeline that drains the codec. Never the GL pipeline: draining there
    // would block rendering, so a dedicated thread is created in that case.
    private var mDequeuePipeline: Pipeline = if (GLEventPipeline.isMe(mPipeline)) {
        EventPipeline.create("VideoHardEncoderImpl")
    } else {
        mPipeline
    }

    init {
        initCodec()
        mPipeline.queueEvent(Runnable { init() })
    }

    /**
     * Creates the MediaCodec encoder instance for the configured video format.
     * Leaves [codec] null (and logs) when the mime type is unsupported or
     * creation fails.
     */
    @RequiresApi(Build.VERSION_CODES.JELLY_BEAN)
    private fun initCodec() {
        val f = CodecHelper.createVideoFormat(context)
        if (null == f) {
            TLog.i("Unsupport codec type")
            return
        }
        format = f
        TLog.i("create codec: ${format.getString(MediaFormat.KEY_MIME)}")
        try {
            codec = MediaCodec.createEncoderByType(format.getString(MediaFormat.KEY_MIME))
        } catch (e: Exception) {
            TLog.e("Can not create codec")
        } finally {
            if (null == codec)
                TLog.e("Can not create codec")
        }
    }

    /**
     * Configures and starts the codec on the pipeline thread, then notifies
     * [onPreparedListener]. No-op (with a log) if codec creation failed.
     */
    private fun init() {
        if (null == codec) {
            TLog.e("codec is null")
            return
        }
        frameSize = context.video.width * context.video.height
        pTimer.reset()
        codec!!.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        codec!!.start()
        onPreparedListener?.onPrepared(this)
    }

    /** Starts (or resumes) accepting preview frames for encoding. */
    override fun start() {
        synchronized(mEncodingSyn) {
            pTimer.start()
            mEncoding = true
        }
    }

    /** Stops accepting new preview frames; the codec itself keeps running. */
    override fun pause() {
        synchronized(mEncodingSyn) {
            mEncoding = false
        }
    }

    /**
     * Drains any remaining output, stops and releases the codec, and shuts
     * down any privately owned pipelines.
     */
    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    override fun stop() {
        TLog.i("Video encoder stop")
        pause()
        if (mFrameCount > 0) {
            // Drain whatever the encoder still holds in its output queue.
            while (dequeue()) {
            }
        }
        this.nsecs = Long.MIN_VALUE
        mFrameCount = 0
        // NOTE(review): dequeue() events already queued on mDequeuePipeline
        // may still run after release(); they fail with an exception that
        // dequeue() swallows. Consider quitting mDequeuePipeline first.
        codec!!.stop()
        codec!!.release()
        mPipeline.queueEvent(Runnable {
            codecWrapper?.release()
            codecWrapper = null
        })
        if (!GLEventPipeline.isMe(mPipeline)) {
            mPipeline.quit()
        }
        if (!GLEventPipeline.isMe(mDequeuePipeline)) {
            mDequeuePipeline.quit()
        }
    }

    override fun setOnSampleListener(listener: Encoder.OnSampleListener) {
        onSampleListener = listener
    }

    /** Records an externally supplied timestamp (nanoseconds -> microseconds * 1000). */
    override fun setPresentationTime(nsecs: Long) {
        mPipeline.queueEvent(Runnable {
            this.nsecs = nsecs * 1000
        })
    }

    /**
     * Blocks the caller until the codec has reported its negotiated output
     * format, then returns it. The output format (not the input format) is
     * what must be handed to the muxer.
     *
     * The original implementation re-queued itself recursively and could
     * deadlock a single-threaded pipeline (the pipeline thread ended up in
     * wait() so the re-queued event never ran); it also lost the notification
     * when notifyAll() ran before the caller reached wait(). Fixed by polling
     * in a single event and guarding the wait with a flag.
     */
    override fun getOutputFormat(): MediaFormat {
        mPipeline.queueEvent(Runnable {
            // Poll the output queue on the pipeline thread until the codec
            // announces its format, then release the waiting caller.
            while (MediaCodec.INFO_OUTPUT_FORMAT_CHANGED !=
                    codec!!.dequeueOutputBuffer(bufferInfo, WAIT_TIME)) {
                // keep polling
            }
            synchronized(outputFormatLock) {
                outputFormatReady = true
                outputFormatLock.notifyAll()
            }
        })
        synchronized(outputFormatLock) {
            // Predicate loop: immune to spurious wake-ups and to the notify
            // arriving before this thread starts waiting.
            while (!outputFormatReady) {
                outputFormatLock.wait()
            }
        }
        return codec!!.outputFormat
    }

    @Deprecated("Invalid")
    override fun onFrameAvailable(surfaceTexture: SurfaceTexture?) {

    }

    /** Camera preview callback entry point; hands the NV21 frame to the pipeline. */
    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    override fun onPreviewFrame(data: ByteArray, camera: Camera?) {
        if (!mEncoding) return
        mPipeline.queueEvent(Runnable { encode(data) })
    }

    /**
     * Converts one NV21 frame to NV12, queues it into the codec's input
     * buffers, and schedules a drain of the output queue.
     */
    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    private fun encode(buffer: ByteArray) {
        synchronized(mEncodingSyn) {
            pTimer.record()
            val inputBuffIndex = codec!!.dequeueInputBuffer(-1)
            if (inputBuffIndex >= 0) {
                val inputBuffer = codec!!.getInputBuffer(inputBuffIndex)
                if (inputBuffer != null) {
                    inputBuffer.clear()
                    // NV21 and NV12 are both 12 bpp; only the chroma order
                    // differs, so the converted frame has the same size.
                    val data420sp = ByteArray(context.video.width * context.video.height * 3 / 2)
                    YuvConver.convertNv21ToNv12(buffer, data420sp, frameSize)
                    inputBuffer.put(data420sp)
                    // Queue exactly the bytes written into the input buffer
                    // (the raw camera buffer may be padded and larger).
                    // NOTE(review): presentationTimeUs / 1000 looks like
                    // milliseconds, but queueInputBuffer() expects
                    // microseconds - confirm PresentationTimer's unit.
                    codec!!.queueInputBuffer(inputBuffIndex, 0, data420sp.size, pTimer.presentationTimeUs / 1000, 0)
                }
            }
            mDequeuePipeline.queueEvent(Runnable { dequeue() })
        }
    }

    /**
     * Pops one buffer from the encoder's output queue and forwards it to the
     * listeners. Returns true when an encoded buffer was consumed, false when
     * the queue timed out or only reported a state change.
     */
    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    @SuppressLint("WrongConstant")
    private fun dequeue(): Boolean {
        try {
            // Synchronous poll with a bounded wait of WAIT_TIME microseconds.
            val flag = codec!!.dequeueOutputBuffer(mBufferInfo, WAIT_TIME)
            when (flag) {
                MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED -> {
                    // Nothing to do: getOutputBuffer() below is unaffected.
                    TLog.i("INFO_OUTPUT_BUFFERS_CHANGED")
                }
                MediaCodec.INFO_TRY_AGAIN_LATER -> {
                    // Timed out waiting for output; caller may retry later.
                    return false
                }
                MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
                    // The negotiated output format MUST be handed to the
                    // muxer; the input format is not equivalent and using it
                    // produces a broken mp4.
                    TLog.i("INFO_OUTPUT_FORMAT_CHANGED")
                    onSampleListener?.onFormatChanged(this, codec!!.outputFormat)
                }
                else -> {
                    if (flag < 0) return false // unknown negative status: skip
                    // getOutputBuffer() replaces the deprecated
                    // codec.outputBuffers[flag] (invalid after
                    // INFO_OUTPUT_BUFFERS_CHANGED); this method already
                    // requires API 21.
                    val buffer = codec!!.getOutputBuffer(flag)
                    if (null != buffer) {
                        val endOfStream = mBufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM
                        if (endOfStream == 0) {
                            // No BUFFER_FLAG_END_OF_STREAM: payload is valid.
                            if (mBufferInfo.size != 0) {
                                ++mFrameCount
                                buffer.position(mBufferInfo.offset)
                                buffer.limit(mBufferInfo.offset + mBufferInfo.size)
                                onSampleListener?.onSample(this, CodecHelper.copy(mBufferInfo), buffer)
                                onRecordListener?.onRecord(this, mBufferInfo.presentationTimeUs)
                            }
                        }
                        // Always hand the buffer back so the codec can reuse
                        // it; one drain cycle is now complete.
                        codec!!.releaseOutputBuffer(flag, false)
                        return true
                    }
                }
            }
        } catch (e: Exception) {
            e.printStackTrace()
        }
        return false
    }

    companion object {
        // Max time dequeueOutputBuffer() blocks, in MICROseconds (10 ms).
        private const val WAIT_TIME = 10000L
    }
}