package com.ehome.images2video

import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.media.MediaCodec
import android.media.MediaCodecInfo.CodecCapabilities
import android.media.MediaFormat
import android.media.MediaMuxer
import android.os.Bundle
import android.os.Handler
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.blankj.utilcode.util.PathUtils
import timber.log.Timber
import java.io.File
import java.nio.ByteBuffer
import kotlin.concurrent.thread


/**
 * Encodes the images found in `Download/images` into an HEVC MP4 at
 * `Download/out.mp4`, using [MediaCodec] in ByteBuffer (non-Surface) input
 * mode and [MediaMuxer] for container writing.
 *
 * The whole pipeline runs on a single background thread started in [onCreate]:
 * init codec/muxer -> feed ARGB bitmaps converted to NV12 -> drain encoder ->
 * queue EOS when the image list is exhausted -> stop/release everything.
 */
class MainActivity : AppCompatActivity() {

    private val mFrameRate: Int = 15
    private var mMuxerStarted: Boolean = false
    private var startTime: Long = 0

    private var isRunning: Boolean = false

    private var colorFormat: Int = 0

    // Timeout for dequeueInputBuffer/dequeueOutputBuffer, in microseconds.
    private val timeoutUs: Long = 10000

    // Output file and source-image directory, both under public Downloads.
    private val outPath = File(PathUtils.getExternalDownloadsPath(), "out.mp4")
    private val imagesFile: File = File(PathUtils.getExternalDownloadsPath(), "images")

    // Encoder frame size. NOTE(review): input bitmaps are assumed to already be
    // 1024x768; getNV12() does not scale — confirm against the image set.
    private val width = 1024
    private val height = 768

    lateinit var mediaCodec: MediaCodec
    lateinit var mediaMuxer: MediaMuxer

    var mTrackIndex: Int = 0

    var imageIndex = 0

    // Snapshot of the image directory, taken once and sorted by name so the
    // frame order is deterministic. BUG FIX: the original called listFiles()
    // twice per frame and would NPE if it returned null (missing dir / no
    // permission); filesystem order is also unspecified.
    private val imageFiles: List<File> by lazy {
        imagesFile.listFiles()?.sortedBy { it.name } ?: emptyList()
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        // Run the whole encode pipeline off the main thread.
        thread(start = true) {
            init()

            encode(getCurrentBitmap())
        }
    }

    /**
     * Creates and starts the HEVC encoder and the MP4 muxer.
     *
     * NOTE(review): COLOR_FormatYUV420SemiPlanar is hard-coded; not every
     * encoder supports it. TODO confirm via MediaCodecInfo.getCapabilitiesForType
     * on the selected codec.
     */
    private fun init() {
        mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC)
        // Muxer that writes the encoded samples into an MP4 container.
        mediaMuxer = MediaMuxer(outPath.absolutePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)

        colorFormat = CodecCapabilities.COLOR_FormatYUV420SemiPlanar

        val mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, width, height)
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat)
        // 4 bits per pixel per second — a rough quality heuristic.
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 4 * width * height)
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mFrameRate)
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5)
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        mediaCodec.start()
    }

    /**
     * Feeds bitmaps into the encoder until [getCurrentBitmap] returns null,
     * then queues an end-of-stream buffer, drains the remaining output and
     * releases codec + muxer.
     *
     * @param bitmap the first frame, or null to produce an empty stream.
     */
    private fun encode(bitmap: Bitmap?) {
        var currentFrame = bitmap

        isRunning = true
        var generateIndex: Long = 0
        val info = MediaCodec.BufferInfo()
        startTime = System.currentTimeMillis()
        while (isRunning) {
            val inputBufferIndex = mediaCodec.dequeueInputBuffer(timeoutUs)

            // BUG FIX: was `>= 1`, which silently dropped valid buffer index 0
            // and stalled the pipeline whenever the codec handed it out.
            if (inputBufferIndex >= 0) {
                val ptsUsec: Long = computePresentationTime(generateIndex)
                if (currentFrame == null) {
                    // No more frames: queue an empty EOS buffer and finish.
                    mediaCodec.queueInputBuffer(
                        inputBufferIndex, 0, 0, ptsUsec,
                        MediaCodec.BUFFER_FLAG_END_OF_STREAM
                    )
                    isRunning = false
                    drainEncoder(true, info)
                    codeFinish()
                    Handler(mainLooper).post {
                        Toast.makeText(this@MainActivity, "合成成功", Toast.LENGTH_SHORT).show()
                    }
                } else {
                    val data: ByteArray = getNV12(width, height, currentFrame)
                    // getInputBuffer() replaces the deprecated inputBuffers array.
                    val inputBuffer: ByteBuffer = mediaCodec.getInputBuffer(inputBufferIndex)
                        ?: throw IllegalStateException("input buffer $inputBufferIndex was null")
                    inputBuffer.clear()
                    inputBuffer.put(data)
                    // Hand the NV12 frame to the encoder.
                    mediaCodec.queueInputBuffer(inputBufferIndex, 0, data.size, ptsUsec, 0)
                    drainEncoder(false, info)
                    currentFrame = getCurrentBitmap()
                }
                generateIndex++
            } else {
                Timber.d("input buffer not available")
                try {
                    Thread.sleep(10)
                } catch (e: InterruptedException) {
                    e.printStackTrace()
                }
            }
        }
    }

    /**
     * Returns the next bitmap to encode, or null when the image list is
     * exhausted (which ends the encode loop). A corrupt file makes
     * decodeFile return null and therefore also ends the stream —
     * NOTE(review): confirm that is acceptable, or skip bad files instead.
     */
    private fun getCurrentBitmap(): Bitmap? {
        if (imageIndex >= imageFiles.size) {
            return null
        }
        val options = BitmapFactory.Options().apply {
            inPreferredConfig = Bitmap.Config.ARGB_8888
        }
        val bitmap = BitmapFactory.decodeFile(imageFiles[imageIndex].absolutePath, options)
        imageIndex++
        return bitmap
    }

    /**
     * Presentation timestamp in microseconds for the given frame index.
     * The constant 132 us start offset is kept from the original code
     * (its purpose is not documented — TODO confirm it is intentional).
     */
    private fun computePresentationTime(frameIndex: Long): Long {
        return 132 + frameIndex * 1000000 / mFrameRate
    }

    /**
     * Converts an ARGB_8888 bitmap of the given size into an NV12
     * (YUV420 semi-planar) byte array sized w*h*3/2.
     */
    private fun getNV12(inputWidth: Int, inputHeight: Int, bitmap: Bitmap): ByteArray {
        val argb = IntArray(inputWidth * inputHeight)
        bitmap.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight)
        val yuv = ByteArray(inputWidth * inputHeight * 3 / 2)
        encodeYUV420SP(yuv, argb, inputWidth, inputHeight)
        return yuv
    }

    /**
     * Pulls every pending output buffer from the encoder and writes it to the
     * muxer. Starts the muxer on the first INFO_OUTPUT_FORMAT_CHANGED.
     *
     * @param endOfStream when true, keeps spinning until the encoder emits
     *        BUFFER_FLAG_END_OF_STREAM instead of returning on TRY_AGAIN.
     *
     * BUG FIX: the original called signalEndOfInputStream() here, which is
     * only legal with a Surface input and always threw (silently swallowed)
     * in this ByteBuffer-mode pipeline; EOS is already signalled via
     * queueInputBuffer(..., BUFFER_FLAG_END_OF_STREAM) in encode().
     */
    private fun drainEncoder(endOfStream: Boolean, bufferInfo: MediaCodec.BufferInfo) {
        while (true) {
            val encoderStatus = mediaCodec.dequeueOutputBuffer(bufferInfo, timeoutUs)
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (!endOfStream) {
                    break // out of while
                } else {
                    Timber.i("no output available, spinning to await EOS")
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (mMuxerStarted) {
                    throw RuntimeException("format changed twice")
                }
                // The codec's actual output format (with csd buffers) is only
                // available here; register the track and start the muxer.
                val mediaFormat = mediaCodec.outputFormat
                mTrackIndex = mediaMuxer.addTrack(mediaFormat)
                mediaMuxer.start()
                mMuxerStarted = true
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // Obsolete with getOutputBuffer(); nothing to refresh.
            } else if (encoderStatus < 0) {
                Timber.i("unexpected result from encoder.dequeueOutputBuffer: $encoderStatus")
            } else {
                // getOutputBuffer() replaces the deprecated outputBuffers array.
                val outputBuffer: ByteBuffer = mediaCodec.getOutputBuffer(encoderStatus)
                    ?: throw RuntimeException(
                        "encoderOutputBuffer "
                                + encoderStatus + " was null"
                    )
                if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
                    // Codec config data is carried in the track format; skip it.
                    Timber.d("ignoring BUFFER_FLAG_CODEC_CONFIG")
                    bufferInfo.size = 0
                }
                if (bufferInfo.size != 0) {
                    if (!mMuxerStarted) {
                        throw RuntimeException("muxer hasn't started")
                    }

                    // adjust the ByteBuffer values to match BufferInfo
                    outputBuffer.position(bufferInfo.offset)
                    outputBuffer.limit(bufferInfo.offset + bufferInfo.size)
                    Timber.d(
                        "BufferInfo: " + bufferInfo.offset + ","
                                + bufferInfo.size + ","
                                + bufferInfo.presentationTimeUs
                    )
                    try {
                        mediaMuxer.writeSampleData(mTrackIndex, outputBuffer, bufferInfo)
                    } catch (e: Exception) {
                        // NOTE(review): swallowing write failures keeps the
                        // loop alive but may produce a truncated file.
                        Timber.i(e, "writeSampleData failed")
                    }
                }

                mediaCodec.releaseOutputBuffer(encoderStatus, false)

                if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
                    if (!endOfStream) {
                        Timber.i("reached end of stream unexpectedly")
                    } else {
                        Timber.i("end of stream reached")
                    }
                    break // out of while
                }
            }
        }
    }

    /**
     * Converts ARGB pixels to NV12 (YUV420 semi-planar): a full-resolution Y
     * plane followed by interleaved Cb/Cr at quarter resolution (one pair per
     * 2x2 pixel block, sampled from its top-left pixel).
     */
    private fun encodeYUV420SP(yuv420sp: ByteArray, argb: IntArray, width: Int, height: Int) {
        val frameSize = width * height
        var yIndex = 0
        var uvIndex = frameSize
        var index = 0
        for (j in 0 until height) {
            for (i in 0 until width) {
                // Alpha is ignored; extract the 8-bit channels.
                val r = argb[index] shr 16 and 0xff
                val g = argb[index] shr 8 and 0xff
                val b = argb[index] and 0xff

                // BT.601 full-swing RGB -> YCbCr (fixed point, +128 rounds).
                // The original code had Cb/Cr in confusingly swapped variable
                // names; the byte order written below is unchanged.
                val y = (66 * r + 129 * g + 25 * b + 128 shr 8) + 16
                val cb = (-38 * r - 74 * g + 112 * b + 128 shr 8) + 128
                val cr = (112 * r - 94 * g - 18 * b + 128 shr 8) + 128

                yuv420sp[yIndex++] = y.coerceIn(0, 255).toByte()
                if (j % 2 == 0 && index % 2 == 0) {
                    // NV12 interleaving: Cb first, then Cr.
                    yuv420sp[uvIndex++] = cb.coerceIn(0, 255).toByte()
                    yuv420sp[uvIndex++] = cr.coerceIn(0, 255).toByte()
                }
                index++
            }
        }
    }

    /**
     * Stops the encode loop and releases codec and muxer.
     *
     * BUG FIX: the original compared lateinit properties to null (always
     * non-null once initialized, and a crash if not) and leaked the muxer
     * whenever it had never been started — release() must run regardless,
     * stop() only after start().
     */
    fun codeFinish() {
        isRunning = false
        if (::mediaCodec.isInitialized) {
            mediaCodec.stop()
            mediaCodec.release()
        }

        if (::mediaMuxer.isInitialized) {
            if (mMuxerStarted) {
                mediaMuxer.stop()
            }
            mediaMuxer.release()
            mMuxerStarted = false
        }
    }

}