package com.abxh.media.audio

import android.annotation.SuppressLint
import android.media.*
import android.util.Log
import android.util.Size
import android.view.SurfaceHolder
import androidx.camera.core.ImageProxy
import androidx.camera.core.internal.utils.ImageUtil
import androidx.lifecycle.viewModelScope
import com.abxh.jetpack.IViewModel
import com.abxh.media.audio.utils.CameraUtils
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.buffer
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.flow
import okio.BufferedSink
import okio.BufferedSource
import okio.Okio
import java.io.File
import java.io.IOException
import java.util.concurrent.ArrayBlockingQueue
import java.util.concurrent.TimeUnit

/**
 * Created by dab on 2021/9/11 11:10
 */
class MediaViewModel : IViewModel() {
    // Bounded queue of raw NV21 camera frames awaiting encoding; the bound keeps a
    // stalled encoder from exhausting memory.
    private val queue = ArrayBlockingQueue<ByteArray>(20)

    // H.264 video encoder; created lazily in initVideo(). Null until startVideo() runs.
    var mediaCodec: MediaCodec? = null
    private var width: Int = 0
    private var height: Int = 0
    // True while video capture is active; gates put().
    var isStart = false

    /**
     * Reads a raw H.264 byte stream from [buffer], accumulating bytes into a frame buffer.
     *
     * NOTE(review): this routine is incomplete — it never configures a decoder nor renders
     * to [holder]; it only buffers bytes. The original code also never advanced `frameLen`
     * (it was a `val`) and passed the -1 returned by read() at end-of-stream straight into
     * System.arraycopy (negative-length crash) while spinning forever. Both defects are
     * fixed below; NAL-unit parsing and decoding remain TODO.
     */
    fun playVideo(holder: SurfaceHolder, buffer: BufferedSource) {
        // A single H.264 frame rarely exceeds ~200 KB; enlarge this if decoding fails.
        val FRAME_MAX_LEN = 300 * 1024
        // Holds one complete frame's worth of accumulated data.
        val frame = ByteArray(FRAME_MAX_LEN)
        // Number of valid bytes currently in `frame` — must be mutable to accumulate.
        var frameLen = 0
        val readData = ByteArray(10 * 1024)

        viewModelScope.launch(Dispatchers.IO) {
            while (isActive) {
                val readLen = buffer.read(readData)
                if (readLen == -1) break // end of stream — stop instead of spinning
                if (frameLen + readLen < FRAME_MAX_LEN) {
                    System.arraycopy(readData, 0, frame, frameLen, readLen)
                    frameLen += readLen
                } else {
                    // Would overflow the frame buffer: drop accumulated data and resync.
                    frameLen = 0
                }
                // TODO: scan for NAL start codes and feed complete frames to a
                // MediaCodec decoder configured with holder.surface.
            }
        }
    }

    /**
     * Creates and configures the H.264 encoder for the given capture [size].
     *
     * Width and height are swapped in the MediaFormat because the camera image is
     * rotated 90° before being fed to the encoder (see [encoder]).
     */
    private fun initVideo(size: Size) {
        width = size.width
        height = size.height
        val mediaFormat =
            MediaFormat.createVideoFormat(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                size.height,
                size.width
            ) // swapped: the frame is rotated 90° before encoding
        // NOTE(review): COLOR_FormatYUV420Flexible with manually packed NV12 input works
        // on many devices but is not guaranteed by the flexible-format contract — verify
        // on target hardware.
        mediaFormat.setInteger(
            MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible
        )
        // Bit rate: higher values give larger, sharper output.
        mediaFormat.setInteger(
            MediaFormat.KEY_BIT_RATE,
            8 * size.width * size.height
        )
        // Frame rate: frames per second — higher is smoother but larger.
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30)
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)
        try {
            mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
        } catch (e: IOException) {
            // Leaves mediaCodec null; the safe-call below then skips configuration.
            e.printStackTrace()
        }
        mediaCodec?.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
    }

    /**
     * Starts H.264 encoding of queued camera frames (see [put]) into [file].
     * The sink is closed in a NonCancellable block so cleanup survives scope cancellation.
     */
    fun startVideo(file: File, size: Size) {
        initVideo(size)
        mediaCodec?.start()
        isStart = true
        queue.clear() // drop any stale frames from a previous session
        viewModelScope.launch(Dispatchers.IO) {
            val buffer = Okio.buffer(Okio.sink(file))
            try {
                encoder(buffer, this)
            } finally {
                withContext(NonCancellable) {
                    buffer.close()
                }
            }
        }
    }

    /**
     * Records microphone audio (44.1 kHz stereo PCM), encodes it to AAC-LC and writes
     * ADTS-framed packets to [file]. Recorder, codec input and the sink are released in
     * a NonCancellable block so cleanup survives scope cancellation.
     */
    fun startAudio(file: File) {
        viewModelScope.launch(Dispatchers.IO) {
            val minBufferSize = AudioRecord.getMinBufferSize(
                44100,
                AudioFormat.CHANNEL_IN_STEREO,
                AudioFormat.ENCODING_PCM_16BIT
            )
            val audioRecord = AudioRecord(
                MediaRecorder.AudioSource.MIC,
                44100,
                AudioFormat.CHANNEL_IN_STEREO,
                AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize
            )
            audioRecord.startRecording()
            val format = MediaFormat.createAudioFormat(
                MediaFormat.MIMETYPE_AUDIO_AAC,
                44100,
                AudioFormat.CHANNEL_IN_STEREO
            )
            format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_AAC)
            format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2)
            format.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_STEREO)
            format.setInteger(
                MediaFormat.KEY_AAC_PROFILE,
                MediaCodecInfo.CodecProfileLevel.AACObjectLC
            )
            format.setInteger(MediaFormat.KEY_BIT_RATE, 96000)
            format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, minBufferSize * 2)
            val mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC)
            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
            val buffer = Okio.buffer(Okio.sink(file))
            try {
                encoderAudio(buffer, audioRecord, minBufferSize, mediaCodec, this)
            } finally {
                withContext(NonCancellable) {
                    Log.e("TAG555555", "startAudio: ")
                    audioRecord.stop()
                    audioRecord.release()
                    buffer.close()
                }
            }
        }
    }

    /**
     * Pulls PCM from [audioRecord], feeds it to the AAC encoder, and writes each encoded
     * packet prefixed with a 7-byte ADTS header to [fs]. The flow's buffer(20) decouples
     * the capture loop from the drain loop.
     */
    private suspend fun encoderAudio(
        fs: BufferedSink,
        audioRecord: AudioRecord,
        minBufferSize: Int,
        mediaCodec: MediaCodec,
        coroutineScope: CoroutineScope
    ) {
        mediaCodec.start()
        val bufferInfo = MediaCodec.BufferInfo()
        flow {
            val outData = ByteArray(minBufferSize)
            while (coroutineScope.isActive) {
                val len = audioRecord.read(outData, 0, minBufferSize)
                Log.e("TAG222", "encoder:Audio $len")
                if (len > 0) {
                    val inputBufferIndex: Int = mediaCodec.dequeueInputBuffer(-1)
                    if (inputBufferIndex >= 0) {
                        val inputBuffer = mediaCodec.getInputBuffer(inputBufferIndex)
                        inputBuffer?.apply {
                            clear()
                            put(outData, 0, len)
                            mediaCodec.queueInputBuffer(
                                inputBufferIndex,
                                0,
                                len,
                                // presentationTimeUs is in MICROseconds; the original
                                // passed milliseconds (currentTimeMillis).
                                System.nanoTime() / 1000,
                                0
                            )
                        }
                    }
                    emit(len)
                }
            }
        }
            .buffer(20)
            .collect {
                var outputBufferIndex: Int = mediaCodec.dequeueOutputBuffer(bufferInfo, -1)
                while (coroutineScope.isActive && outputBufferIndex >= 0) {
                    val outBitsSize: Int = bufferInfo.size
                    val outPacketSize = outBitsSize + 7 // the ADTS header is 7 bytes
                    val outputBuffer = mediaCodec.getOutputBuffer(outputBufferIndex)
                    val outData = ByteArray(outPacketSize)
                    CameraUtils.addADTStoPacket(outData, outPacketSize)
                    outputBuffer?.let {
                        it.position(bufferInfo.offset)
                        it.limit(bufferInfo.offset + outBitsSize)
                        outputBuffer.get(outData, 7, outBitsSize) // payload after the header
                        Log.e("TAG111", "encoder:Audio ${outData.size}")
                        fs.write(outData)
                    }
                    mediaCodec.releaseOutputBuffer(outputBufferIndex, false)
                    outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 1000)
                }
            }
    }

    // dequeueOutputBuffer sentinels:
    //   INFO_TRY_AGAIN_LATER = -1       timed out
    //   INFO_OUTPUT_FORMAT_CHANGED = -2 media format changed
    //   INFO_OUTPUT_BUFFERS_CHANGED = -3 buffers changed (deprecated)
    /**
     * Drains NV21 frames from [queue], rotates them 90°, converts NV21→NV12, runs them
     * through the H.264 encoder and writes encoded bytes to [fs]. Loops until the
     * enclosing coroutine is cancelled.
     */
    private fun encoder(fs: BufferedSink, coroutineScope: CoroutineScope) {
        mediaCodec?.let {
            val bufferInfo = MediaCodec.BufferInfo()
            while (coroutineScope.isActive) {
                // Short poll so cancellation is noticed even when no frames arrive.
                val yuv = queue.poll(100, TimeUnit.MILLISECONDS) ?: continue
                val rotateYUV420Degree90 = CameraUtils.rotateYUV420Degree90(yuv, width, height)
                val nv12 = ByteArray(rotateYUV420Degree90.size)
                CameraUtils.NV21ToNV12(rotateYUV420Degree90, nv12, width, height)
                val inputIndex = it.dequeueInputBuffer(-1)
                if (inputIndex >= 0) {
                    // presentationTimeUs is in MICROseconds; the original passed
                    // currentTimeMillis()/1000, i.e. seconds.
                    val pts = System.nanoTime() / 1000
                    val inputBuffer = it.getInputBuffer(inputIndex)
                    inputBuffer?.apply {
                        clear()
                        put(nv12)
                    }
                    it.queueInputBuffer(inputIndex, 0, nv12.size, pts, 0)
                }

                var outputBufferIndex = it.dequeueOutputBuffer(bufferInfo, 1000)
                while (coroutineScope.isActive && outputBufferIndex >= 0) {
                    val outputBuffer = it.getOutputBuffer(outputBufferIndex)
                    val outData = ByteArray(bufferInfo.size)
                    outputBuffer?.let {
                        outputBuffer.get(outData)
                    }
                    fs.write(outData)
                    fs.flush()
                    it.releaseOutputBuffer(outputBufferIndex, false)
                    outputBufferIndex = it.dequeueOutputBuffer(bufferInfo, 1000)
                }
            }
        }
    }

    /**
     * Enqueues one camera frame (converted to NV21) for encoding. No-op unless capture
     * has started.
     */
    @SuppressLint("RestrictedApi")
    fun put(imageProxy: ImageProxy) {
        if (!isStart) return
        mediaCodec?.let {
            // offer() instead of put(): never block the CameraX analyzer callback thread;
            // if the encoder can't keep up the frame is simply dropped.
            queue.offer(ImageUtil.yuv_420_888toNv21(imageProxy))
        }
    }
}