package com.xyf.rtmppushdemo.stream

import android.annotation.SuppressLint
import android.media.AudioFormat
import android.media.AudioRecord
import android.media.MediaRecorder
import com.xyf.rtmppushdemo.listener.OnFrameDataCallback
import com.xyf.rtmppushdemo.param.AudioParam
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors

@SuppressLint("MissingPermission")
class AudioStream constructor(var mCallback: OnFrameDataCallback, audioParam: AudioParam) {

    // Both flags are written on the caller's thread and read on the executor
    // thread; @Volatile guarantees the capture loop observes the updates.
    // (Without it the JMM permits the worker to spin on a stale isLiving forever.)
    @Volatile
    private var isMute = false

    @Volatile
    private var isLiving = false

    // Bytes per encoder frame: samples reported by the encoder * 2
    // (16-bit PCM => 2 bytes per sample).
    private val inputSamples: Int

    private val executors: ExecutorService = Executors.newSingleThreadExecutor()
    private val audioRecord: AudioRecord

    init {
        val channelConfig: Int = if (audioParam.numChannel == 2) {
            AudioFormat.CHANNEL_IN_STEREO
        } else {
            AudioFormat.CHANNEL_IN_MONO
        }
        // Tell the encoder side our sample rate / channel layout first, so
        // getInputSamples() below reflects the negotiated frame size.
        mCallback.onAudioCodecInfo(audioParam.sampleRate, audioParam.numChannel)
        inputSamples = mCallback.getInputSamples() * 2

        // Use twice the system minimum, but never less than one full encoder frame.
        val minBufferSize = AudioRecord.getMinBufferSize(
            audioParam.sampleRate,
            channelConfig, audioParam.audioFormat
        ) * 2
        val bufferSizeInBytes = minBufferSize.coerceAtLeast(inputSamples)
        audioRecord = AudioRecord(
            MediaRecorder.AudioSource.MIC, audioParam.sampleRate,
            channelConfig, audioParam.audioFormat, bufferSizeInBytes
        )
    }

    /**
     * Starts the capture loop on the background executor.
     * Idempotent: a second call while already live is a no-op, so we never
     * have two tasks reading the same AudioRecord concurrently.
     */
    fun startLive() {
        if (isLiving) return
        isLiving = true
        executors.submit(AudioTask())
    }

    /**
     * Signals the capture loop to exit; the worker stops the AudioRecord
     * on its way out. Safe to call from any thread.
     */
    fun stopLive() {
        isLiving = false
    }

    /** Mutes/unmutes the stream; while muted, captured frames are discarded. */
    fun setMute(mute: Boolean) {
        isMute = mute
    }

    /**
     * Releases the recorder and the worker thread. Call once the stream is
     * no longer needed (after stopLive()); the instance cannot be restarted.
     */
    fun release() {
        isLiving = false
        executors.shutdown()
        audioRecord.release()
    }

    /** Background task: reads PCM from the mic and forwards frames to the callback. */
    inner class AudioTask : Runnable {

        override fun run() {
            audioRecord.startRecording()
            val buffer = ByteArray(inputSamples)
            while (isLiving) {
                // Always drain the AudioRecord so its internal buffer cannot
                // overflow (and so a muted stream does not busy-spin);
                // simply drop the data when muted.
                val len = audioRecord.read(buffer, 0, buffer.size)
                if (!isMute && len > 0) {
                    mCallback.onAudioFrame(buffer)
                }
            }
            audioRecord.stop()
        }
    }
}