package com.zcwang.audio_spectrum

import android.app.Activity
import android.content.Context
import android.media.AudioFormat
import android.media.AudioRecord
import android.media.MediaRecorder
import android.net.Uri
import android.os.Handler
import android.util.Log
import com.google.android.exoplayer2.DefaultRenderersFactory
import com.google.android.exoplayer2.ExoPlayer
import com.google.android.exoplayer2.MediaItem
import com.google.android.exoplayer2.Renderer
import com.google.android.exoplayer2.audio.AudioCapabilities
import com.google.android.exoplayer2.audio.AudioRendererEventListener
import com.google.android.exoplayer2.audio.AudioSink
import com.google.android.exoplayer2.audio.DefaultAudioSink
import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer
import com.google.android.exoplayer2.mediacodec.MediaCodecSelector
import com.google.android.exoplayer2.source.ProgressiveMediaSource
import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory

import io.flutter.embedding.engine.plugins.FlutterPlugin
import io.flutter.embedding.engine.plugins.activity.ActivityAware
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding
import io.flutter.plugin.common.EventChannel
import io.flutter.plugin.common.EventChannel.EventSink
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
import io.flutter.plugin.common.MethodChannel.Result
import org.apache.commons.math3.complex.Complex
import org.apache.commons.math3.transform.DftNormalization
import org.apache.commons.math3.transform.FastFourierTransformer
import org.apache.commons.math3.transform.TransformType
import java.lang.System.arraycopy
import kotlin.math.log2

/**
 * AudioSpectrumPlugin — streams audio-spectrum (FFT) data from Android to Dart.
 *
 * Two sources are supported:
 *  - Music playback: a URL is played with ExoPlayer; [FFTAudioProcessor] taps the
 *    decoded PCM and pushes FFT frames through the music event channel.
 *  - Microphone: PCM is captured with [AudioRecord]; each 2048-sample window is
 *    Hann-windowed, FFT'd (commons-math3), A-weighted and reduced to
 *    [frequencyBands] logarithmic bands, then pushed through the record event channel.
 */
class AudioSpectrumPlugin : FlutterPlugin, MethodCallHandler, ActivityAware {
    private val TAG: String = "AudioSpectrumPlugin"

    private lateinit var mContext: Context

    // NOTE(review): set in onAttachedToActivity; playback/recording code assumes an
    // activity is attached before it starts — confirm the Dart side guarantees this.
    private lateinit var mActivity: Activity

    // Method channels, one per command (mirrors the Dart side).
    private lateinit var audioSpectrumChannel: MethodChannel
    private lateinit var stopPlayerChannel: MethodChannel
    private lateinit var startRecordChannel: MethodChannel
    private lateinit var stopRecordChannel: MethodChannel

    private var player: ExoPlayer? = null

    // Event sinks used to push spectrum frames to Dart; null until Dart listens.
    private var musicSpectrumSink: EventSink? = null
    private var recordSpectrumSink: EventSink? = null
    private lateinit var musicFftChannel: EventChannel
    private lateinit var recordFftChannel: EventChannel

    // Usable FFT bins per frame delivered by FFTAudioProcessor (half the window).
    private val size = FFTAudioProcessor.SAMPLE_SIZE / 2
    private val mFft: FloatArray = FloatArray(size)

    // Microphone capture parameters.
    private val audioEncodingPCM = AudioFormat.ENCODING_PCM_16BIT
    private val sampleRateInHz = 44100
    private val channel = AudioFormat.CHANNEL_IN_STEREO
    private var bufferSize: Int = 0
    private var audioRecord: AudioRecord? = null

    // Frequency range and the number of logarithmic bands it is split into.
    val frequencyBands = 25
    val startFrequency = 20.0
    val endFrequency = 20000.0
    private var state: RecordState = RecordState.RECORDED

    private var isPlay: Boolean = false

    enum class RecordState {
        RECORDING,
        RECORDED
    }

    /**
     * Starts microphone capture and spawns a worker thread that reads PCM windows,
     * computes the banded spectrum and pushes it to [recordSpectrumSink] on the UI
     * thread. Any previous recording session is stopped first.
     */
    private fun startRecord() {
        stopRecord()
        bufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, channel, audioEncodingPCM)
        if (bufferSize <= 0) {
            // getMinBufferSize returns ERROR/ERROR_BAD_VALUE for unsupported parameters.
            Log.d(TAG, "startRecord: unsupported audio parameters, minBufferSize=$bufferSize")
            return
        }
        val record = AudioRecord(
            MediaRecorder.AudioSource.MIC,
            sampleRateInHz,
            channel,
            audioEncodingPCM,
            bufferSize
        )
        if (record.state != AudioRecord.STATE_INITIALIZED) {
            // Construction can fail without throwing (e.g. missing RECORD_AUDIO permission).
            Log.d(TAG, "startRecord: AudioRecord failed to initialise")
            record.release()
            return
        }
        audioRecord = record
        state = RecordState.RECORDING
        record.startRecording()
        Thread {
            // 2048 samples: the FFT below requires a power-of-two input length.
            val shortBuffer = ShortArray(2048)
            // Capture a local non-null reference so stopRecord() nulling the field
            // cannot race with the read below.
            while (state == RecordState.RECORDING) {
                val read = record.read(shortBuffer, 0, shortBuffer.size)
                if (read <= 0) {
                    // Error (e.g. record released by stopRecord()) — leave the loop.
                    break
                }
                val data: DoubleArray = fft(shortBuffer)
                mActivity.runOnUiThread {
                    recordSpectrumSink?.success(data)
                }
            }
        }.start()
    }

    /** Hann window of [size] points; tapers the FFT input to reduce spectral leakage. */
    private fun getHanNingWindow(size: Int): DoubleArray {
        return DoubleArray(size) { i -> 0.5 * (1.0 - Math.cos(2.0 * Math.PI * i / (size - 1))) }
    }

    /**
     * Converts one PCM window into banded spectrum data.
     *
     * Samples are Hann-windowed, normalised to [-1, 1], FFT'd and reduced to
     * [frequencyBands] values via [onBand]. [buffer] length must be a power of two
     * (2048, 4096, ... — a commons-math3 FFT requirement).
     */
    private fun fft(buffer: ShortArray): DoubleArray {
        val n = buffer.size
        val window = getHanNingWindow(n)
        // Window + normalise, then lift into complex numbers (imaginary part zero).
        val complexData = Array(n) { i ->
            Complex(buffer[i] * window[i] / Short.MAX_VALUE.toDouble(), 0.0)
        }
        val transformer = FastFourierTransformer(DftNormalization.STANDARD)
        val fftResult = transformer.transform(complexData, TransformType.FORWARD)
        // Magnitude (amplitude) per bin.
        val spectrum = DoubleArray(fftResult.size) { i -> fftResult[i].abs() }
        // Split the 20 Hz - 20 kHz range into frequencyBands logarithmic bands.
        return onBand(n, spectrum).second
    }

    /** Stops and releases the current recording session, if any. */
    private fun stopRecord() {
        if (audioRecord != null) {
            // Flip the state first so the worker thread stops reading.
            state = RecordState.RECORDED
            audioRecord?.stop()
            audioRecord?.release()
            audioRecord = null
        }
    }

    /** Stops and releases the ExoPlayer instance, if any. */
    private fun stopPlayer() {
        isPlay = false
        try {
            player?.let {
                it.stop()
                it.release()
            }
            player = null
        } catch (e: Exception) {
            Log.d(TAG, "stopPlayer: $e")
        }
    }

    /**
     * Reduces a raw magnitude spectrum to [frequencyBands] logarithmically spaced
     * bands between [startFrequency] and [endFrequency].
     *
     * Each band keeps its loudest bin (peak-pick), which is then A-weighted,
     * doubled (to exaggerate the curve), spatially smoothed ([highlightWaveform])
     * and temporally smoothed ([onBlinkOps]).
     *
     * @param bufferSize FFT window length (number of raw bins in [spectrum]).
     * @param spectrum magnitude per FFT bin.
     * @return (dominant frequency in Hz, smoothed per-band amplitudes).
     */
    private fun onBand(
        bufferSize: Int,
        spectrum: DoubleArray
    ): Pair<Double, DoubleArray> {
        // A-weighting table, one weight per FFT bin.
        val aWeights = createFrequencyWeights(bufferSize)
        // (center frequency, peak amplitude) for each band.
        val bandsMaxFreqAmp = mutableListOf<Pair<Double, Double>>()
        // Octave fraction per band: start * 2^(n * frequencyBands) == end, so
        // n = log2(end / start) / bands. BUG FIX: the old code used log2(end - start),
        // which made the band edges overshoot far past the audible range.
        val n = log2(endFrequency / startFrequency) / frequencyBands
        // Width of one FFT bin in Hz (floating point — integer division lost ~0.5 Hz/bin).
        val binHz = sampleRateInHz.toDouble() / bufferSize
        var lowFrequency = startFrequency
        for (i in 1..frequencyBands) {
            // The band's upper edge is 2^n times its lower edge; pin the final band
            // to endFrequency to absorb accumulated rounding.
            val highFrequency =
                if (i == frequencyBands) endFrequency else lowFrequency * Math.pow(2.0, n)
            var maxSpectrumInBands = 0.0
            var centerFrequency = lowFrequency
            for (j in spectrum.indices) {
                val frequency = j * binHz
                // BUG FIX: compare Hz against Hz — the old code mixed bin indices with
                // frequencies and produced empty ranges for most bands.
                if (frequency in lowFrequency..highFrequency && spectrum[j] > maxSpectrumInBands) {
                    maxSpectrumInBands = spectrum[j]
                    centerFrequency = frequency
                }
            }
            bandsMaxFreqAmp.add(Pair(centerFrequency, maxSpectrumInBands))
            lowFrequency = highFrequency // this band's top edge is the next band's bottom
        }
        val endSpectrum = DoubleArray(bandsMaxFreqAmp.size) // amplitudes to draw
        val bandFrequencies = DoubleArray(bandsMaxFreqAmp.size) // matching frequencies
        for (index in 0 until bandsMaxFreqAmp.size) {
            // BUG FIX: look up the A-weight at the band's center-frequency *bin*; the
            // old code indexed the per-bin table with the band index (0..24).
            val bin = (bandsMaxFreqAmp[index].first / binHz).toInt()
                .coerceIn(0, aWeights.size - 1)
            // *2 to exaggerate the curve so the drawn waveform does not look flat.
            endSpectrum[index] = bandsMaxFreqAmp[index].second * aWeights[bin] * 2
            bandFrequencies[index] = bandsMaxFreqAmp[index].first
        }
        val mSmoothSpectrum = highlightWaveform(endSpectrum) // spatial (weighted-average) smoothing
        val mBlinkSpectrum = onBlinkOps(mSmoothSpectrum) // temporal (anti-flicker) smoothing
        // A decibel level, if ever needed, should be computed from the de-noised data here.
        // Dominant frequency = frequency of the loudest band after smoothing.
        val mCurrentMaxFrequency = bandFrequencies[getAfterDealMaxFrequency(mBlinkSpectrum)]
        return Pair(mCurrentMaxFrequency, mBlinkSpectrum)
    }

    /** Index of the loudest band in [mBlinkSpectrum]; arrays are band-aligned, so the index is valid. */
    private fun getAfterDealMaxFrequency(mBlinkSpectrum: DoubleArray): Int {
        var maxAmp = Double.MIN_VALUE
        var maxIndex = 0
        for (index in mBlinkSpectrum.indices) {
            if (mBlinkSpectrum[index] > maxAmp) {
                maxAmp = mBlinkSpectrum[index]
                maxIndex = index
            }
        }
        return maxIndex
    }

    // Previous banded frame, used for temporal smoothing in onBlinkOps.
    private var spectrumBuffer = DoubleArray(frequencyBands)

    /**
     * Temporal smoothing (anti-flicker): exponential moving average keeping 80% of
     * the previous frame and blending in 20% of the new one. The old dead
     * `isEmpty()` branch and the division by (a + (1 - a)) == 1.0 were removed.
     */
    private fun onBlinkOps(spectrum: DoubleArray): DoubleArray {
        val aWeight = 0.8 // 0..1 — larger keeps more of the old frame (smoother)
        for (i in spectrum.indices) {
            spectrumBuffer[i] = spectrumBuffer[i] * aWeight + spectrum[i] * (1 - aWeight)
        }
        return spectrumBuffer
    }

    /**
     * A-weighting curve sampled at each FFT bin (IEC 61672 transfer function),
     * used to scale raw magnitudes toward perceived loudness.
     *
     * @return bufferSize / 2 weights, one per positive-frequency bin.
     */
    private fun createFrequencyWeights(bufferSize: Int): DoubleArray {
        val deltaF = sampleRateInHz.toDouble() / bufferSize // Hz per FFT bin
        val bins: Int = bufferSize / 2
        // Squared corner frequencies of the A-weighting transfer function.
        val f1 = Math.pow(20.598997, 2.0)
        val f2 = Math.pow(107.65265, 2.0)
        val f3 = Math.pow(737.86223, 2.0)
        val f4 = Math.pow(12194.217, 2.0)
        val weights = DoubleArray(bins)
        for (i in 0 until bins) {
            // BUG FIX: the formula operates on f²; the old code plugged f in directly.
            val fSq = (i * deltaF) * (i * deltaF)
            val num = f4 * fSq * fSq
            val den = (fSq + f1) * Math.sqrt((fSq + f2) * (fSq + f3)) * (fSq + f4)
            // 1.2589 ≈ +2 dB gain so the curve reaches 1.0 at 1 kHz.
            weights[i] = 1.2589 * num / den
        }
        return weights
    }

    /**
     * Spatial smoothing: sliding weighted average over neighbouring bands.
     * The first/last kernel-half entries are left at 0.0, as before.
     */
    private fun highlightWaveform(spectrum: DoubleArray): DoubleArray {
        // Odd-length kernel; the centre value 5.0 is the sample's own weight.
        val weights = doubleArrayOf(1.0, 2.0, 3.0, 5.0, 3.0, 2.0, 1.0)
        val totalWeight = weights.sum()
        val half = weights.size / 2 // == 3
        val smoothed = DoubleArray(spectrum.size)
        for (i in half until spectrum.size - half) {
            var sum = 0.0
            for (k in weights.indices) {
                sum += spectrum[i - half + k] * weights[k]
            }
            smoothed[i] = sum / totalWeight
        }
        return smoothed
    }

    override fun onAttachedToEngine(flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
        mContext = flutterPluginBinding.applicationContext

        // One method channel per command.
        audioSpectrumChannel = MethodChannel(flutterPluginBinding.binaryMessenger, "audio_spectrum")
        audioSpectrumChannel.setMethodCallHandler(this)

        stopPlayerChannel = MethodChannel(flutterPluginBinding.binaryMessenger, "stop_player")
        stopPlayerChannel.setMethodCallHandler(this)

        startRecordChannel = MethodChannel(flutterPluginBinding.binaryMessenger, "start_record")
        startRecordChannel.setMethodCallHandler(this)

        stopRecordChannel = MethodChannel(flutterPluginBinding.binaryMessenger, "stop_record")
        stopRecordChannel.setMethodCallHandler(this)

        // Event channels that stream spectrum frames to Dart.
        musicFftChannel = EventChannel(
            flutterPluginBinding.binaryMessenger,
            "flutter_music_plugin.event.music.spectrum"
        )
        musicFftChannel.setStreamHandler(fftStreamHandler)

        recordFftChannel = EventChannel(
            flutterPluginBinding.binaryMessenger,
            "flutter_music_plugin.event.record.spectrum"
        )
        recordFftChannel.setStreamHandler(recordStreamHandler)
    }

    /** Stream handler for playback-spectrum events. */
    private val fftStreamHandler = object : EventChannel.StreamHandler {
        override fun onListen(p0: Any?, p1: EventSink?) {
            musicSpectrumSink = p1
        }

        override fun onCancel(p0: Any?) {
            // BUG FIX: drop the sink so we stop pushing into a cancelled stream.
            musicSpectrumSink = null
        }
    }

    /** Stream handler for microphone-spectrum events. */
    private val recordStreamHandler = object : EventChannel.StreamHandler {
        override fun onListen(p0: Any?, p1: EventSink?) {
            recordSpectrumSink = p1
        }

        override fun onCancel(p0: Any?) {
            // BUG FIX: drop the sink so we stop pushing into a cancelled stream.
            recordSpectrumSink = null
        }
    }

    override fun onMethodCall(call: MethodCall, result: Result) {
        // BUG FIX: every branch now completes `result`, otherwise the Dart-side
        // Future never resolves; unknown methods report notImplemented().
        when (call.method) {
            "audio_spectrum" -> {
                val url = call.argument<String>("url")
                if (url == null) {
                    // BUG FIX: was `!!`, which crashed on a missing argument.
                    result.error("ARGUMENT", "url is required", null)
                    return
                }
                getAudioSpectrum(url)
                result.success(null)
            }

            "stop_player" -> {
                stopPlayer()
                result.success(null)
            }

            "start_record" -> {
                startRecord()
                result.success(null)
            }

            "stop_record" -> {
                stopRecord()
                result.success(null)
            }

            else -> result.notImplemented()
        }
    }

    /**
     * Receives FFT frames from [FFTAudioProcessor] during playback and forwards the
     * magnitude data to Dart on the UI thread.
     */
    val listenerImplementation = object : FFTAudioProcessor.FFTListener {
        override fun onFFTReady(sampleRateHz: Int, channelCount: Int, fft: FloatArray) {
            if (!isPlay) return
            // Copies `size` floats starting at offset 2 — presumably skipping the DC
            // term; layout is defined by FFTAudioProcessor (TODO confirm there).
            arraycopy(fft, 2, mFft, 0, size)
            mActivity.runOnUiThread {
                musicSpectrumSink?.success(mFft)
            }
        }
    }

    /**
     * Plays [url] with ExoPlayer, routing decoded PCM through [FFTAudioProcessor]
     * so spectrum frames reach [listenerImplementation]. Any previous playback is
     * stopped first.
     */
    private fun getAudioSpectrum(url: String) {
        stopPlayer()
        isPlay = true
        val uri: Uri = Uri.parse(url)

        val fftAudioProcessor = FFTAudioProcessor(listenerImplementation)

        // Renderers factory that injects the FFT processor into the audio sink so
        // every decoded buffer is analysed before it is played.
        val renderersFactory = object : DefaultRenderersFactory(mContext) {

            override fun buildAudioRenderers(
                context: Context,
                extensionRendererMode: Int,
                mediaCodecSelector: MediaCodecSelector,
                enableDecoderFallback: Boolean,
                audioSink: AudioSink,
                eventHandler: Handler,
                eventListener: AudioRendererEventListener,
                out: ArrayList<Renderer>
            ) {
                out.add(
                    MediaCodecAudioRenderer(
                        context,
                        mediaCodecSelector,
                        enableDecoderFallback,
                        eventHandler,
                        eventListener,
                        DefaultAudioSink(
                            AudioCapabilities.getCapabilities(context),
                            arrayOf(fftAudioProcessor)
                        )
                    )
                )

                super.buildAudioRenderers(
                    context,
                    extensionRendererMode,
                    mediaCodecSelector,
                    enableDecoderFallback,
                    audioSink,
                    eventHandler,
                    eventListener,
                    out
                )
            }
        }

        player = ExoPlayer.Builder(mContext, renderersFactory).build()

        // NOTE(review): DefaultDataSourceFactory is deprecated in newer ExoPlayer
        // releases; migrate to DefaultDataSource.Factory when upgrading.
        val mediaSource = ProgressiveMediaSource.Factory(
            DefaultDataSourceFactory(mContext, "ExoVisualizer")
        ).createMediaSource(MediaItem.Builder().setUri(uri).build())
        player?.setMediaSource(mediaSource)
        player?.playWhenReady = true
        player?.prepare()
    }

    override fun onDetachedFromEngine(binding: FlutterPlugin.FlutterPluginBinding) {
        // BUG FIX: release native resources so playback/recording threads do not
        // keep running after the engine dies.
        stopPlayer()
        stopRecord()
        audioSpectrumChannel.setMethodCallHandler(null)
        stopPlayerChannel.setMethodCallHandler(null)
        startRecordChannel.setMethodCallHandler(null)
        stopRecordChannel.setMethodCallHandler(null)
        musicFftChannel.setStreamHandler(null)
        recordFftChannel.setStreamHandler(null)
    }

    override fun onAttachedToActivity(binding: ActivityPluginBinding) {
        mActivity = binding.activity
        Log.d(TAG, "onAttachedToActivity: ")
    }

    override fun onDetachedFromActivityForConfigChanges() {
        Log.d(TAG, "onDetachedFromActivityForConfigChanges: ")
    }

    override fun onReattachedToActivityForConfigChanges(binding: ActivityPluginBinding) {
        // BUG FIX: re-capture the activity — the old instance is destroyed after a
        // configuration change and must not be used for runOnUiThread.
        mActivity = binding.activity
    }

    override fun onDetachedFromActivity() {
        Log.d(TAG, "onDetachedFromActivity: ")
    }
}
