package com.soriya.nestlive.service

import android.Manifest
import android.app.Notification
import android.app.NotificationChannel
import android.app.NotificationManager
import android.app.PendingIntent
import android.app.Service
import android.content.Intent
import android.content.pm.PackageManager
import android.content.res.Configuration
import android.graphics.BitmapFactory
import android.hardware.display.DisplayManager
import android.hardware.display.VirtualDisplay
import android.media.AudioAttributes
import android.media.AudioFormat
import android.media.AudioPlaybackCaptureConfiguration
import android.media.AudioRecord
import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.media.MediaMuxer
import android.media.MediaRecorder
import android.media.projection.MediaProjection
import android.media.projection.MediaProjectionManager
import android.os.Build
import android.os.Bundle
import android.os.Environment
import android.os.IBinder
import android.util.Log
import android.view.Surface
import android.view.WindowManager
import androidx.annotation.RequiresApi
import androidx.core.app.ActivityCompat
import com.soriya.nestlive.MainActivity
import com.soriya.nestlive.R
import com.soriya.nestlive.constant.ServerConstant
import com.soriya.nestlive.constant.StreamConstant
import com.soriya.nestlive.db.dao.ChannelInfoDao
import com.soriya.nestlive.db.dao.UserInfoDao
import com.soriya.nestlive.im.MessageType
import com.soriya.nestlive.im.NettyClient
import com.soriya.nestlive.im.message.IMMessage
import com.soriya.nestlive.model.sys.StreamConfig
import dagger.hilt.android.AndroidEntryPoint
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.collectLatest
import kotlinx.coroutines.joinAll
import kotlinx.coroutines.launch
import okhttp3.internal.and
import java.io.File
import java.util.Arrays
import java.util.Timer
import java.util.TimerTask
import javax.inject.Inject

@RequiresApi(Build.VERSION_CODES.Q)
@AndroidEntryPoint
class ScreenLiveService : Service() {

    init {
        // Native RTMP pusher backing connect()/close()/pushData().
        System.loadLibrary("rtmppush")
    }

    /** Opens the native RTMP connection to [url]. Returns true on success. */
    private external fun connect(url: String): Boolean

    /** Tears down the native RTMP connection. */
    private external fun close()

    /**
     * Pushes one encoded packet to the RTMP server.
     *
     * @param byteArray encoded payload
     * @param len payload length in bytes
     * @param type packet type (video / audio-config / audio-data, see [StreamConstant])
     * @param tms timestamp in milliseconds, relative to the first pushed packet
     */
    private external fun pushData(byteArray: ByteArray, len: Int, type: Int, tms: Long): Boolean

    private val TAG: String = "SoRiya"

    private val AUDIO_SAMPLE_RATE = 44100

    // Loop flags for the capture coroutines; cleared in onDestroy() to stop them.
    private var videoRec = false
    private var audioRec = false

    private lateinit var videoCodec: MediaCodec

    private lateinit var audioCodec: MediaCodec

    private lateinit var virtualDisplay: VirtualDisplay

    private lateinit var mediaProjection: MediaProjection

    private var nanoTime: Long = 0L

    // Accumulated audio presentation time in microseconds (informational only;
    // the actual PTS handed to the codec is derived from System.nanoTime()).
    private var presentationTimeUs: Long = 0L

    // Local MP4 copy of the stream, written alongside the RTMP push.
    private val path = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM).path + File.separator + "record.mp4"
    private var mediaMuxer: MediaMuxer = MediaMuxer(
        path,
        MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4
    )

    // Landscape capture size. NOTE(review): 1960 looks like a typo for 1920 — confirm.
    private val width = 1960
    private val height = 1080
    private lateinit var surface: Surface

    @Inject
    lateinit var nettyClient: NettyClient

    @Inject
    lateinit var channelInfoDao: ChannelInfoDao

    // Heartbeat timer that keeps the streaming session alive on the IM server.
    lateinit var timer: Timer

    override fun onBind(intent: Intent?): IBinder? {
        // Started service only — binding is not supported.
        return null
    }

    override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
        Log.i(TAG, "启动Service...")

        createNotificationChannel()

        // With START_STICKY the system may restart us with a NULL intent after the
        // process is killed. There is no projection token to resume from in that
        // case, so stop cleanly instead of crashing on the missing extras
        // (the previous code NPE'd on resultCode!!/data!!, causing a crash loop).
        val data: Intent? = intent?.getParcelableExtra("data")
        if (intent == null || data == null) {
            stopSelf()
            return START_NOT_STICKY
        }

        val mediaProjectionManager =
            getSystemService(MEDIA_PROJECTION_SERVICE) as MediaProjectionManager

        val resultCode = intent.getIntExtra("resultCode", -1)

        mediaProjection =
            mediaProjectionManager.getMediaProjection(resultCode, data)

        CoroutineScope(Dispatchers.IO).launch {
            channelInfoDao.selectOne().collectLatest {
                val channelId = it?.channelId ?: return@collectLatest

                val connect = connect("${ServerConstant.RTMP_URL}$channelId")
                Log.i(TAG, "connect: $connect")

                if (connect) {
                    // Stream parameters passed in from the launching activity.
                    val streamConfig = StreamConfig().apply {
                        frameRate = intent.getIntExtra("frameRate", StreamConstant.DEFAULT_FRAME_RATE)
                        orientation = intent.getIntExtra("orientation", StreamConstant.SCREEN_HORIZONTAL)
                        bitRate = intent.getDoubleExtra("bitRate", StreamConstant.DEFAULT_BIT_RATE)
                    }
                    start(mediaProjection, streamConfig)

                    // NOTE(review): collectLatest can run this block again on a new
                    // emission; the previous Timer (and RTMP/codec session) is not
                    // cancelled here and would leak — confirm selectOne() emits at
                    // most once per streaming session.
                    timer = Timer()
                    timer.schedule(object : TimerTask() {
                        override fun run() {
                            // Heartbeat so the server keeps the channel marked live.
                            val imMessage = IMMessage(
                                type = MessageType.STREAMING_HEART,
                                from = channelId,
                                to = 0L,
                                state = 1,
                                nickname = null,
                                data = null
                            )
                            nettyClient.sendMessage(imMessage)
                        }
                    }, 0, 6000)
                }
            }
        }

        return START_STICKY
    }

    override fun onDestroy() {
        Log.i(TAG, "停止录屏...")
        // Close the RTMP connection and signal the capture loops to exit; the
        // loops release the codecs/muxer themselves once they drain (see start()).
        close()
        videoRec = false
        audioRec = false
        if (::timer.isInitialized) {
            timer.cancel()
        }
        super.onDestroy()
    }

    override fun onConfigurationChanged(newConfig: Configuration) {
        super.onConfigurationChanged(newConfig)
    }

    /**
     * Creates the notification channel (O+) and promotes this service to the
     * foreground, which is required before using a MediaProjection on Q+.
     */
    private fun createNotificationChannel() {
        Log.i(TAG, "启动通知...")
        val builder = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            val builder = Notification.Builder(this, "notification_id")

            val notificationManager = getSystemService(NOTIFICATION_SERVICE) as NotificationManager
            val notificationChannel = NotificationChannel(
                "notification_id",
                "notification_name",
                NotificationManager.IMPORTANCE_LOW
            )
            notificationManager.createNotificationChannel(notificationChannel)
            builder
        } else {
            Notification.Builder(this)
        }

        // Tapping the notification brings the main activity forward.
        val intent = Intent(this, MainActivity::class.java)

        builder.setContentIntent(PendingIntent.getActivity(this, 0, intent,
            PendingIntent.FLAG_IMMUTABLE))
            .setLargeIcon(BitmapFactory.decodeResource(this.resources, R.mipmap.ic_launcher))
            .setSmallIcon(R.mipmap.ic_launcher)
            .setContentText("正在录屏...")
            .setContentTitle("小窝直播")
            .setWhen(System.currentTimeMillis())

        val notification = builder.build()
        notification.defaults = Notification.DEFAULT_SOUND
        startForeground(110, notification)
    }

    /**
     * Configures the H.264/AAC encoders, the virtual display and the two audio
     * captures (playback + mic), then launches three coroutines: one feeding
     * mixed PCM into the audio encoder, and one drain loop per encoder that
     * pushes packets over RTMP and writes them into the local MP4 muxer.
     */
    private fun start(mediaProjection: MediaProjection, streamConfig: StreamConfig) {
        Log.i(TAG, "启动录屏...")

        // Landscape defaults; swapped for a portrait stream.
        var width = width
        var height = height
        if (streamConfig.orientation == StreamConstant.SCREEN_VERTICAL) {
            val temp = width
            width = height
            height = temp
        }

        Log.i(TAG, "width: $width, height: $height")

        // Video (H.264) encoder format.
        val videoFormat =
            MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height)

        videoFormat.apply {
            // Input arrives via a Surface, so the color format is the opaque surface format.
            setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
            // Bitrate scaled from resolution, frame rate and the configured factor.
            setInteger(MediaFormat.KEY_BIT_RATE, (width * height * streamConfig.frameRate * streamConfig.bitRate).toInt())
            setInteger(MediaFormat.KEY_FRAME_RATE, streamConfig.frameRate)
            // Request a key frame every 2 seconds.
            setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2)
        }

        videoCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
        videoCodec.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)

        surface = videoCodec.createInputSurface()
        // Virtual display that mirrors the screen into the encoder's input surface.
        virtualDisplay = mediaProjection.createVirtualDisplay(
            "MyVirtualDisplay",
            width,
            height,
            1, // NOTE(review): densityDpi of 1 is unusual — confirm this is intended
            DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
            surface,
            null,
            null
        )

        // ===

        // Audio (AAC-LC, 44.1 kHz stereo) encoder format.
        val audioFormat =
            MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, AUDIO_SAMPLE_RATE, 2)

        audioFormat.apply {
            setInteger(MediaFormat.KEY_BIT_RATE, 96000)
            setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC)
            // Maximum size of one input buffer handed to the encoder.
            setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 8192)
        }

        audioCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC)
        audioCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)

        val minBufferSize = AudioRecord.getMinBufferSize(
            AUDIO_SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.ENCODING_PCM_16BIT
        )

        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
            Log.i(TAG, "权限不足")
            return
        }

        // Capture of system playback audio (audio played by other apps), limited to
        // the USAGE_MEDIA / USAGE_UNKNOWN / USAGE_GAME streams.
        val audioRecord = AudioRecord.Builder()
            .setAudioFormat(
                AudioFormat.Builder()
                    .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                    .setSampleRate(AUDIO_SAMPLE_RATE)
                    .setChannelMask(AudioFormat.CHANNEL_IN_STEREO)
                    .build()
            )
            .setBufferSizeInBytes(minBufferSize)
            .setAudioPlaybackCaptureConfig(
                AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
                    .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
                    .addMatchingUsage(AudioAttributes.USAGE_UNKNOWN)
                    .addMatchingUsage(AudioAttributes.USAGE_GAME)
                    .build()
            )
            .build()

        // Microphone capture.
        val audioRecordMic = AudioRecord.Builder()
            .setAudioFormat(
                AudioFormat.Builder()
                    .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                    .setSampleRate(AUDIO_SAMPLE_RATE)
                    .setChannelMask(AudioFormat.CHANNEL_IN_STEREO)
                    .build()
            )
            .setBufferSizeInBytes(minBufferSize)
            .setAudioSource(MediaRecorder.AudioSource.MIC)
            .build()

        // ==

        videoCodec.start()
        audioCodec.start()

        // Per-track codec buffer metadata.
        val videoBufferInfo = MediaCodec.BufferInfo()
        val audioBufferInfo = MediaCodec.BufferInfo()

        // Muxer track indices; -1 until the codec reports its output format.
        var videoTrack = -1
        var audioTrack = -1

        CoroutineScope(Dispatchers.IO).launch {
            videoRec = true
            audioRec = true

            // Feed mixed PCM (system + mic) into the AAC encoder.
            val audioRecJob = launch audioLaunch@ {
                audioRecord.startRecording()    // system playback
                audioRecordMic.startRecording() // microphone

                while (audioRec) {
                    val audioData = ByteArray(minBufferSize)    // system playback PCM
                    val audioDataMic = ByteArray(minBufferSize) // microphone PCM
                    val len = audioRecord.read(audioData, 0, audioData.size)
                    val lenMic = audioRecordMic.read(audioDataMic, 0, audioDataMic.size)

                    // Mix the two PCM streams into a single buffer.
                    val audioMix = audioMix(audioData, len, audioDataMic, lenMic)

                    // Blocks until an input buffer is available (timeout -1);
                    // a negative index here means the codec is shutting down.
                    val index = audioCodec.dequeueInputBuffer(-1)
                    if (index < 0) return@audioLaunch
                    val inputBuffer = audioCodec.getInputBuffer(index)
                    inputBuffer?.clear()
                    inputBuffer?.put(audioMix)
                    inputBuffer?.limit(audioMix.size)
                    // Bytes -> microseconds at 44.1 kHz stereo 16-bit. (The original
                    // formula divided by 1e6 instead of multiplying, always adding 0.)
                    presentationTimeUs += len * 1_000_000L / (AUDIO_SAMPLE_RATE * 2 * 2)
                    // Queue the PCM with a wall-clock-derived PTS.
                    audioCodec.queueInputBuffer(
                        index,
                        0,
                        audioMix.size,
                        (System.nanoTime() - nanoTime) / 1000,
                        0
                    )
                }
            }

            // Drain the video encoder: push over RTMP and write to the local MP4.
            val videoJob = launch {
                var startTime: Long = 0
                var currTime = 0L
                while (videoRec) {
                    // Ask the encoder for a sync (key) frame every 2 seconds so new
                    // viewers can join mid-stream.
                    if (System.currentTimeMillis() - currTime >= 2000) {
                        currTime = System.currentTimeMillis()
                        val bundle = Bundle()
                        bundle.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0)
                        videoCodec.setParameters(bundle)
                    }

                    val index = videoCodec.dequeueOutputBuffer(videoBufferInfo, 10_00)
                    if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        Log.i(TAG, "开始写入视频... ${videoBufferInfo.presentationTimeUs}")
                        val outputFormat = videoCodec.outputFormat
                        videoTrack = mediaMuxer.addTrack(outputFormat)
                        // Start the muxer only once both tracks have been added.
                        // NOTE(review): this gating races with the audio job and
                        // writeSampleData below can run before start() — confirm.
                        if (audioTrack != -1) mediaMuxer.start()
                    }
                    if (index >= 0) {
                        var byteBuffer = videoCodec.getOutputBuffer(index)
                        val outData = ByteArray(videoBufferInfo.size)
                        byteBuffer?.get(outData)

                        // Anchor RTMP timestamps at the first frame BEFORE pushing it,
                        // so the first packet goes out at t=0 (the original pushed
                        // first, emitting one absolute PTS at stream start). This also
                        // matches the audio drain loop below.
                        if (startTime == 0L) startTime = videoBufferInfo.presentationTimeUs / 1000

                        // Push the encoded frame to the RTMP server.
                        pushData(outData, outData.size, StreamConstant.RTMP_PUSH_VIDEO, videoBufferInfo.presentationTimeUs / 1000 - startTime)

                        // Codec-config buffers (SPS/PPS) must not be muxed as samples.
                        if (videoBufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
                            videoBufferInfo.size = 0
                        }
                        if (videoBufferInfo.size == 0) byteBuffer = null
                        if (byteBuffer != null) {
                            byteBuffer.position(videoBufferInfo.offset)
                            byteBuffer.limit(videoBufferInfo.offset + videoBufferInfo.size)

                            // Local MP4 copy.
                            mediaMuxer.writeSampleData(videoTrack, byteBuffer, videoBufferInfo)
                        }
                        videoCodec.releaseOutputBuffer(index, false)
                    }
                }
            }

            // Drain the audio encoder: push over RTMP and write to the local MP4.
            val audioJob = launch {
                var startTime: Long = 0
                while (audioRec) {
                    val index = audioCodec.dequeueOutputBuffer(audioBufferInfo, 10_00)
                    if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        Log.i(TAG, "开始写入音频...")

                        // AudioSpecificConfig for AAC-LC 44.1 kHz stereo, sent once
                        // before any audio data packets.
                        val audioDecoderSpecificInfo: ByteArray = byteArrayOf(0x12, 0x08)
                        pushData(audioDecoderSpecificInfo, audioDecoderSpecificInfo.size, StreamConstant.RTMP_PUSH_AUDIO_DECODE, 0)

                        val outputFormat = audioCodec.outputFormat
                        audioTrack = mediaMuxer.addTrack(outputFormat)
                        if (videoTrack != -1) mediaMuxer.start()
                    }
                    if (index >= 0) {
                        var byteBuffer = audioCodec.getOutputBuffer(index)

                        // Anchor RTMP timestamps at the first audio packet.
                        if (startTime == 0L) startTime = audioBufferInfo.presentationTimeUs / 1000
                        val outData = ByteArray(audioBufferInfo.size)
                        byteBuffer?.get(outData)

                        // Codec-config buffers must not be muxed as samples.
                        if (audioBufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
                            audioBufferInfo.size = 0
                        }
                        if (audioBufferInfo.size == 0) byteBuffer = null
                        if (byteBuffer != null) {
                            // Push the AAC frame, then write the local MP4 copy.
                            pushData(outData, outData.size, StreamConstant.RTMP_PUSH_AUDIO_DATA, audioBufferInfo.presentationTimeUs / 1000 - startTime)
                            mediaMuxer.writeSampleData(audioTrack, byteBuffer!!, audioBufferInfo)
                        }
                        // Release the output buffer back to the codec.
                        audioCodec.releaseOutputBuffer(index, false)
                    }
                }
            }

            // Wait for all three loops to exit (onDestroy clears the flags),
            // then release everything.
            joinAll(audioRecJob, videoJob, audioJob)

            Log.i(TAG, "释放资源...")
            videoCodec.stop()
            videoCodec.release()

            audioCodec.stop()
            audioCodec.release()

            virtualDisplay.release()

            mediaProjection.stop()

            mediaMuxer.stop()
            mediaMuxer.release()
        }
    }

    /**
     * Mixes two PCM byte streams by clamped byte-wise addition.
     *
     * Only the first [audio1Limit] / [audio2Limit] bytes of each buffer are valid
     * (the return value of AudioRecord.read); beyond its own limit a stream
     * contributes nothing and the other stream passes through unchanged.
     *
     * NOTE(review): the samples are 16-bit little-endian, so summing individual
     * bytes (rather than 16-bit samples) distorts on carry/overflow — the
     * commented-out code in history shows an abandoned 16-bit attempt. Confirm
     * whether sample-wise mixing is wanted before changing audible behavior.
     *
     * @return mixed buffer of length max(valid lengths); empty if neither read
     *         produced data (negative read results are treated as "no data")
     */
    private fun audioMix(audio1: ByteArray, audio1Limit: Int, audio2: ByteArray, audio2Limit: Int): ByteArray {
        // AudioRecord.read() returns a negative error code on failure; clamp to 0
        // so a failed stream can't cause stale bytes to be mixed in.
        val len1 = audio1Limit.coerceAtLeast(0)
        val len2 = audio2Limit.coerceAtLeast(0)
        val size = len1.coerceAtLeast(len2)
        val buff = ByteArray(size)
        for (i in 0 until size) {
            // Valid indices are 0 until len — the original used `>` here, reading
            // one stale byte past each stream's data (off-by-one).
            var sum: Int = when {
                i >= len1 -> audio2[i].toInt()
                i >= len2 -> audio1[i].toInt()
                else -> audio1[i] + audio2[i]
            }

            // Clamp to the signed-byte range instead of wrapping around.
            if (sum > Byte.MAX_VALUE) sum = Byte.MAX_VALUE.toInt()
            if (sum < Byte.MIN_VALUE) sum = Byte.MIN_VALUE.toInt()

            buff[i] = sum.toByte()
        }
        return buff
    }
}