package com.streamer.app

import android.media.MediaCodec
import android.os.Build
import android.os.Bundle
import android.util.Log
import android.view.View
import android.view.ViewGroup.LayoutParams.MATCH_PARENT
import android.view.WindowInsetsController
import android.view.WindowManager
import android.widget.ImageView
import android.widget.LinearLayout
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import androidx.core.view.WindowCompat
import androidx.core.view.doOnLayout
import androidx.lifecycle.lifecycleScope
import com.alivc.live.annotations.AlivcLiveMode
import com.alivc.live.pusher.AlivcAudioAACProfileEnum
import com.alivc.live.pusher.AlivcEncodeModeEnum
import com.alivc.live.pusher.AlivcFpsEnum
import com.alivc.live.pusher.AlivcLiveBase
import com.alivc.live.pusher.AlivcLiveBaseListener
import com.alivc.live.pusher.AlivcLivePushCameraTypeEnum
import com.alivc.live.pusher.AlivcLivePushConfig
import com.alivc.live.pusher.AlivcLivePushConstants.AlivcLiveLicenseCheckResultCode
import com.alivc.live.pusher.AlivcLivePusher
import com.alivc.live.pusher.AlivcPreviewOrientationEnum
import com.alivc.live.pusher.AlivcResolutionEnum
import com.alivc.live.pusher.AlivcVideoEncodeGopEnum
import io.keyss.view_record.recording.ViewLiveRecorder
import io.keyss.view_record.video.EncoderErrorCallback
import kotlinx.coroutines.Job
import kotlinx.coroutines.delay
import kotlinx.coroutines.isActive
import kotlinx.coroutines.launch


/**
 * Base activity that can push the content of a [View] as a live stream.
 *
 * Two push paths exist:
 *  - `artc://` URLs → [AlivcLivePusher] fed by an external video source
 *    (periodic view captures converted to NV21), see [startArtcRecord];
 *  - everything else → [ViewLiveRecorder], see [method4].
 */
abstract class BaseActivity : AppCompatActivity() {

    /** Push configuration shared with subclasses; populated in [onCreate]. */
    val mAlivcLivePushConfig = AlivcLivePushConfig()

    /** Recorder used for the non-artc push path. */
    private val viewLiveRecord = ViewLiveRecorder()

    /** Keep-alive ticker started with a recording; cancelled on stop. */
    private var mTimerJob: Job? = null

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Register the license callback before registering the SDK itself.
        AlivcLiveBase.setListener(object : AlivcLiveBaseListener() {
            override fun onLicenceCheck(result: AlivcLiveLicenseCheckResultCode, reason: String) {
                Log.e("xxxx", "onLicenceCheck: $result, $reason")
            }
        })
        AlivcLiveBase.registerSDK()
        window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)

        // Initialise the shared push configuration with the project defaults.
        applyDefaultPushConfig(mAlivcLivePushConfig)
    }

    /**
     * Applies the common push defaults to [config].
     *
     * Shared by [onCreate] (class-level config) and [startArtcRecord] (its
     * locally built config); previously the two copies of this setup were
     * duplicated and could drift apart.
     */
    private fun applyDefaultPushConfig(config: AlivcLivePushConfig) {
        // Basic (non-interactive) push mode, the SDK default.
        config.livePushMode = AlivcLiveMode.AlivcLiveBasicMode
        // 540P is recommended for best device compatibility.
        config.resolution = AlivcResolutionEnum.RESOLUTION_540P
        // Frame rate (SDK default is 20 fps).
        config.setFps(AlivcFpsEnum.FPS_25)
        // Video GOP in seconds.
        config.setVideoEncodeGop(AlivcVideoEncodeGopEnum.GOP_TWO)
        // Adaptive bitrate control (default true).
        config.isEnableBitrateControl = true
        // Portrait orientation (default).
        config.setPreviewOrientation(AlivcPreviewOrientationEnum.ORIENTATION_PORTRAIT)
        // AAC-LC audio profile (default).
        config.audioProfile = AlivcAudioAACProfileEnum.AAC_LC
        // Hardware video encoding / software audio encoding (SDK defaults).
        config.videoEncodeMode = AlivcEncodeModeEnum.Encode_MODE_HARD
        config.audioEncodeMode = AlivcEncodeModeEnum.Encode_MODE_SOFT
        // Front camera (default).
        config.setCameraType(AlivcLivePushCameraTypeEnum.CAMERA_TYPE_FRONT)
        // pausePushImage / networkPoorPushImage intentionally left unset:
        // the previously hard-coded placeholder paths were invalid.
    }

    override fun setContentView(layoutResID: Int) {
        super.setContentView(layoutResID)
        setupImmersiveStatusBar()
    }

    /** Enables edge-to-edge display with transparent, light system bars. */
    private fun setupImmersiveStatusBar() {
        // Let the content draw behind the system bars.
        WindowCompat.setDecorFitsSystemWindows(window, false)

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
            // Android 11+: WindowInsetsController API.
            window.insetsController?.setSystemBarsAppearance(
                WindowInsetsController.APPEARANCE_LIGHT_STATUS_BARS,
                WindowInsetsController.APPEARANCE_LIGHT_STATUS_BARS
            )
        } else {
            // Android 10 and below: legacy systemUiVisibility flags.
            @Suppress("DEPRECATION")
            window.decorView.systemUiVisibility =
                View.SYSTEM_UI_FLAG_LAYOUT_STABLE or
                View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN or
                View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR
        }

        // Transparent status and navigation bars.
        window.statusBarColor = android.graphics.Color.TRANSPARENT
        window.navigationBarColor = android.graphics.Color.TRANSPARENT
    }

    /**
     * Starts pushing [view]'s content to [url], retrying automatically until
     * the view is laid out, attached and visible.
     *
     * @param view   the view to capture.
     * @param url    push URL; `artc://` selects the AlivcLivePusher path.
     * @param isFlag forwarded to [init] (currently unused there).
     */
    fun startRecord(view: View, url: String?, isFlag: Int = 0) {
        // Defer until laid out: a 0x0 view would make Bitmap.createBitmap throw.
        if (view.width <= 0 || view.height <= 0) {
            view.doOnLayout {
                startRecord(view, url, isFlag)
            }
            return
        }

        // PixelCopy needs an attached, visible view backed by a window surface.
        if (!view.isAttachedToWindow || view.windowToken == null || !view.isShown) {
            view.postDelayed({ startRecord(view, url, isFlag) }, 120)
            return
        }

        window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)

        // Route by URL scheme.
        when {
            url?.startsWith("artc://") == true -> {
                Log.d("BaseActivity", "Detected artc:// protocol, using AlivcLivePusher")
                startArtcRecord(view, url)
            }
            else -> {
                Log.d("BaseActivity", "Using ViewLiveRecorder for protocol: ${url?.substringBefore("://")}")
                method4(view, url, isFlag)
            }
        }

        // Cancel any previous ticker first so repeated calls don't leak
        // idle coroutines (the old code overwrote the Job without cancelling).
        mTimerJob?.cancel()
        mTimerJob = lifecycleScope.launch {
            while (isActive) {
                delay(1000)
            }
        }
    }

    /**
     * Initialises [viewLiveRecord] for [view].
     *
     * @param type currently unused; kept for source compatibility with callers.
     */
    protected fun init(view: View, type: Int = 0) {
        // Fixed encoder settings. The old code assigned throw-away initial
        // values (1920*1080 / 30) that were immediately overwritten — dead
        // code removed.
        val videoBitRate = 9_500_000
        val fps = 60

        viewLiveRecord.init(
            window = window,
            view = view,
            width = view.width,
            fps = fps,
            videoBitRate = videoBitRate,
            iFrameInterval = 1,
            audioBitRate = 192_000,
            audioSampleRate = 44100,
            isStereo = true
        )
    }

    /** Non-artc push path: initialise the recorder and start streaming to [url]. */
    private fun method4(view: View, url: String?, isFlag: Int = 0) {
        // Guard BEFORE init(): re-initialising the recorder while a push is
        // in progress would reset the encoder mid-stream (old code called
        // init() first).
        if (viewLiveRecord.isStartRecord) {
            Toast.makeText(this, "正在推流中", Toast.LENGTH_SHORT).show()
            return
        }
        init(view, isFlag)
        viewLiveRecord.startLiveRecord(url, { status ->
            Log.i(
                "xxxx", "viewLiveRecord= onStatusChange() called with: status = $status"
            )
        }, object : EncoderErrorCallback {
            override fun onCodecError(type: String, e: MediaCodec.CodecException) {
                Log.e("xxxx", "onCodecError() called with: type = $type", e)
            }
        })
    }

    /**
     * Stops pushing.
     *
     * @param pauase `true` = pause flow (tear down, prepare a placeholder
     *   image and restart the keep-alive ticker); `false` = plain stop.
     *   NOTE(review): the misspelled name is kept — it is part of the
     *   protected API and a rename would break named-argument callers.
     */
    protected fun stopRecord(pauase: Boolean) {
        // Common teardown for both paths (was duplicated in each branch).
        window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
        mTimerJob?.cancel()
        mTimerJob = null
        stopMethod4()

        if (pauase) {
            // Build a full-screen placeholder image.
            // NOTE(review): it is never attached to a parent (addView below is
            // commented out), so today this is dead work kept only for the
            // pending re-attach logic — confirm intent before removing.
            val imageView = ImageView(this).apply {
                layoutParams = LinearLayout.LayoutParams(
                    MATCH_PARENT,
                    MATCH_PARENT
                )
                setImageResource(R.mipmap.wait_img)
                scaleType = ImageView.ScaleType.CENTER_CROP
                // Must not intercept input events.
                isFocusable = false
                isFocusableInTouchMode = false
                isClickable = false
                isLongClickable = false
            }
//            _binding.container.addView(imageView)

            window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
//            method4(_binding.container, pushStreamBean?.pushUrl.toString())
            mTimerJob = lifecycleScope.launch {
                while (isActive) {
                    delay(1000)
                }
            }
        }
    }

    /** Stops the [ViewLiveRecorder] push. */
    protected fun stopMethod4() {
        viewLiveRecord.stopRecord()
    }

    override fun onDestroy() {
        super.onDestroy()
        window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
        // Release the encoder if the activity dies mid-push; the old code
        // leaked an active recording here. (mTimerJob lives in lifecycleScope
        // and is cancelled automatically.)
        if (viewLiveRecord.isStartRecord) {
            stopMethod4()
        }
        mTimerJob = null
    }

    /**
     * Pushes [view]'s content over `artc://` using [AlivcLivePusher] with an
     * external video source fed by a periodic capture loop.
     */
    private fun startArtcRecord(view: View, url: String) {
        try {
            Log.d("BaseActivity", "Starting artc record with URL: $url")

            val pusher = AlivcLivePusher()
            // Dedicated config for this pusher, initialised with the same
            // defaults as the class-level config (previously duplicated with
            // fully-qualified names).
            val config = AlivcLivePushConfig()
            applyDefaultPushConfig(config)

            pusher.init(this, config)

            // Switch the pusher to an external video source so we can feed it
            // captured frames instead of camera frames.
            setExternalVideoSource(pusher, true)

            // Start the capture loop, then the push itself.
            startViewCapture(view, pusher)
            pusher.startPush(url)

            Log.d("BaseActivity", "Artc push started successfully")
        } catch (e: Exception) {
            Log.e("BaseActivity", "Error starting artc record: ${e.message}", e)
        }
    }

    /**
     * Capture loop: snapshots [view] roughly 25 times per second and hands
     * each frame to [pusher]. Runs until the lifecycle scope is cancelled.
     */
    private fun startViewCapture(view: View, pusher: AlivcLivePusher) {
        lifecycleScope.launch {
            while (isActive) {
                try {
                    val frame = captureViewToBitmap(view)
                    if (frame != null) {
                        sendBitmapToPusher(pusher, frame)
                        // The frame has been fully converted to YUV by now;
                        // recycle it so 25 fps of ARGB_8888 bitmaps don't
                        // hammer the GC.
                        frame.recycle()
                    }
                } catch (e: Exception) {
                    Log.e("BaseActivity", "Error capturing view: ${e.message}", e)
                }

                // ~25 fps → one frame every 40 ms.
                delay(40)
            }
        }
    }

    /**
     * Draws [view] into a new ARGB_8888 bitmap.
     *
     * @return the bitmap, or `null` if the view has no size yet or drawing fails.
     */
    private fun captureViewToBitmap(view: View): android.graphics.Bitmap? {
        // A detached or not-yet-laid-out view has zero size;
        // Bitmap.createBitmap(0, 0, ...) throws IllegalArgumentException.
        if (view.width <= 0 || view.height <= 0) return null
        return try {
            val bitmap = android.graphics.Bitmap.createBitmap(
                view.width,
                view.height,
                android.graphics.Bitmap.Config.ARGB_8888
            )
            view.draw(android.graphics.Canvas(bitmap))
            bitmap
        } catch (e: Exception) {
            Log.e("BaseActivity", "Error creating bitmap: ${e.message}", e)
            null
        }
    }

    /**
     * Converts [bitmap] to NV21 and feeds it to [pusher].
     *
     * NOTE(review): `inputStreamVideoData` is resolved reflectively with an
     * assumed `(ByteArray, Int, Long)` signature — confirm against the SDK
     * version in use; a mismatch is swallowed by the catch block below.
     */
    private fun sendBitmapToPusher(pusher: AlivcLivePusher, bitmap: android.graphics.Bitmap) {
        try {
            val yuvData = bitmapToYuv(bitmap)
            if (yuvData != null) {
                val method = pusher.javaClass.getMethod(
                    "inputStreamVideoData",
                    ByteArray::class.java,
                    Int::class.javaPrimitiveType,
                    Long::class.javaPrimitiveType
                )
                method.invoke(pusher, yuvData, yuvData.size, System.currentTimeMillis())
            }
        } catch (e: Exception) {
            Log.e("BaseActivity", "Error sending bitmap to pusher: ${e.message}", e)
        }
    }

    /**
     * Converts [bitmap] to YUV420SP (NV21).
     *
     * @return a `width * height * 3 / 2` byte buffer, or `null` on failure.
     */
    private fun bitmapToYuv(bitmap: android.graphics.Bitmap): ByteArray? {
        return try {
            val width = bitmap.width
            val height = bitmap.height
            val argb = IntArray(width * height)
            bitmap.getPixels(argb, 0, width, 0, 0, width, height)

            val yuv = ByteArray(width * height * 3 / 2)
            encodeYUV420SP(yuv, argb, width, height)
            yuv
        } catch (e: Exception) {
            Log.e("BaseActivity", "Error converting bitmap to YUV: ${e.message}", e)
            null
        }
    }

    /**
     * Encodes ARGB pixels into NV21 (YUV420SP): a full-resolution Y plane
     * followed by interleaved V/U samples at quarter resolution.
     *
     * Assumes even [width]/[height]; an odd width would mis-align the
     * `index % 2` chroma-subsampling test below — TODO confirm callers only
     * pass even dimensions.
     */
    private fun encodeYUV420SP(yuv420sp: ByteArray, argb: IntArray, width: Int, height: Int) {
        val frameSize = width * height
        var yIndex = 0
        var uvIndex = frameSize
        var index = 0

        for (j in 0 until height) {
            for (i in 0 until width) {
                val r = (argb[index] and 0xff0000) ushr 16
                val g = (argb[index] and 0xff00) ushr 8
                val b = argb[index] and 0xff

                // BT.601 RGB → YUV, fixed-point with rounding.
                // BUGFIX: use arithmetic shift (shr), not ushr — the U/V
                // intermediates go negative for many pixels and an unsigned
                // shift turned them into huge positive values that clamped to
                // 255, corrupting the chroma planes.
                var y = ((66 * r + 129 * g + 25 * b + 128) shr 8) + 16
                var u = ((-38 * r - 74 * g + 112 * b + 128) shr 8) + 128
                var v = ((112 * r - 94 * g - 18 * b + 128) shr 8) + 128

                // Clamp to the valid byte range.
                y = y.coerceIn(0, 255)
                u = u.coerceIn(0, 255)
                v = v.coerceIn(0, 255)

                yuv420sp[yIndex++] = y.toByte()

                // One V/U pair per 2x2 pixel block (NV21 = Y plane + VU interleaved).
                if (j % 2 == 0 && index % 2 == 0) {
                    yuv420sp[uvIndex++] = v.toByte()
                    yuv420sp[uvIndex++] = u.toByte()
                }
                index++
            }
        }
    }

    /**
     * Enables the pusher's external video source via reflection.
     *
     * NOTE(review): `setExternalVideoSource(Boolean, Boolean, Boolean)` is an
     * assumed signature — verify it exists in the SDK version in use; a
     * mismatch is logged and otherwise ignored.
     */
    private fun setExternalVideoSource(pusher: AlivcLivePusher, enable: Boolean) {
        try {
            val method = pusher.javaClass.getMethod(
                "setExternalVideoSource",
                Boolean::class.javaPrimitiveType,
                Boolean::class.javaPrimitiveType,
                Boolean::class.javaPrimitiveType
            )
            method.invoke(pusher, enable, true, true)
        } catch (e: Exception) {
            Log.e("BaseActivity", "Error setting external video source: ${e.message}", e)
        }
    }
}