package com.mxchip.livestar.ui.call


import android.annotation.SuppressLint
import android.os.Bundle
import android.util.Log
import android.view.LayoutInflater
import android.view.MotionEvent
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.fragment.app.activityViewModels
import androidx.lifecycle.Lifecycle
import androidx.lifecycle.Observer
import androidx.lifecycle.lifecycleScope
import com.trello.lifecycle2.android.lifecycle.AndroidLifecycle
import com.trello.rxlifecycle3.LifecycleProvider
import com.mxchip.livestar.R
import com.mxchip.livestar.av.camera.CameraHandle
import com.mxchip.livestar.av.camera.EncoderCallback
import com.mxchip.livestar.av.camera.HDMIHandle
import com.mxchip.livestar.av.camera.WifiCamera
import com.mxchip.livestar.av.screen.DecoderNames
import com.mxchip.livestar.av.screen.VideoGestureListener
import com.mxchip.livestar.base.AutoDisposableLifecycleAware
import com.mxchip.livestar.utils.FileLog
import com.mxchip.livestar.utils.MyLog
import com.mxchip.livestar.rx.RxSchedulersHelper
import com.mxchip.livestar.base.addTo
import com.mxchip.livestar.base.SystemData
import com.mxchip.livestar.base.toast
import com.mxchip.livestar.original.*
import com.mxchip.livestar.original.AudioPublisher.AudioCallback
import com.mxchip.livestar.repository.RoomClientRepository
import com.mxchip.livestar.rx.RxBus
import com.mxchip.livestar.rx.RxConstant
import com.videohigh.boegam.screenpush.AudioEncoderCallback
import com.videohigh.boegam.screenpush.ScreenPushManager
import com.videohigh.graham.SDKStreamEventListener
import com.videohigh.graham.SampleSDK
import com.videohigh.graham.SessionContext
import com.videohigh.graham.TermVideoDevice
import com.videohigh.hdmiin.h1.AudioResDataCallBack
import io.reactivex.Observable
import io.reactivex.functions.Consumer
import kotlinx.android.synthetic.main.fragment_video.*
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import java.text.SimpleDateFormat
import java.util.*

/**
 * Audio/video playback fragment: renders received video streams, publishes
 * local camera/HDMI/wireless-cast video and microphone or shared-source audio.
 */
class VideoFragment : Fragment(), VideoGestureListener {

    private val dbyViewModel by activityViewModels<DbyViewModel>()

    private val screenViewModel by activityViewModels<ScreenViewModel>()

    private val dbyNetworkViewModel by activityViewModels<DbyNetworkViewModel>()

    private val callControlViewModel by activityViewModels<CallControlViewModel>()

    // NOTE(review): never referenced in this fragment; kept to avoid breaking
    // anything that relies on the ViewModel being created here — confirm and remove.
    private val roomViewModel by activityViewModels<RoomViewModel>()

    private val autoDisposable = AutoDisposableLifecycleAware()

    private val logger = FileLog.getLogger(VideoFragment::class.java)

    // True while the shared source's audio (HDMI / wireless cast) should be
    // published instead of the local microphone. [delayshareAudio] trails
    // [shareAudio] by ~300 ms (see openExtMic observer) so the mic and
    // shared-source callbacks only switch once both flags agree.
    private var shareAudio = false
    private var delayshareAudio = false

    // Microphone capture/publish pipeline; created lazily on first surface creation.
    private var mPublisher: AudioPublisher? = null

    // Local playback path for shared-source audio monitoring; nulled in onDestroy.
    private var localHDMIAudioUtil: LocalHDMIAudioUtil? = LocalHDMIAudioUtil()

    // RxBus registrations, held so they can be unregistered in onDestroy.
    private var busyResAudioPubObservable: Observable<Boolean>? = null
    private var getHdmiAudioObservable: Observable<Boolean>? = null
    private lateinit var shareContentAddEventObservable: Observable<SessionContext>
    private lateinit var shareContentRemoveEventObservable: Observable<SessionContext>

    private var mScreenPushManager = ScreenPushManager.getInstance()

    private var mAudioPlayer: AudioPlayer? = null

    // Last bitrate actually applied, used to suppress redundant encoder updates.
    private var currentBitRate = SystemData.standardBitRate

    private val provider: LifecycleProvider<Lifecycle.Event> = AndroidLifecycle.createLifecycleProvider(this)

    /**
     * Timestamp prefix for file-log statements. A fresh [SimpleDateFormat] is
     * created per call because SimpleDateFormat is not thread-safe and these
     * logs are emitted from multiple threads.
     */
    private fun logTime(): String =
            SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.CHINA).format(Date())

    @SuppressLint("CheckResult")
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Fired when the call connects: toggles publishing of the wireless-cast
        // source's audio.
        busyResAudioPubObservable = RxBus.get().register(RxConstant.BUSY_RES_AUDIO_PUB, Boolean::class.java)
        busyResAudioPubObservable?.compose(provider.bindToLifecycle())?.compose(RxSchedulersHelper.io_main())
                ?.subscribe(::busyResAudioPubCloseObservable) { error ->
                    logger.error(logTime() + " busyResAudioPubObservable" + error.message)
                    error.printStackTrace()
                }

        getHdmiAudioObservable = RxBus.get().register(RxConstant.GET_HDMI_AUDIO, Boolean::class.java)
        getHdmiAudioObservable?.compose(provider.bindToLifecycle())?.compose(RxSchedulersHelper.io_main())
                ?.subscribe(::hdmiAudioPublish) { error ->
                    logger.error(logTime() + " getHdmiAudioObservable" + error.message)
                    error.printStackTrace()
                }

        // Remote peer opened/closed content (PPT/screen) sharing. Deliberately NOT
        // bound to the lifecycle (see commented-out compose): these events must be
        // processed even while the fragment is transitioning — confirm before
        // re-enabling the binding.
        shareContentAddEventObservable = RxBus.get().register(RxConstant.SHARE_CONTENT_ADD_EVENT, SessionContext::class.java)
        shareContentAddEventObservable./*compose(provider.bindToLifecycle()).*/compose(RxSchedulersHelper.io_main())
                .subscribe(::onShareContentAddEvent) { error ->
                    logger.error(logTime() + " shareContentAddEventObservable" + error.message)
                    error.printStackTrace()
                }

        shareContentRemoveEventObservable = RxBus.get().register(RxConstant.SHARE_CONTENT_REMOVE_EVENT, SessionContext::class.java)
        shareContentRemoveEventObservable./*compose(provider.bindToLifecycle()).*/compose(RxSchedulersHelper.io_main())
                .subscribe(::onShareContentRemoveEvent) { error ->
                    logger.error(logTime() + " shareContentRemoveEventObservable" + error.message)
                    error.printStackTrace()
                }

        if (mAudioPlayer == null) {
            mAudioPlayer = AudioPlayer(context)
            mAudioPlayer!!.startPlayer()
        }
    }

    /**
     * Remote peer started screen/content sharing: show the remote resource view.
     */
    private fun onShareContentAddEvent(event: SessionContext) {
        dbyViewModel.showRemoteResView(true)
    }

    /**
     * Remote peer stopped screen/content sharing: hide the remote resource view.
     */
    private fun onShareContentRemoveEvent(event: SessionContext) {
        dbyViewModel.showRemoteResView(false)
    }

    /**
     * Enables/disables publishing of the shared source's audio during wireless
     * (cast) screen sharing by attaching/detaching the screen-push audio callback.
     */
    private fun busyResAudioPubCloseObservable(aBoolean: Boolean) {
        MyLog.logD(MyLog.AUDIO_SEND_CTRL_MODEL, "VideoFragment::busyResAudioPubCloseObservable: aBoolean:${aBoolean}  ")
        if (aBoolean) {
            mScreenPushManager.addAudioEncoderCallback(audioEncoderCallback)
        } else {
            mScreenPushManager.removeAudioEncoderCallback(audioEncoderCallback)
            dbyViewModel.extMic(false)
        }
    }

    /**
     * Enables/disables publishing of the shared source's audio during HDMI screen
     * sharing. Enabling swaps the mic publisher for HDMI capture; disabling
     * restores the mic on an IO dispatcher (HDMI stop may block).
     */
    private fun hdmiAudioPublish(enable: Boolean) {
        MyLog.logD(MyLog.AUDIO_SEND_CTRL_MODEL, "VideoFragment::hdmiAudioPublish: enable:${enable} shareAudio:${shareAudio} delayshareAudio:${delayshareAudio}  ")
        if (enable && shareAudio && !delayshareAudio) {
            stopPublishMicAudio()
            HDMIHandle.getInstance().startAudio(audioResDataCallBack)
        } else if (!enable) {
            lifecycleScope.launch(Dispatchers.IO) {
                dbyViewModel.extMic(false)
                HDMIHandle.getInstance().stopAudio()
                startPublishMicAudio()
            }
        }
    }

    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
                              savedInstanceState: Bundle?): View? {
        val view = inflater.inflate(R.layout.fragment_video, container, false)
        lifecycle.addObserver(autoDisposable)
        return view
    }

    /** Routes received remote video frames into the big player's decoder. */
    private val sdkStreamEventListener = object : SDKStreamEventListener {

        override fun onReceiveVideoBytes(device: TermVideoDevice, data: ByteArray, format: Int, width: Int, height: Int, rotation: Int, timestamp: Long) {
            val decoderName = DecoderNames.getNameFromTermVideoDevice(device)
            MyLog.logD(MyLog.VIDEO_RECEIVE_MODEL, "VideoFragment::onViewCreated: decoderName:${decoderName}  ")
            player_view_big.decodeAVPacket(data.size, data, decoderName)
        }
    }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        player_view_big.setVideoGestureListener(this)

        // Network statistics overlay.
        dbyNetworkViewModel.delay.observe(viewLifecycleOwner, Observer {
            text_network_delay.text = "${it}ms"
        })

        dbyNetworkViewModel.speed.observe(viewLifecycleOwner, Observer {
            text_network_speed.text = it
        })

        dbyViewModel.netQuality.observe(viewLifecycleOwner, Observer {
            net_describe.text = it
        })

        screenViewModel.screenSettings.observe(viewLifecycleOwner, Observer {
            Log.e("ScreenViewModel", "screenViewModel.screenSettings.observe $it")
            player_view_big.setScreenSettings(it)
        })

        RoomClientRepository.setStreamEventListener(sdkStreamEventListener)

        // Drain queued audio packets into the room client.
        SendAudioUtil.getInstance().setGetAudioData { avPacket ->
            MyLog.logD(MyLog.SEND_AUDIO_PACKET_MODEL, "CallActivityA: getAudioData:  customAudioSource?.frameCustomMap----- avPacket.deviceName:${avPacket.deviceName} data.size:${avPacket.data.size}")
            avPacket.apply {
                MyLog.logD(MyLog.SEND_AUDIO_PACKET_MODEL, "CallActivityA: getAudioData:  customByteArrayFrame----- deviceName:$deviceName data.size:${data.size} Thread.name:${Thread.currentThread().name}")
                RoomClientRepository.sendAudioDeviceBytes(deviceName, data)
            }
        }

        // Local HDMI source preview / publish toggle.
        dbyViewModel.showHDMIView.observe(viewLifecycleOwner, Observer { enable ->
            screenViewModel.hdmiCameraPreview(enable)
            if (enable) {
                player_view_big.startResPub()
            } else {
                player_view_big.stopResPub()
            }
        })

        // Wireless-cast source preview / publish toggle.
        dbyViewModel.showWiFiView.observe(viewLifecycleOwner, Observer { enable ->
            screenViewModel.wifiCameraPreview(enable)
            if (enable) {
                player_view_big.startWifiResourcePub()
            } else {
                player_view_big.stopWifiResourcePub()
                SystemData.getInstance().wifiResPub = false
            }
        })

        // While a remote share is on screen, lower the camera encoder bitrate to
        // leave bandwidth for the shared content; restore it when the share ends.
        dbyViewModel.showRemoteResView.observe(viewLifecycleOwner, Observer { show ->
            logger.info(logTime() + " VideoFragment::" + "receive hdmi step -1 ")
            if (show) {
                CameraHandle.getInstance().setEncoderBitRate(1024 * 512)
            } else {
                logger.info(logTime() + " VideoFragment::" + "receive hdmi step 0 ")
                CameraHandle.getInstance().setEncoderBitRate(1024 * 1024 * 3 / 2)
            }
        })

        screenViewModel.networkLevel.observe(viewLifecycleOwner, Observer {
            changeBitRate(it)
        })

        // Once the render surface exists, wire the encoder callbacks and start the
        // mic publisher (exactly once).
        player_view_big.setOnSurfaceCreatedListener {
            CameraHandle.getInstance().setEncoderCallback(mEncoderCallback)
            HDMIHandle.getInstance().setEncoderCallback(mEncoderCallback)
            WifiCamera.getInstance().setEncoderCallback(mEncoderCallback)
            dbyViewModel.videoResInitFinish(true)

            if (mPublisher == null) {
                mPublisher = AudioPublisher()
                startPublishMicAudio()
            }
            localHDMIAudioUtil?.setCallBack(callBack)
        }

        // Delay the second flag by 300 ms so mic and shared-source callbacks
        // hand over cleanly (see shareAudio/delayshareAudio docs).
        dbyViewModel.openExtMic.observe(viewLifecycleOwner, Observer { enable ->
            shareAudio = enable
            lifecycleScope.launch {
                delay(300)
                delayshareAudio = enable
            }
        })
    }

    /**
     * Applies the encoder bitrates matching a network quality [it] level.
     * No-op when the level maps to the bitrate already in effect, or when the
     * level is unrecognized (previously an unknown level drove both encoders
     * to bitrate 0, which would stall encoding).
     */
    fun changeBitRate(it: Int) {
        val (h264Bitrate, cameraBitrate) = when (it) {
            SystemData.networkLevelMinimum -> SystemData.minimumBitRate to SystemData.cameraMinimumBitRate
            SystemData.networkLevelLower -> SystemData.lowerBitRate to SystemData.cameraLowerBitRate
            SystemData.networkLevelMiddle -> SystemData.middleBitRate to SystemData.cameraMiddleBitRate
            SystemData.networkLevelStandard -> SystemData.standardBitRate to SystemData.cameraStandardBitRate
            else -> return // unknown level: keep current bitrates
        }
        if (currentBitRate != h264Bitrate) {
            Log.d("VideoFragment", "changeBitRate:  h264Bitrate:$h264Bitrate  cameraBitrate:$cameraBitrate ")
            currentBitRate = h264Bitrate
            CameraHandle.getInstance().setEncoderBitRate(h264Bitrate)
            CameraHandle.getInstance().setEncoderSmallBitRate(cameraBitrate)
            if (dbyViewModel.shareScreen.value == AVDevice.HDMI_RES) {
                HDMIHandle.getInstance().setEncodeBitRate(h264Bitrate)
            } else if (dbyViewModel.shareScreen.value == AVDevice.WIFI_RES) {
                // WifiCamera expects kbps — NOTE(review): confirm against WifiCamera API.
                WifiCamera.getInstance().setEncodeBitRate(h264Bitrate / 1024)
            }
        }
    }

    /**
     * Starts (or restarts) publishing microphone PCM via [mPublisher].
     */
    private fun startPublishMicAudio() {
        mPublisher?.setAudioCallback(mAudioCallback)
        mPublisher?.startPublishAudio()
    }

    /** Feeds locally-monitored shared-source audio into the speaker player. */
    private val callBack: LocalHDMIAudioUtil.PlayLocalHDMIAudioCallBack = LocalHDMIAudioUtil.PlayLocalHDMIAudioCallBack { data ->
        mAudioPlayer?.addAudioData(data)
    }

    /**
     * Microphone PCM callback: queues frames for publishing, but only while
     * shared-source audio is fully OFF (both flags false), so mic and shared
     * audio are never published simultaneously.
     */
    var mAudioCallback: AudioCallback? = object : AudioCallback {

        override fun onGetPcmFrame(data: ByteArray, length: Int) {
            if (shareAudio || delayshareAudio) return
            if (data.isEmpty()) return
            MyLog.logD(MyLog.AUDIO_SEND_MODEL, "VideoFragment: getAudioData:  onGetPcmFrame----- AVDevice.AUDIO_BUILD_IN:${AVDevice.AUDIO_BUILD_IN} data.size:${data.size}")
            val avPacket = AVPacket(data, AVDevice.AUDIO_BUILD_IN, SystemData.userId())
            SendAudioUtil.getInstance().putSendData(avPacket)
        }

        override fun AudioOPenFail() {}
    }

    /**
     * Normalized mixing of two 16-bit little-endian PCM buffers of [len] bytes.
     * Sums corresponding samples with an adaptive attenuation factor that backs
     * off on clipping and slowly recovers toward unity gain.
     *
     * Fixes: the low byte of each sample is masked to 0..255 (bytes are signed
     * in Kotlin, so the unmasked form corrupted samples whose low byte was
     * >= 0x80), and the loop guards `i + 1 < len` so an odd [len] cannot read
     * past the requested range.
     */
    fun Mix2(sourseFile1: ByteArray, sourseFile2: ByteArray, len: Int): ByteArray? {
        val max = 32767
        val min = -32768
        var f = 1.0 // adaptive attenuation factor, 0 < f <= 1 after clipping
        val outBuffer = ByteArray(len)
        var i = 0
        while (i + 1 < len) {
            // Decode little-endian signed 16-bit samples: signed high byte * 256
            // plus unsigned low byte.
            var temp = 0
            temp += sourseFile1[i + 1] * 256 + (sourseFile1[i].toInt() and 0xFF)
            temp += sourseFile2[i + 1] * 256 + (sourseFile2[i].toInt() and 0xFF)
            var output = (temp * f).toInt()
            if (output > max) {
                f = max.toDouble() / output.toDouble()
                output = max
            }
            if (output < min) {
                f = min.toDouble() / output.toDouble()
                output = min
            }
            // Recover toward unity gain once clipping stops.
            if (f < 1) {
                f += (1.0 - f) / 32.0
            }
            outBuffer[i] = (output and 0xFF).toByte()
            outBuffer[i + 1] = (output shr 8).toByte()
            i += 2
        }
        return outBuffer
    }

    /**
     * Video encoder callback shared by camera, HDMI and wifi sources: forwards
     * each H.264 frame to the room client.
     */
    private var mEncoderCallback: EncoderCallback? = object : EncoderCallback {

        override fun onH264FramePacket(data: ByteArray, deviceName: String, width: Int, height: Int, rotation: Int, timestamp: Long) {
            MyLog.logD(MyLog.VIDEO_SEND_MODEL, "CallActivityA::sendAVPacketConsumer deviceName:$deviceName")
            RoomClientRepository.sendVideoDeviceBytes(deviceName, data, width, height)
        }
    }

    /**
     * Common path for PCM captured from the shared source (HDMI or wireless
     * cast): plays the frame locally and queues it for publishing, but only
     * while both share flags agree that shared-source audio is active.
     */
    private fun onSharedSourcePcm(data: ByteArray, length: Int) {
        if (!shareAudio || !delayshareAudio) return
        if (data.isEmpty()) return
        // Local monitoring copy. NOTE(review): assumes length <= 640 bytes per
        // frame — confirm the capture frame size upstream.
        val localPlay = ByteArray(640)
        System.arraycopy(data, 0, localPlay, 0, length)
        playHDMI2(localPlay)
        val avPacket = AVPacket(data, AVDevice.AUDIO_BUILD_IN, SystemData.userId())
        SendAudioUtil.getInstance().putSendData(avPacket)
    }

    /** Shared-source (HDMI) audio capture callback. */
    private val audioResDataCallBack: AudioResDataCallBack = object : AudioResDataCallBack {
        override fun onGetPcmFrame(data: ByteArray, length: Int) {
            onSharedSourcePcm(data, length)
        }
    }

    /** Shared-source (wireless cast) audio capture callback. */
    private val audioEncoderCallback = object : AudioEncoderCallback {
        override fun onGetPcmFrame(data: ByteArray, length: Int) {
            onSharedSourcePcm(data, length)
        }
    }

    /**
     * Queues shared-source audio for local playback.
     */
    fun playHDMI2(data: ByteArray?) {
        localHDMIAudioUtil?.putData(data)
    }

    override fun onDestroy() {
        super.onDestroy()

        MyLog.logD(MyLog.VIDEO_FRAGMENT_MODEL, "CallControlFragment::onDestroy: ")
        stopPublishMicAudio()

        mAudioPlayer?.stopPlayer()
        mAudioPlayer = null

        // Mirror every RxBus.register from onCreate.
        RxBus.get().unregister(RxConstant.BUSY_RES_AUDIO_PUB, busyResAudioPubObservable)
        RxBus.get().unregister(RxConstant.GET_HDMI_AUDIO, getHdmiAudioObservable)
        RxBus.get().unregister(RxConstant.SHARE_CONTENT_ADD_EVENT, shareContentAddEventObservable)
        RxBus.get().unregister(RxConstant.SHARE_CONTENT_REMOVE_EVENT, shareContentRemoveEventObservable)

        localHDMIAudioUtil?.stop()
        localHDMIAudioUtil = null

        mEncoderCallback = null

        RoomClientRepository.setStreamEventListener(null)
    }

    /**
     * Stops publishing microphone PCM.
     */
    private fun stopPublishMicAudio() {
        mPublisher?.stopPublishAudio()
    }

    /**
     * Long press opens the conference-control page.
     */
    override fun onLongClick() {
        callControlViewModel.onLongClick()
    }

    override fun onVolumeGesture(e1: MotionEvent?, e2: MotionEvent?, distanceX: Float, distanceY: Float) {
    }

    override fun onDown(e: MotionEvent?) {
    }

    /**
     * Single tap shows the function page.
     */
    override fun onSingleTapGesture(e: MotionEvent?) {
        callControlViewModel.touched()
    }

    /**
     * Vertical fling pages through the video list (only when more than four
     * devices are present). An upward fling (e1.y >= e2.y) advances to the
     * next page.
     */
    override fun onFling(e1: MotionEvent, e2: MotionEvent, velocityX: Float, velocityY: Float) {
        val size = screenViewModel.avDevices.value?.size ?: return
        if (size <= 4) {
            return
        }
        MyLog.logD(MyLog.VIDEO_FRAGMENT_MODEL, "VideoFragment::onFling: e1:${e1.y} e2:${e2.y}  ")
        val next = e1.y >= e2.y
        if (next) {
            toast("下一页数据")
        } else {
            toast("上一页数据")
        }
        screenViewModel.manualSelection(next)
    }

    /**
     * Tapping a small window swaps it with the big window.
     */
    override fun onSelectSomeOne(index: Int) {
        screenViewModel.setSelectSomeOne(index)
    }

}
