package com.mxchip.livestarmobile.mobile.ui.call


import android.os.Bundle
import android.util.Log
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.fragment.app.activityViewModels
import androidx.lifecycle.Observer
import com.duobeiyun.paassdk.media.MediaType
import com.mxchip.livestarmobile.R
import com.mxchip.livestarmobile.mobile.base.AutoDisposableLifecycleAware
import com.mxchip.livestarmobile.mobile.base.addTo
import com.mxchip.livestarmobile.mobile.origin.AudioPublisher
import com.mxchip.livestarmobile.mobile.state.SystemData
import com.mxchip.livestarmobile.mobile.util.Camera.CameraHandle
import com.mxchip.livestarmobile.mobile.util.Camera.EncoderCallback
import com.mxchip.livestarmobile.mobile.util.Log.FileLog
import com.mxchip.livestarmobile.mobile.util.Log.MyLog
import com.mxchip.livestarmobile.mobile.util.SendAudioUtil
import io.reactivex.functions.Consumer
import kotlinx.android.synthetic.main.fragment_video.*
import java.text.SimpleDateFormat
import java.util.*


/**
 * Fragment hosting the main video call surface.
 *
 * Responsibilities visible here:
 *  - renders decoded remote AV packets into [player_view_big];
 *  - starts local audio capture ([AudioPublisher]) once the render surface exists;
 *  - forwards encoded local H.264 frames and PCM audio to the send pipeline;
 *  - reacts to network-quality changes by adjusting encoder bitrates.
 */
class VideoFragment : Fragment() {

    private val dbyViewModel by activityViewModels<DbyViewModel>()

    private val screenViewModel by activityViewModels<ScreenViewModel>()

    private val dbyNetworkViewModel by activityViewModels<DbyNetworkViewModel>()

    // Collects Rx subscriptions and disposes them with the fragment lifecycle.
    private val autoDisposable = AutoDisposableLifecycleAware()

    private var mPublisher: AudioPublisher? = null

    var oldList: MutableList<AVDevice> = arrayListOf()

    private val logger = FileLog.getLogger(VideoFragment::class.java)

    // Last bitrate applied to the small (secondary) camera stream; used to
    // avoid redundant encoder reconfiguration in [changeBitRate].
    var currentBitRate = SystemData.standardBitRate

    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
                              savedInstanceState: Bundle?): View? {
        val view = inflater.inflate(R.layout.fragment_video, container, false)

        // addObserver is idempotent for the same instance, so re-creation of the
        // view does not register the disposable twice.
        lifecycle.addObserver(autoDisposable)
        return view
    }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)

        dbyNetworkViewModel.delay.observe(viewLifecycleOwner, Observer {
            text_network_delay.text = "${it}ms"
        })

        dbyNetworkViewModel.speed.observe(viewLifecycleOwner, Observer {
            text_network_speed.text = it
        })

        screenViewModel.screenSettings.observe(viewLifecycleOwner, Observer {
            Log.e("VideoFragment", "screenViewModel.screenSettings.observe $it")
            player_view_big.setScreenSettings(it)
        })

        // Incoming remote AV packets: decode into the big player view, keyed by
        // "<uid>_<deviceName>" so each remote device gets its own decoder.
        dbyViewModel.receiveAVPacketConsumer(Consumer {
            val decoderName = "${it.uid}_${it.deviceName}"
            player_view_big.decodeAVPacket(it.data.size, it.data, decoderName)
        }).addTo(autoDisposable)

        // When a remote resource view is shown, drop the local encoder bitrate
        // (512 kbit/s) to free bandwidth; restore 1.5 Mbit/s when it is hidden.
        // An I-frame is requested after each change so the new rate takes
        // effect at a clean decode point.
        dbyViewModel.showRemoteResView.observe(viewLifecycleOwner, Observer { show ->
            logger.info(timestamp() + " VideoFragment::" + "receive hdmi step -1 ")
            if (show) {
                CameraHandle.getInstance().setEncoderBitRate(1024 * 512)
                CameraHandle.getInstance().applyIFrame()
            } else {
                logger.info(timestamp() + " VideoFragment::" + "receive hdmi step 0 ")
                CameraHandle.getInstance().setEncoderBitRate(1024 * 1024 * 3 / 2)
                CameraHandle.getInstance().applyIFrame()
            }
        })

        screenViewModel.networkLevel.observe(viewLifecycleOwner, Observer {
            changeBitRate(it)
        })

        // Local capture is only started once the render surface exists.
        player_view_big.setOnSurfaceCreatedListener {
            CameraHandle.getInstance().setEncoderCallback(mEncoderCallback)
            dbyViewModel.videoResInitFinish(true)

            if (mPublisher == null) {
                mPublisher = AudioPublisher().also { publisher ->
                    publisher.setAudioCallback(mAudioCallback)
                    publisher.startPublishAudio()
                }
            }
        }
    }

    /** Shared log timestamp format ("yyyy-MM-dd HH:mm:ss", China locale). */
    private fun timestamp(): String =
            SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.CHINA).format(Date())

    /**
     * Maps a network-quality level to the (local, small-stream) encoder
     * bitrates and applies them, skipping the reconfiguration when the
     * small-stream bitrate is already current.
     *
     * Fix: the original had no fallback branch, so an unrecognized level left
     * both bitrates at 0 and pushed a zero bitrate into both encoders. Unknown
     * levels are now ignored.
     */
    private fun changeBitRate(level: Int) {
        val (cameraLocalBitrate, cameraSmallBitrate) = when (level) {
            SystemData.networkLevelMinimum ->
                SystemData.minimumBitRate to SystemData.cameraMinimumBitRate
            SystemData.networkLevelLower ->
                SystemData.lowerBitRate to SystemData.cameraLowerBitRate
            SystemData.networkLevelMiddle ->
                SystemData.middleBitRate to SystemData.cameraMiddleBitRate
            SystemData.networkLevelStandard ->
                SystemData.standardBitRate to SystemData.cameraStandardBitRate
            else -> return // unknown level: keep the current encoder settings
        }
        if (currentBitRate != cameraSmallBitrate) {
            Log.d("VideoFragment", "changeBitRate: cameraBitrate:$cameraSmallBitrate ")
            currentBitRate = cameraSmallBitrate
            CameraHandle.getInstance().setEncoderBitRate(cameraLocalBitrate)
            CameraHandle.getInstance().setEncoder2BitRate(cameraSmallBitrate)
        }
    }

    // Receives captured PCM frames and forwards them to the audio send queue.
    var mAudioCallback: AudioPublisher.AudioCallback? = object : AudioPublisher.AudioCallback {

        override fun onGetPcmFrame(data: ByteArray, length: Int) {
            if (data.isEmpty()) {
                return
            }
            val avPacket = AVPacket(data, AVDevice.AUDIO_BUILD_IN, SystemData.userId())
            SendAudioUtil.getInstance().putSendData(avPacket)
        }

        override fun AudioOPenFail() {}
    }

    // Receives encoded H.264 frames from the camera encoder and forwards them
    // upstream. NOTE(review): this anonymous object captures the fragment and
    // is installed on the CameraHandle singleton; confirm it is cleared
    // elsewhere or the fragment may leak after destruction.
    private var mEncoderCallback: EncoderCallback? = object : EncoderCallback {

        override fun onH264FramePacket(data: ByteArray, deviceName: String, width: Int, height: Int, rotation: Int, timestamp: Long) {
            val packet = AVPacket(data, deviceName, SystemData.userId(), MediaType.DBMediaFrameType.H264.type, width, height, rotation, timestamp)

            // TODO: in non-dual-stream mode, stop HDMI and WiFi-camera encoding
            // outright instead of still sending their frames.
            if (deviceName == AVDevice.CAMERA_HDMI || deviceName == AVDevice.CAMERA_WIFI) {
                // intentionally falls through; see TODO above
            }

            dbyViewModel.sendAVPacket(packet)
        }
    }

    override fun onResume() {
        super.onResume()
    }

    override fun onDestroy() {
        super.onDestroy()
        // Safe-call instead of !!: the publisher is never created if the
        // surface callback never fired.
        mPublisher?.stopPublishAudio()
    }

}
