package com.yl.immodule.util

import android.util.Log
import com.android.yl.ylicpsdk.ICPRtcEventHandler
import com.android.yl.ylicpsdk.ICPRtcManager
import com.camera.base.Log.YLog
import com.camera.base.inerface.CameraServiceSingleton
import com.camera.base.inerface.FramePusher
import com.camera.base.inerface.H264Buffer
import com.camera.base.inerface.VideoFrame
import com.camera.base.inerface.VideoFrameBufferType
import com.camera.base.util.ToastUtil
import com.hm.poc.IPocEngineEventHandler

/**
 * Handles a single incoming video-monitor session: accepts the call, wires the
 * camera's sub-code H.264 stream into the RTC engine, and tears the pipeline
 * down again when the session disconnects.
 *
 * Lifecycle: construct with the [IPocEngineEventHandler.IncomingInfo] of the
 * incoming call, then invoke [handlerIncoming]. The event handler registered in
 * the `init` block removes itself once its own session disconnects.
 *
 * @param incomingInfo descriptor of the incoming call; its `sessionId`
 *                     identifies the session this helper manages.
 */
class MonitorVideoHelper(private val incomingInfo: IPocEngineEventHandler.IncomingInfo) {
    private val pocEngine by lazy { ICPRtcManager.getsInstance() }
    private val mICPInterface by lazy { CameraServiceSingleton.getInstance() }

    // Assigned exactly once, so plain vals instead of lateinit vars.
    private val icpRtcEventHandler: ICPRtcEventHandler = EventHandler()
    private val pusher: FramePusher = MFramePusher(VideoFrameBufferType.H264_PCM_BUFFER)
    private val sessionId: Long = incomingInfo.sessionId

    init {
        pocEngine.addEventHandler(icpRtcEventHandler)
    }

    /**
     * Routes the incoming session by type. Only video-monitor calls are
     * handled; audio meetings are currently a no-op (legacy path disabled).
     */
    fun handlerIncoming() {
        when (incomingInfo.sessionType) {
            IPocEngineEventHandler.SessionType.TYPE_AUDIO_MEETING -> {
                // Audio meetings are intentionally not handled by this helper.
            }
            IPocEngineEventHandler.SessionType.TYPE_VIDEO_MONITOR_CALL -> {
                tryAcceptVideoMonitor(incomingInfo)
            }
            else -> {
                YLog.e("TAG", "handlerIncomingIntent: invalid session type")
            }
        }
    }

    /**
     * Accepts the video-monitor call; on failure shows a toast
     * ("failed to accept video monitor call") and gives up.
     */
    private fun tryAcceptVideoMonitor(incomingInfo: IPocEngineEventHandler.IncomingInfo) {
        if (!pocEngine.acceptCall(incomingInfo.sessionId)) {
            ToastUtil.showLong("视频监控接听失败")
        }
    }

    /** RTC session lifecycle callbacks for this monitor session. */
    private inner class EventHandler : ICPRtcEventHandler() {

        override fun onCallConnected(sessionId: Long, remoteId: String?, sessionType: Int) {
            setupMonitorVideo()
        }

        override fun onCallDisconnected(msessionId: Long, reason: Int) {
            Log.e("MonitorVideoHelper", "onCallDisconnected: ")
            // Only tear down if the disconnected session is the one we manage.
            if (sessionId == msessionId) {
                mICPInterface?.removeSubcodeStream(pusher)
                pocEngine.removeEventHandler(icpRtcEventHandler)
            }
        }

        // Meeting voice-decibel updates — intentionally unused in monitor mode.
        override fun onVoiceDecibelChanged(decibel: Int, isRemote: Boolean) = Unit
    }

    /**
     * Opens the ICP camera, attaches the sub-code stream pusher, requests a
     * key frame, and starts sending local video to the remote side.
     */
    fun setupMonitorVideo() {
        mICPInterface?.openICPCamera()
        mICPInterface?.addSubcodeStreamPusher(pusher)
        mICPInterface?.requestKeyFrame(5) // NOTE(review): assumes 5 is a key-frame request parameter — confirm semantics with camera service
        pocEngine.sendLocalVideo()
    }

    /** Forwards camera H.264 sub-code frames into the RTC engine for [sessionId]. */
    inner class MFramePusher(type: Int) : FramePusher(type) {

        override fun onFrame(farme: VideoFrame?) {
            // Only forward H.264 buffers that actually carry payload; hoist the
            // cast and the data reference instead of repeating unsafe casts and `!!`.
            val buffer = farme?.buffer ?: return
            if (buffer.bufferType != VideoFrameBufferType.H264_PCM_BUFFER) return
            val data = (buffer as H264Buffer).data ?: return
            pocEngine.pushVideoFrame(sessionId, data, data.size)
        }
    }
}