package com.yunianvh.webrtc_compose_demo.webrtc


import android.content.Context
import android.text.TextUtils
import com.google.gson.Gson
import com.yunianvh.webrtc_compose_demo.util.FLogUtil
import okhttp3.*
import okhttp3.MediaType.Companion.toMediaTypeOrNull
import org.webrtc.*
import org.webrtc.audio.JavaAudioDeviceModule
import org.webrtc.voiceengine.WebRtcAudioUtils
import java.io.IOException
import java.util.*
import java.util.concurrent.TimeUnit
import kotlin.concurrent.schedule

/**
 * Created by 玉念聿辉.
 * Use: WebRTC push/pull (publish/play) stream utility class
 * Date: 2023/5/9
 * Time: 11:23
 */
/**
 * Wraps a single [PeerConnection] that either publishes the local microphone
 * (and optionally camera) or plays a remote stream, exchanging SDP with a
 * signaling server via a plain HTTP POST (offer out, answer back).
 *
 * Lifecycle: call one of the [create] overloads to start, [destroy] to tear down.
 * The instance also acts as its own [PeerConnection.Observer] and [SdpObserver].
 */
class WebRTCUtil(context: Context) : PeerConnection.Observer,
    SdpObserver {
    // Application context only, so the helper never leaks an Activity.
    private val context: Context = context.applicationContext
    private var eglBase: EglBase? = null
    private var playUrl: String? = null
    private var peerConnection: PeerConnection? = null
    private var surfaceViewRenderer: SurfaceViewRenderer? = null
    private var peerConnectionFactory: PeerConnectionFactory? = null
    private var audioSource: AudioSource? = null
    private var videoSource: VideoSource? = null
    private var localAudioTrack: AudioTrack? = null
    private var localVideoTrack: VideoTrack? = null
    private var captureAndroid: VideoCapturer? = null
    private var surfaceTextureHelper: SurfaceTextureHelper? = null
    private var isShowCamera = false
    private var isPublish = false // true = publish (push), false = play (pull)
    private var reConnCount = 0   // consecutive SDP-exchange attempts for the current offer
    private var callBack: WebRtcCallBack? = null

    /** Result callback for the SDP exchange: [onSuccess] once the answer is applied, [onFail] when retries are exhausted. */
    interface WebRtcCallBack {
        fun onSuccess()
        fun onFail()
    }

    /** Convenience overload: play (pull) a remote stream. */
    fun create(
        eglBase: EglBase?,
        surfaceViewRenderer: SurfaceViewRenderer?,
        playUrl: String?,
        callBack: WebRtcCallBack?
    ) {
        create(eglBase, surfaceViewRenderer, false, playUrl, callBack)
    }

    /**
     * Starts a session without changing the camera flag.
     *
     * @param isPublish true to publish (send-only), false to play (receive-only).
     * @param playUrl   signaling endpoint that accepts the SDP offer and returns an answer.
     */
    fun create(
        eglBase: EglBase?,
        surfaceViewRenderer: SurfaceViewRenderer?,
        isPublish: Boolean,
        playUrl: String?,
        callBack: WebRtcCallBack?
    ) {
        this.eglBase = eglBase
        this.surfaceViewRenderer = surfaceViewRenderer
        this.callBack = callBack
        this.playUrl = playUrl
        this.isPublish = isPublish
        init()
    }

    /**
     * Starts a session.
     *
     * @param isShowCamera when publishing, also capture and send the camera video track.
     */
    fun create(
        eglBase: EglBase?,
        surfaceViewRenderer: SurfaceViewRenderer?,
        isPublish: Boolean,
        isShowCamera: Boolean,
        playUrl: String?,
        callBack: WebRtcCallBack?
    ) {
        this.eglBase = eglBase
        this.surfaceViewRenderer = surfaceViewRenderer
        this.callBack = callBack
        this.playUrl = playUrl
        this.isPublish = isPublish
        this.isShowCamera = isShowCamera
        init()
    }

    /** Builds the factory and PeerConnection, wires transceivers/tracks, and kicks off createOffer. */
    private fun init() {
        peerConnectionFactory = getPeerConnectionFactory(context)
        Logging.enableLogToDebugOutput(Logging.Severity.LS_NONE)
        peerConnection = peerConnectionFactory!!.createPeerConnection(config, this)

        if (!isPublish) {
            // Play: receive-only audio + video transceivers.
            peerConnection!!.addTransceiver(
                MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO,
                RtpTransceiver.RtpTransceiverInit(RtpTransceiver.RtpTransceiverDirection.RECV_ONLY)
            )
            peerConnection!!.addTransceiver(
                MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO,
                RtpTransceiver.RtpTransceiverInit(RtpTransceiver.RtpTransceiverDirection.RECV_ONLY)
            )
        } else {
            // Publish: send-only transceivers plus local capture.
            peerConnection!!.addTransceiver(
                MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO,
                RtpTransceiver.RtpTransceiverInit(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY)
            )
            peerConnection!!.addTransceiver(
                MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO,
                RtpTransceiver.RtpTransceiverInit(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY)
            )

            // Software echo cancellation / noise suppression.
            WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true)
            WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true)

            // Local audio track.
            audioSource = peerConnectionFactory!!.createAudioSource(createAudioConstraints())
            localAudioTrack = peerConnectionFactory!!.createAudioTrack(AUDIO_TRACK_ID, audioSource)
            localAudioTrack!!.setEnabled(true)
            peerConnection!!.addTrack(localAudioTrack)

            // Optional local camera track with a local preview sink.
            if (isShowCamera) {
                captureAndroid = CameraUtil.createVideoCapture(context)
                surfaceTextureHelper =
                    SurfaceTextureHelper.create("CameraThread", eglBase!!.eglBaseContext)
                videoSource = peerConnectionFactory!!.createVideoSource(false)
                captureAndroid!!.initialize(
                    surfaceTextureHelper,
                    context,
                    videoSource!!.capturerObserver
                )
                captureAndroid!!.startCapture(VIDEO_RESOLUTION_WIDTH, VIDEO_RESOLUTION_HEIGHT, FPS)
                localVideoTrack = peerConnectionFactory!!.createVideoTrack(VIDEO_TRACK_ID, videoSource)
                localVideoTrack!!.setEnabled(true)
                if (surfaceViewRenderer != null) {
                    val videoSink = ProxyVideoSink()
                    videoSink.setTarget(surfaceViewRenderer)
                    localVideoTrack!!.addSink(videoSink)
                }
                peerConnection!!.addTrack(localVideoTrack)
            }
        }
        // The offer is delivered asynchronously to onCreateSuccess().
        peerConnection!!.createOffer(this, MediaConstraints())
    }

    /**
     * Releases everything created by [create]. Safe to call repeatedly.
     *
     * Fix: the camera capture is now stopped before the capturer is disposed,
     * and the audio/video sources are disposed (previously leaked).
     */
    fun destroy() {
        callBack = null
        // stopCapture() may throw (e.g. InterruptedException); that must not
        // abort the rest of the teardown.
        try {
            captureAndroid?.stopCapture()
        } catch (e: Exception) {
            FLogUtil.e(TAG, "stopCapture failed: ${e.message}")
        }
        peerConnection?.dispose()
        peerConnection = null
        // Tracks are owned by the connection/senders; only drop our references.
        localAudioTrack = null
        localVideoTrack = null
        surfaceTextureHelper?.dispose()
        surfaceTextureHelper = null
        captureAndroid?.dispose()
        captureAndroid = null
        audioSource?.dispose()
        audioSource = null
        videoSource?.dispose()
        videoSource = null
        surfaceViewRenderer?.clearImage()
        peerConnectionFactory?.dispose()
        peerConnectionFactory = null
    }

    /**
     * Builds the audio constraints used for the local audio source:
     * echo cancellation and noise suppression on, AGC and high-pass filter off.
     */
    private fun createAudioConstraints(): MediaConstraints {
        val audioConstraints = MediaConstraints()
        audioConstraints.mandatory.add(
            MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "true")
        )
        audioConstraints.mandatory.add(
            MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false")
        )
        audioConstraints.mandatory.add(
            MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false")
        )
        audioConstraints.mandatory.add(
            MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "true")
        )
        return audioConstraints
    }

    /**
     * Exchanges SDP: POSTs the local offer to [playUrl] and applies the answer.
     *
     * Fixes over the previous version:
     *  - `sdpBean.code === 400` compared boxed-Int references (always false for
     *    400), so the retry-on-400 branch never executed; now uses `==`.
     *  - Retries are bounded by [MAX_SDP_RETRY] (the old code incremented
     *    reConnCount but never checked it), and [WebRtcCallBack.onFail] is
     *    invoked once the budget is exhausted.
     *  - A malformed/empty answer now retries instead of stalling silently.
     */
    private fun openWebRtc(sdp: String?) {
        reConnCount++
        val client: OkHttpClient = OkHttpClient.Builder()
            .connectTimeout(60, TimeUnit.SECONDS)
            // NOTE(review): hostname verification is disabled — acceptable only
            // for trusted intranet signaling servers.
            .hostnameVerifier { _, _ -> true }.build()
        val mediaType: MediaType = "application/json".toMediaTypeOrNull()!!
        val body = RequestBody.create(mediaType, sdp!!)
        val request: Request = Request.Builder()
            .url(playUrl!!)
            .method("POST", body)
            .addHeader("Content-Type", "application/json")
            .build()
        client.newCall(request).enqueue(object : Callback {
            override fun onFailure(call: Call, e: IOException) {
                FLogUtil.e(TAG, "交换sdp reConnCount:$reConnCount  异常: ${e.message}")
                retryOrFail(sdp)
            }

            override fun onResponse(call: Call, response: Response) {
                // string() fully reads and closes the response body.
                val result = response.body!!.string()
                FLogUtil.e(TAG, "交换sdp: $result")
                val sdpBean = try {
                    Gson().fromJson(result, SdpBean::class.java)
                } catch (e: Exception) {
                    FLogUtil.e(TAG, "parse sdp answer failed: ${e.message}")
                    null
                }
                when {
                    sdpBean == null -> retryOrFail(sdp)
                    sdpBean.code == 400 -> retryOrFail(sdp) // server not ready yet
                    TextUtils.isEmpty(sdpBean.sdp) -> retryOrFail(sdp)
                    else -> {
                        reConnCount = 0
                        setRemoteSdp(sdpBean.sdp)
                        callBack?.onSuccess()
                    }
                }
            }
        })
    }

    /** Schedules one delayed retry of [openWebRtc], or reports failure once the budget is spent. */
    private fun retryOrFail(sdp: String?) {
        if (reConnCount < MAX_SDP_RETRY) {
            Timer().schedule(RETRY_DELAY_MS) {
                openWebRtc(sdp)
            }
        } else {
            callBack?.onFail()
        }
    }

    /** Applies the remote ANSWER sdp to the active connection (no-op after [destroy]). */
    fun setRemoteSdp(sdp: String?) {
        val pc = peerConnection ?: return
        val remoteSpd = SessionDescription(SessionDescription.Type.ANSWER, sdp)
        pc.setRemoteDescription(this, remoteSpd)
    }

    /**
     * Creates the [PeerConnectionFactory].
     *
     * Fix: the redundant second PeerConnectionFactory.initialize() call (without
     * field trials) was removed — it could override the
     * "WebRTC-H264HighProfile/Enabled/" field trial set here.
     */
    private fun getPeerConnectionFactory(context: Context): PeerConnectionFactory {
        val initializationOptions: PeerConnectionFactory.InitializationOptions =
            PeerConnectionFactory.InitializationOptions.builder(context)
                .setEnableInternalTracer(true)
                .setFieldTrials("WebRTC-H264HighProfile/Enabled/")
                .createInitializationOptions()
        PeerConnectionFactory.initialize(initializationOptions)

        // Default HW-accelerated codec factories bound to the shared EGL context.
        val encoderFactory: VideoEncoderFactory = DefaultVideoEncoderFactory(
            eglBase!!.eglBaseContext,
            false, // enableIntelVp8Encoder
            true   // enableH264HighProfile
        )
        val decoderFactory: VideoDecoderFactory =
            DefaultVideoDecoderFactory(eglBase!!.eglBaseContext)

        return PeerConnectionFactory.builder()
            .setOptions(PeerConnectionFactory.Options())
            .setAudioDeviceModule(JavaAudioDeviceModule.builder(context).createAudioDeviceModule())
            .setVideoEncoderFactory(encoderFactory)
            .setVideoDecoderFactory(decoderFactory)
            .createPeerConnectionFactory()
    }

    // Unified Plan is required: Plan B cannot express recv-only transceivers.
    private val config: PeerConnection.RTCConfiguration
        get() {
            val rtcConfig: PeerConnection.RTCConfiguration =
                PeerConnection.RTCConfiguration(ArrayList())
            // Disable CPU-overuse-driven resolution adaptation.
            rtcConfig.enableCpuOveruseDetection = false
            rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN
            return rtcConfig
        }

    /** SdpObserver: our offer was created — set it locally and start the HTTP exchange. */
    override fun onCreateSuccess(sdp: SessionDescription) {
        if (sdp.type == SessionDescription.Type.OFFER) {
            peerConnection!!.setLocalDescription(this, sdp)
            if (!TextUtils.isEmpty(sdp.description)) {
                reConnCount = 0
                openWebRtc(sdp.description)
            }
        }
    }

    override fun onSetSuccess() {}
    override fun onCreateFailure(error: String?) {}
    override fun onSetFailure(error: String?) {}
    override fun onSignalingChange(newState: PeerConnection.SignalingState?) {}
    override fun onIceConnectionChange(newState: PeerConnection.IceConnectionState?) {}
    override fun onIceConnectionReceivingChange(receiving: Boolean) {}
    override fun onIceGatheringChange(newState: PeerConnection.IceGatheringState?) {}

    // Safe calls: late ICE callbacks after destroy() must not NPE.
    override fun onIceCandidate(candidate: IceCandidate?) {
        peerConnection?.addIceCandidate(candidate)
    }

    override fun onIceCandidatesRemoved(candidates: Array<IceCandidate?>?) {
        peerConnection?.removeIceCandidates(candidates)
    }

    override fun onAddStream(stream: MediaStream?) {}
    override fun onRemoveStream(stream: MediaStream?) {}
    override fun onDataChannel(dataChannel: DataChannel?) {}
    override fun onRenegotiationNeeded() {}

    /** Renders an incoming remote video track into the configured renderer. */
    override fun onAddTrack(receiver: RtpReceiver, mediaStreams: Array<MediaStream?>?) {
        val track: MediaStreamTrack = receiver.track()!!
        if (track is VideoTrack) {
            track.setEnabled(true)
            if (surfaceViewRenderer != null && isShowCamera) {
                val videoSink = ProxyVideoSink()
                videoSink.setTarget(surfaceViewRenderer)
                track.addSink(videoSink)
            }
        }
    }

    companion object {
        private const val TAG = "WebRTCUtil"
        const val VIDEO_TRACK_ID = "ARDAMSv0"
        const val AUDIO_TRACK_ID = "ARDAMSa0"
        private const val VIDEO_RESOLUTION_WIDTH = 1280
        private const val VIDEO_RESOLUTION_HEIGHT = 720
        private const val FPS = 30
        // SDP-exchange retry policy (mirrors the intent of the old commented-out code).
        private const val MAX_SDP_RETRY = 50
        private const val RETRY_DELAY_MS = 300L
        private const val AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation"
        private const val AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl"
        private const val AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter"
        private const val AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression"
    }
}