package cn.wecloud.im.multiplayervideodemo.live

import android.animation.ObjectAnimator
import android.content.Context
import android.util.DisplayMetrics
import android.view.View
import android.view.WindowManager
import android.view.animation.AccelerateInterpolator
import android.widget.ImageView
import androidx.core.animation.addListener
import androidx.core.view.isInvisible
import androidx.core.view.isVisible
import cn.wecloud.im.multiplayervideodemo.App
import cn.wecloud.im.multiplayervideodemo.R
import cn.wecloud.im.multiplayervideodemo.databinding.ViewCallMemberBinding
import cn.wecloud.im.multiplayervideodemo.ext.loadAvatar
import cn.wecloud.im.multiplayervideodemo.utils.DisplayUtils
import com.github.ajalt.timberkt.Timber
import com.xwray.groupie.viewbinding.BindableItem
import com.xwray.groupie.viewbinding.GroupieViewHolder
import io.livekit.android.VideoMember
import io.livekit.android.events.ParticipantEvent
import io.livekit.android.events.collect
import io.livekit.android.renderer.TextureViewRenderer
import io.livekit.android.room.Room
import io.livekit.android.room.track.Track
import io.livekit.android.room.track.VideoTrack
import io.livekit.android.util.flow
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.*

/**
 * Grid item representing one participant in the multi-party video call list.
 *
 * Each bound item observes the participant's video/audio mute state, speaking
 * volume, and track-subscription events via coroutines scoped to the bind
 * lifecycle, and attaches the participant's camera [VideoTrack] to the item's
 * [TextureViewRenderer].
 *
 * @property room        the LiveKit room; used to initialize video renderers.
 * @property videoMember wrapper holding the participant and its client id.
 */
class GridParticipantItem(
    val room: Room,
    val videoMember: VideoMember
) : BindableItem<ViewCallMemberBinding>() {

    companion object {
        private const val TAG = "GridParticipantItem"

        /**
         * Renderers that have already been initialized against the room.
         * Shared across all items so a recycled renderer is never initialized twice.
         */
        val rendererInitCache = HashSet<TextureViewRenderer>()

        /**
         * Releases every cached renderer and clears the cache.
         * Call when the call screen is torn down.
         */
        fun releaseRenderers() {
            rendererInitCache.forEach { it.release() }
            rendererInitCache.clear()
        }
    }

    // Video track currently attached to this item's renderer; null until the
    // camera track is subscribed and set up.
    private var boundVideoTrack: VideoTrack? = null

    // Scope for the per-bind observer coroutines; created in bind(), cancelled
    // and nulled in unbind() so observers never outlive the view holder.
    private var coroutineScope: CoroutineScope? = null

    // Fixed cell dimensions (px), computed once from the screen width.
    protected var mItemWidth = 0
    protected var mItemHeight = 0

    init {
        customizedInit(App.instance)
    }

    override fun initializeViewBinding(view: View): ViewCallMemberBinding {
        val binding = ViewCallMemberBinding.bind(view)
//        room.initVideoRenderer(binding.renderer)
//        rendererCache.add(binding.renderer)
        return binding
    }

    /**
     * Computes the item's width/height from the screen width: two columns with
     * 14dp outer padding, cell aspect ratio width:height = 0.7.
     */
    private fun customizedInit(context: Context) {
        val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
        val padding = DisplayUtils.dp2px(context, 14f)
        val outMetrics = DisplayMetrics()
        // NOTE(review): defaultDisplay/getMetrics are deprecated since API 30;
        // consider WindowMetrics when the min SDK allows.
        windowManager.defaultDisplay.getMetrics(outMetrics)
        val width = (outMetrics.widthPixels - padding * 2)
        mItemWidth = width / 2
        mItemHeight = (mItemWidth / 0.7).toInt()
    }

    override fun createViewHolder(itemView: View): GroupieViewHolder<ViewCallMemberBinding> {
        // Apply the pre-computed fixed cell size before the holder is created.
        itemView.layoutParams.width = mItemWidth
        itemView.layoutParams.height = mItemHeight
        return super.createViewHolder(itemView)
    }

    /** Lazily (re)creates the observer scope; a fresh scope is needed after unbind() cancels it. */
    private fun ensureCoroutineScope() {
        if (coroutineScope == null) {
            coroutineScope = CoroutineScope(SupervisorJob() + Dispatchers.Main)
        }
    }

    override fun bind(viewBinding: ViewCallMemberBinding, position: Int) {
        ensureCoroutineScope()

        viewBinding.ivBgAvatar.loadAvatar("")
        viewBinding.ivBgAvatar.isVisible = true
        viewBinding.ivAvatar.loadAvatar("")
        viewBinding.tvUsername.text = videoMember.clientId

        // Observe whether the participant's (first) video track is muted;
        // no track at all is treated as muted.
        coroutineScope?.launch {
            videoMember::participant.flow
                .filterNotNull()
                .flatMapLatest { it::videoTracks.flow }
                .flatMapLatest { tracks ->
                    val videoTrack = tracks.values.firstOrNull()
                    if (videoTrack != null) {
                        videoTrack::muted.flow
                    } else {
                        flowOf(true)
                    }
                }
                .collect { muted ->
                    viewBinding.ivDisableVideo.visibility =
                        if (muted) View.VISIBLE else View.INVISIBLE
                    viewBinding.ivBgAvatar.visibility =
                        if (muted) View.VISIBLE else View.INVISIBLE
                }
        }

        // Observe speaking state + audio level to drive the volume pulse animation.
        coroutineScope?.launch {
            videoMember::participant.flow
                .filterNotNull()
                .flatMapLatest {
                    combine(
                        it::isSpeaking.flow,
                        it::audioLevel.flow
                    ) { isSpeaking, audioLevel -> isSpeaking to audioLevel }
                }.collect {
                    speakAnimation(viewBinding.ivSpeakVolume, it.first, it.second)
                }
        }

        // Observe whether the participant's (first) audio track is muted;
        // no track at all is treated as muted.
        coroutineScope?.launch {
            videoMember::participant.flow
                .filterNotNull()
                .flatMapLatest { it::audioTracks.flow }
                .flatMapLatest { tracks ->
                    val audioTrack = tracks.values.firstOrNull()
                    if (audioTrack != null) {
                        audioTrack::muted.flow
                    } else {
                        flowOf(true)
                    }
                }
                .collect { muted ->
                    viewBinding.ivDisableVoice.visibility =
                        if (muted) View.VISIBLE else View.INVISIBLE
                }
        }

        // Attach the camera track to the renderer as soon as it is subscribed.
        // NOTE(review): the inner events.collect suspends the outer collect's
        // lambda indefinitely, so if `participant` ever emits a *different*
        // participant, its events would not be observed — confirm whether the
        // participant reference can change during a bind.
        coroutineScope?.launch {
            videoMember::participant.flow
                .filterNotNull()
                .collect { participant ->
                    participant.events.collect { event ->
                        when (event) {
                            is ParticipantEvent.TrackSubscribed -> {
                                val track = event.track
                                val publication = event.publication
                                if (track !is VideoTrack) return@collect
                                if (publication.source == Track.Source.CAMERA) {
                                    setupVideoIfNeeded(track, viewBinding)
                                }
                            }
                            else -> {
                            }
                        }
                    }
                }
        }

        // The camera track may already be subscribed before this item binds.
        val existingTrack = getVideoTrack()
        if (existingTrack != null) {
            setupVideoIfNeeded(existingTrack, viewBinding)
        }
    }

    /** Returns the participant's already-subscribed camera track, if any. */
    private fun getVideoTrack(): VideoTrack? {
        return videoMember.participant?.getTrackPublication(Track.Source.CAMERA)?.track as? VideoTrack
    }

    /**
     * Attaches [videoTrack] to this item's renderer, initializing the renderer
     * against the room exactly once (tracked via [rendererInitCache]).
     * No-op if a track is already bound.
     */
    internal fun setupVideoIfNeeded(videoTrack: VideoTrack, viewBinding: ViewCallMemberBinding) {
        if (boundVideoTrack != null) {
            return
        }

        boundVideoTrack = videoTrack
        Timber.v { "adding renderer to $videoTrack" }

        if (!rendererInitCache.contains(viewBinding.renderer)) {
            room.initVideoRenderer(viewBinding.renderer)
            rendererInitCache.add(viewBinding.renderer)
        }

        videoTrack.addRenderer(viewBinding.renderer)
    }

    override fun unbind(viewHolder: GroupieViewHolder<ViewCallMemberBinding>) {
        coroutineScope?.cancel()
        coroutineScope = null
        super.unbind(viewHolder)
        val renderer = viewHolder.binding.renderer
        boundVideoTrack?.removeRenderer(renderer)
        renderer.release()
        rendererInitCache.remove(renderer)
        boundVideoTrack = null
        // Fix: cancel a possibly-running animator before dropping it; otherwise
        // it keeps animating (and referencing) the recycled view, and its onEnd
        // listener may fire after the view is rebound to another item.
        speakAnimation?.cancel()
        speakAnimation = null
    }

    override fun getLayout(): Int = R.layout.view_call_member


    // Animator for the speaking-volume pulse; recreated per bind cycle and
    // cancelled in unbind().
    private var speakAnimation: ObjectAnimator? = null

    /**
     * Shows [callView] while the participant is speaking and pulses its alpha
     * from 1 to 0; hides it (and restores alpha) when the pulse ends.
     * [audioLevel] is currently unused for the duration (fixed 300ms).
     */
    private fun speakAnimation(
        callView: ImageView, isSpeaking: Boolean, audioLevel: Float
    ) {
        callView.isInvisible = !isSpeaking
        if (isSpeaking) {
            if (speakAnimation?.isRunning != true) {
                speakAnimation = ObjectAnimator.ofFloat(callView, "alpha", 1f, 0f)
                    .apply {
                        interpolator = AccelerateInterpolator()
                        duration = 300//(500 * audioLevel).toLong()
                        addListener(onEnd = {
                            callView.alpha = 1f
                            callView.isInvisible = true
                        })
                    }
            }
            speakAnimation?.start()
        }
    }
}