package cn.wecloud.im.multiplayervideodemo.live

import android.animation.ObjectAnimator
import android.view.View
import android.view.animation.AccelerateInterpolator
import android.widget.ImageView
import androidx.core.animation.addListener
import androidx.core.view.isInvisible
import androidx.core.view.isVisible
import cn.wecloud.im.core.im.messages.MemberDto
import cn.wecloud.im.multiplayervideodemo.R
import cn.wecloud.im.multiplayervideodemo.databinding.ItemVideoUserBinding
import cn.wecloud.im.multiplayervideodemo.ext.loadAvatar
import com.xwray.groupie.viewbinding.BindableItem
import com.xwray.groupie.viewbinding.GroupieViewHolder
import io.livekit.android.VideoMember
import io.livekit.android.util.flow
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.FlowCollector
import kotlinx.coroutines.flow.collectLatest
import kotlinx.coroutines.flow.combine
import kotlinx.coroutines.flow.filterNotNull

/**
 * Bottom user-list item for single-person mode in a multi-party video call.
 *
 * Binds a [VideoMember] to an avatar/name cell, shows a loading spinner
 * until the member has answered, and blinks a "speaking" indicator driven
 * by the participant's speaking/audio-level flows.
 */
class VideoUserItem(
    val videoMember: VideoMember
) : BindableItem<ItemVideoUserBinding>() {

    // Scope tied to the bound view holder: created lazily in bind(),
    // cancelled in unbind() so collectors never outlive the views.
    private var coroutineScope: CoroutineScope? = null

    // Blink animator for the speaking indicator. Renamed from `speakAnimation`
    // so it no longer shadows the speakAnimation() function below.
    private var speakAnimator: ObjectAnimator? = null

    override fun initializeViewBinding(view: View): ItemVideoUserBinding =
        ItemVideoUserBinding.bind(view)

    override fun getLayout(): Int = R.layout.item_video_user

    private fun ensureCoroutineScope() {
        if (coroutineScope == null) {
            // SupervisorJob: a failure in one collector must not cancel the others.
            coroutineScope = CoroutineScope(SupervisorJob() + Dispatchers.Main)
        }
    }

    override fun bind(viewBinding: ItemVideoUserBinding, position: Int) {
        ensureCoroutineScope()
        viewBinding.ivUserAvatar.loadAvatar("")
        viewBinding.tvUserName.text = videoMember.clientId

        // Show the loading spinner until the member has answered the call.
        coroutineScope?.launch {
            videoMember::state.flow.collect { state ->
                viewBinding.pbLoad.isVisible = state != MemberDto.STATE_ANSWERED
            }
        }

        // BUG FIX: the original nested a second terminal `collect` inside the
        // outer `collect { participant -> ... }`. The inner collect never
        // returns, so the outer collector suspended forever on the first
        // participant and later participant emissions were silently dropped.
        // `collectLatest` cancels the previous inner collection whenever a new
        // (non-null) participant arrives.
        coroutineScope?.launch {
            videoMember::participant.flow
                .filterNotNull()
                .collectLatest { participant ->
                    combine(
                        participant::isSpeaking.flow,
                        participant::audioLevel.flow
                    ) { isSpeaking, audioLevel -> isSpeaking to audioLevel }
                        .collect { (isSpeaking, audioLevel) ->
                            speakAnimation(viewBinding.ivStatus, isSpeaking, audioLevel)
                        }
                }
        }
    }

    override fun unbind(viewHolder: GroupieViewHolder<ItemVideoUserBinding>) {
        coroutineScope?.cancel()
        coroutineScope = null
        // LEAK FIX: stop any in-flight blink so the animator does not keep
        // running against (and referencing) a recycled view.
        speakAnimator?.cancel()
        speakAnimator = null
        super.unbind(viewHolder)
    }

    /**
     * Blinks [callView] (alpha 1 → 0) while the participant is speaking and
     * hides it otherwise.
     *
     * @param isSpeaking whether the participant is currently speaking.
     * @param audioLevel currently unused — a level-scaled duration is left
     *        commented out below; the parameter is kept so the call site and
     *        any future tuning stay source-compatible.
     */
    private fun speakAnimation(
        callView: ImageView, isSpeaking: Boolean, audioLevel: Float
    ) {
        callView.isInvisible = !isSpeaking
        if (isSpeaking) {
            if (speakAnimator?.isRunning != true) {
                speakAnimator = ObjectAnimator.ofFloat(callView, "alpha", 1f, 0f)
                    .apply {
                        interpolator = AccelerateInterpolator()
                        duration = 300 // (500 * audioLevel).toLong()
                        addListener(onEnd = {
                            // Reset alpha and hide until the next speaking event.
                            callView.alpha = 1f
                            callView.isInvisible = true
                        })
                    }
            }
            speakAnimator?.start()
        }
    }

}