import React, { useEffect, useRef, useState } from 'react'
import Taro, { FC, useReady } from '@tarojs/taro'
import { View, LivePusher, LivePlayer, Image, Video, ITouchEvent } from '@tarojs/components'
import { asyncScheduler, combineLatest, EMPTY, firstValueFrom, Subscription } from 'rxjs';
import { AtSlider } from 'taro-ui';
import { switchMap, tap } from 'rxjs/operators';

import { COMPONENT_OPERATION, LIVE_OPERATION, LIVE_TYPE, ROOM_ROLE, ROOM_STATUS } from '@/constants/room';
import { useRoomDataFacade } from '@/hooks/useRoomDataFacade'
import { getLogger } from '@/utils/log';
import { useAuthFacade } from '@/hooks/useAuthFacade';
import { formatSecond, isObjEmpty } from '@/utils';
import { HDLevel, LiveStream } from '@/models/LiveRoom';
import { CMD } from '@/constants/message';
import { useMsgFacade } from '@/hooks/useMsgFacade';
import { useRoomUiFacade } from '@/hooks/useRoomUiFacade';

import "taro-ui/dist/style/components/button.scss" // 按需引入  
import "taro-ui/dist/style/components/grid.scss";
import "taro-ui/dist/style/components/float-layout.scss";
import "taro-ui/dist/style/components/slider.scss";
import './index.scss';

// Props accepted by the live-room component.
type Props = {
  // Id of the live room to render.
  roomId: string;
}

// Live-room media component. Renders the appropriate native media surface
// (LivePusher / LivePlayer / Video / inner audio) for the current room role,
// live type and room status, and bridges the RxJS facade streams into those
// native Taro contexts. (The original header said "live not started", but the
// component actually covers the full room lifecycle.)
export const Index: FC<Props> = ({ roomId, children }) => {
  const logger = getLogger("/component/liveRoom/index");
  // Auth facade: exposes the current user's id.
  const [authFacade, , userId] = useAuthFacade();
  const [msgFacade] = useMsgFacade();
  // UI facade tuple: only a handful of positional slots are consumed here.
  const [, roomName, , anchorName, ownerName, , ownerLogo, anchorLogo, , , , , , , , , , , , , , , , , , , , , , , , ,] = useRoomUiFacade();

  //BUG Using useRoomDataFacade directly leaves the values below empty; possibly a hook conflict with the LiveOperator layer.
  const [roomFacade, roomRole, roomStatus, type, ownerId, anchorId, assistants, liveStreams, linkStreams, ui_muted, ui_debug, ui_isFront, ui_mode, ui_beauty, ui_operation] = useRoomDataFacade();

  const [currentLinkStream, setCurrentLinkStream] = useState({} as LiveStream);

  // Native live-pusher (broadcast) context.
  const pusherContextRef = useRef({} as Taro.LivePusherContext);

  // Default live-player (pull-stream) context.
  const playerContextRef = useRef({} as Taro.LivePlayerContext);

  // Native video context (used for video replays).
  const videoContextRef = useRef({} as Taro.VideoContext);

  // Inner-audio context (used for audio-only replays).
  const innerAudioContextRef = useRef({} as Taro.InnerAudioContext);
  const [innerAudioSrc, setInnerAudioSrc] = useState('');
  const [audioPaused, setAudioPaused] = useState(false) // NOTE(review): despite the name, true appears to mean "currently playing" (see playAudio and the play/stop icon choice) — confirm before renaming
  const [audioShow, setAudioShow] = useState(false) // whether the audio-replay widget is visible
  const [audioDurations, setAudioDurations] = useState(0) // total audio duration in seconds (unformatted)
  const [audioDuration, setAudioDuration] = useState(0) // current playback position in seconds
  const [audioSeek, setAudioSeek] = useState(0) // playback position (seconds) computed after dragging the slider
  const [audioTime, setAudioTime] = useState(0) // current slider value (0-100)
  const sliderStatusRef = useRef(false) // whether the slider is currently being dragged
  const [audioCurrent, setAudioCurrent] = useState(formatSecond(0) as unknown as number) // formatted current playback time
  const [audioAllTime, setAudioAllTime] = useState(formatSecond(0) as unknown as number) // formatted total audio duration
  const [isHorizontal, setIsHorizontal] = useState(false) // whether the screen is in landscape orientation

  // Link-mic stream list after per-role processing (built by linkEffect below).
  const [linkStreamList, setLinkStreamList] = useState([] as LiveStream[])

  //FIXME Works around iOS auto-pushing the stream after returning from the background.
  const [pushUrl, setPushUrl] = useState("");

  // Builds and returns every RxJS subscription that bridges facade streams to
  // the native media contexts. Invoked once from the mount effect below; the
  // returned subscriptions are unsubscribed on unmount.
  const handleEffect = () => {
    const subscriptions: Subscription[] = [
      // Listen for operation notifications sent from the component layer.
      combineLatest([roomFacade.ui_operation$, roomFacade.ui_isFront$, roomFacade.liveStreams$, roomFacade.type$])
        .pipe(
          switchMap(([op, isFront, streams, roomType]) => {
            if (!op) return EMPTY;

            if (isObjEmpty(pusherContextRef.current)) {
              pusherContextRef.current = Taro.createLivePusherContext();
            }

            // Operation types that need direct access to the native component objects.
            if (op === COMPONENT_OPERATION.PLAY_VIDEO) {
              if (LIVE_TYPE.AUDIO !== roomType) {
                if (isObjEmpty(videoContextRef.current)) {
                  videoContextRef.current = Taro.createVideoContext("player");
                }
                videoContextRef.current.play();
              } else {
                console.log("观众点击观看音频回放");
                const audioContext = Taro.createInnerAudioContext();
                const audioUrl = streams[0].videoUrl;
                if (audioUrl) {
                  audioContext.src = audioUrl;
                  setAudioPaused(true)
                  audioContext.play()
                  audioContext.onPlay(() => {
                    asyncScheduler.schedule(() => {
                      audioContext.onTimeUpdate(() => {
                        const currentTime = formatSecond(audioContext.currentTime) as unknown as number
                        // NOTE(review): produces NaN if duration is still 0 at this point — confirm the 500ms delay always suffices.
                        const value = parseInt(((100 / audioContext.duration) * audioContext.currentTime).toString())
                        setAudioCurrent(currentTime)
                        setAudioTime(value)
                        setAudioDuration(audioContext.currentTime)
                      })
                    }, 500);
                  })

                  audioContext.onCanplay(() => {
                    // Initialize duration.
                    const duration = audioContext.duration;
                    asyncScheduler.schedule(() => {
                      // Read the real duration after a delay (it is not final immediately on canplay).
                      const durations = formatSecond(audioContext.duration) as unknown as number;
                      setAudioAllTime(durations)
                      setAudioDurations(audioContext.duration)
                    }, 1000);
                  })

                  audioContext.onEnded(() => {
                    audioContext.stop();
                    audioContext.seek(0);
                    // Playback stopped — destroy the instance (currently disabled).
                    // audioContext.destroy()
                    setAudioPaused(false)
                    setAudioCurrent(formatSecond(0) as unknown as number)
                    setAudioTime(0)
                    setAudioDuration(0)
                    console.log('结束播放')
                  })

                  audioContext.onError((res) => {
                    console.log(res.errMsg)
                    console.log(res.errCode)
                  })

                  innerAudioContextRef.current = audioContext;
                  setAudioShow(true);
                  setInnerAudioSrc(audioUrl)
                }
              }
              return EMPTY;
            }


            if (op === COMPONENT_OPERATION.TOGGLE_CAMERA) {
              console.log("切换摄像头")

              pusherContextRef.current.switchCamera();
              return EMPTY;
            }

            if ([LIVE_OPERATION.PAUSE, LIVE_OPERATION.RESUME, LIVE_OPERATION.START, LIVE_OPERATION.STOP].includes(op)) {
              return roomFacade.operateCamera(op)
                .pipe(
                  tap(() => {
                    switch (op) {
                      // Start and resume: on the front end, resume is handled the same way as start.
                      case LIVE_OPERATION.START: // start-live operation from the operator layer
                      case LIVE_OPERATION.RESUME: // resume operation from the operator layer
                        {
                          console.log('当前直播类型', roomType, streams[0]);
                          if (roomType === LIVE_TYPE.OBS || roomType === LIVE_TYPE.OBS_REC || roomType === LIVE_TYPE.VR) {
                            const this_playerContext = Taro.createLivePlayerContext("player");
                            this_playerContext.play();

                          } else {
                            setPushUrl(streams[0].pushUrl);
                            console.log("获取的推流地址1：", streams[0]);
                            //BUG Works around iOS pre-rendering / failing to push under normal conditions.
                            setTimeout(() => {
                              // NOTE(review): pushUrl here is the stale closure value (the setState above cannot be visible in this closure) — confirm intended.
                              console.log("获取的推流地址2：", pushUrl);
                              pusherContextRef.current.start();
                            }, 500);
                          }
                          break;
                        }
                      // Pause and stop: on the front end, pause is handled the same way as stop.
                      case LIVE_OPERATION.PAUSE: // pause operation from the operator layer
                      case LIVE_OPERATION.STOP: // stop operation from the operator layer
                        {
                          if (roomType == LIVE_TYPE.VIDEO) {
                            // Clear pushUrl to prevent iOS from auto-pushing.
                            pusherContextRef.current.stop();
                            setPushUrl("");
                          }
                          break;
                        }
                      case COMPONENT_OPERATION.TOGGLE_CAMERA: // camera-toggle operation from the operator layer
                        {
                          if (roomType == LIVE_TYPE.VIDEO) {
                            pusherContextRef.current.switchCamera();
                          }
                          break;
                        }
                      default:
                        break;
                    }
                  })
                )
            }
            return EMPTY;
          })
        )
        .subscribe(),

      // Monitor link-mic (co-anchor) streams.
      combineLatest([roomFacade.linkStreams$, authFacade.userId$])
        .pipe(
          tap(([this_linkStreams, this_userId]) => {
            if (this_linkStreams.length > 0) {
              // Pick the current user's link stream (if they joined the link).
              // console.log('符合条件的linkStream', this_linkStreams);
              const streamList = this_linkStreams.filter(stream => {
                // Streams owned by the current user.
                return stream.owner === this_userId;
              });
              // console.log('监控连麦下，小主播的流(第一个应该是小主播自己)', streamList);

              // Update the current link stream.
              if (streamList.length > 0) {
                setCurrentLinkStream(streamList[0]);
              }
            }
          })
        )
        .subscribe(),

      // Handle system-wide group notifications.
      combineLatest([roomFacade.handleGroupSystemNotify$, roomFacade.type$])
        .pipe(
          tap(([msg, roomType]) => {
            console.log('观众端未能收到通知?');

            if (msg.cmd === CMD.OPERATE_CAMERA) {
              const this_playerContext = Taro.createLivePlayerContext("player");
              // if (isObjEmpty(this_playerContext)) {
              //   this_playerContext = Taro.createLivePlayerContext("player")
              //   setPlayerContext(this_playerContext);
              // }
              switch (parseInt(msg.data)) {
                case ROOM_STATUS.STARTED:
                  {
                    this_playerContext.play();
                    break;
                  }
                case ROOM_STATUS.PAUSED:
                  {
                    //this_playerContext.pause();
                    break;
                  }
                case ROOM_STATUS.ENDED:
                  {
                    this_playerContext.stop();
                    // NOTE(review): the else branch also runs when videoContextRef is already populated for a non-audio room — confirm this condition isn't meant to branch purely on room type.
                    if (isObjEmpty(videoContextRef.current) && roomType !== LIVE_TYPE.AUDIO) {
                      videoContextRef.current = Taro.createVideoContext("video");
                    } else {
                      innerAudioContextRef.current = Taro.createInnerAudioContext();
                    }
                    break;
                  }
                default:
                  break;
              }
            }
            // Audio-replay handling.
            if (msg.cmd === CMD.VIDEO_BACK && roomType === LIVE_TYPE.AUDIO) {
              innerAudioContextRef.current.src = msg.data.videoUrl as string;
            }
          })
        )
        .subscribe({
          next: console.log,
          error: console.error,
          complete: () => console.warn("直接结束")
        }),

      // Start the anchor's camera preview.
      combineLatest([roomFacade.roomRole$, roomFacade.status$])
        .pipe(
          // Values present, anchor role, and room not-started / paused / resuming.
          tap(([role, status]) => {
            if ((role === ROOM_ROLE.ANCHOR || role === ROOM_ROLE.OWNER_ANCHOR) && (status === ROOM_STATUS.NOT_STARTED || status === ROOM_STATUS.PAUSED || status === ROOM_STATUS.RESUME)) {
              if (isObjEmpty(pusherContextRef.current)) {
                pusherContextRef.current = Taro.createLivePusherContext();
              }
              // Start the preview.
              pusherContextRef.current.startPreview();
            }

            // Users rendering the LivePlayer component need one initial pull on entry — equivalent to autoplay.
            if (role !== ROOM_ROLE.ANCHOR && role !== ROOM_ROLE.OWNER_ANCHOR && (status === ROOM_STATUS.STARTED || status === ROOM_STATUS.RESUME)) {
              if (isObjEmpty(playerContextRef.current)) {
                playerContextRef.current = Taro.createLivePlayerContext("player");
              }
              // Start playback.
              playerContextRef.current.play();
            }
          })
        )
        .subscribe()
    ]

    return subscriptions;
  }

  // Single mount-time entry point for all state observers; unsubscribes everything on unmount.
  useEffect(() => {
    const subscriptions = handleEffect();

    return () => { subscriptions.map(sub => sub.unsubscribe()) };
  }, [])

  // Main anchor kicks a co-anchor out of the link-mic session (after a confirmation modal).
  const kickoutAnchor = (e: ITouchEvent, uId) => {
    // Stop the event from bubbling.
    e.stopPropagation()
    Taro.showModal({
      content: '是否与其断开连麦',
      success: (res) => {
        if (res.confirm) {
          console.log('已断开连麦');
          firstValueFrom(roomFacade.kickoutJoinAnchor(uId))
            .then(() => msgFacade.sendLinkMsg(uId, CMD.KICK_ANCHOR))
            .catch(err => {
              console.error("踢出连麦主播异常：", err)
            })
        }
      }
    })
  }

  // Co-anchor voluntarily leaves the link-mic session (after a confirmation modal).
  const quitLink = () => {
    Taro.showModal({
      content: '是否退出连麦',
      success: (res) => {
        if (res.confirm) {
          console.log('点击了退出连麦1', userId);
          firstValueFrom(roomFacade.quitLink(userId))
            .then(() => msgFacade.sendLinkMsg(ownerId, CMD.KICK_ANCHOR))
            .catch(err => {
              console.error('退出连麦异常', err);
            })
        }
      }
    })
  }

  // Fired continuously while the slider is being dragged (onChanging).
  // NOTE(review): this clears the drag flag during dragging while sliderChange sets it — the flag semantics look inverted relative to the name; confirm.
  const sliderChangeing = () => {
    sliderStatusRef.current = false
  }

  // Slider drag-end handler (onChange): seek the audio to the chosen position.
  const sliderChange = (e) => {
    sliderStatusRef.current = true
    // Read the slider percentage.
    // NOTE(review): this condition is always true — the flag was just set on the previous line.
    if (sliderStatusRef.current == true) {
      let value = e as number;
      setAudioTime(value)
      const duration = audioDurations
      // Convert the slider percentage into an absolute time using the total duration.
      value = parseInt((value * duration / 100).toString());
      const currentTime = formatSecond(value) as unknown as number
      setAudioSeek(value)
      setAudioPaused(true)
      setAudioCurrent(currentTime)
      // Seek to the computed time.
      innerAudioContextRef.current.seek(value);
      innerAudioContextRef.current.play();
    }
  }

  // Audio play/pause toggle button.
  const playAudio = () => {
    // Capture the last-known playback position.
    const seek = audioDuration
    // Toggle the playing flag (see the NOTE on audioPaused above).
    setAudioPaused(!audioPaused)
    if (audioPaused) {
      innerAudioContextRef.current.pause();
      // Was playing: record the position and pause.
      setAudioDuration(innerAudioContextRef.current.currentTime)
    } else {
      // Seek to the recorded position and resume playback.
      innerAudioContextRef.current.seek(seek);
      innerAudioContextRef.current.play();
    }
  }

  // useDidHide(() => {
  //   console.log('componentDidHide')
  //   innerAudioContextRef.current.stop();
  //   // innerAudioContextRef.current.destroy();
  // })

  // Registers a window-resize listener that flips the landscape flag.
  // NOTE(review): the isHori parameter is unused, and a new listener is registered
  // on every invocation without a matching Taro.offWindowResize — possible
  // duplicate listeners across orientation flips; confirm.
  const horiEffect = (isHori: boolean) => {
    Taro.onWindowResize((res) => {
      if (res.size.windowWidth > res.size.windowHeight) {
        setIsHorizontal(true)
      } else {
        setIsHorizontal(false)
      }
    })
  }

  // Watch screen-width changes to detect portrait/landscape switches.
  useEffect(() => {
    horiEffect(isHorizontal);
  }, [isHorizontal])

  // Builds the small-window stream list for the current role (see the effect below).
  const linkEffect = (link_streams: LiveStream[], live_streams: LiveStream[], role: string) => {
    let streamsRes = JSON.parse(JSON.stringify(link_streams)) as LiveStream[];
    if (link_streams.length > 0 && live_streams.length > 0 && role === ROOM_ROLE.LINK_ANCHOR) {
      // Deduplicate the main anchor's stream here.
      if (streamsRes[0].owner !== live_streams[0].owner) {
        streamsRes.unshift(live_streams[0])
      }
      streamsRes = streamsRes.filter(item => item.owner !== userId && item.status !== ROOM_STATUS.ENDED);
      // console.log('(小主播)此时该出现的小窗口有', streamsRes, live_streams);

      setLinkStreamList(streamsRes)
    } else {
      // console.log('(非小主播)此时该出现的小窗口有', link_streams, live_streams);
      streamsRes = streamsRes.filter(item => item.status !== ROOM_STATUS.ENDED);
      roomFacade.audienceListToast(streamsRes.length)
      setLinkStreamList(streamsRes)
    }
  }

  // Process linkStreams: a co-anchor drops their own stream and pulls in the
  // main anchor's; other roles keep the list as-is (minus ended streams).
  useEffect(() => {
    // console.log('linkStreams和liveStreams的最终状态', linkStreams , liveStreams);
    linkEffect(linkStreams, liveStreams, roomRole)
  }, [linkStreams, liveStreams, roomRole])

  // Keep the screen awake while the room page is active.
  useReady(() => {
    Taro.setKeepScreenOn({
      keepScreenOn: true
    })
  })

  // Forward the replay video's playback progress (whole seconds) to the facade.
  const eventHandle = (e) => {
    const currentTime = Math.trunc(e.detail.currentTime)
    roomFacade.updateProgress(currentTime);
  }

  // Notify the facade when the replay video ends.
  const ended = (e) => {
    const cancelable = e.cancelable
    roomFacade.updateVideoEnd(cancelable)
  }

  return (
    <View className='container-box'>
      {roomStatus < ROOM_STATUS.ENDED && (
        <View className='v-full2'>
          {/* Main screen shown only to anchor roles while the room has not ended */}
          {(roomRole === ROOM_ROLE.OWNER_ANCHOR || roomRole === ROOM_ROLE.ANCHOR) && (type !== LIVE_TYPE.OBS && type !== LIVE_TYPE.OBS_REC && type !== LIVE_TYPE.VR) && (roomStatus < ROOM_STATUS.ENDED) && (
            <View className=''>
              <View className='v-main-video'>
                <LivePusher id='pusher' mode={ui_mode as HDLevel} url={pushUrl} maxBitrate={1500} beauty={ui_beauty}
                  enableCamera={type === LIVE_TYPE.VIDEO} enableMic={!ui_muted} enableAgc enableAns autopush={false}
                  aspect='9:16' backgroundMute={false} onStateChange={roomFacade.onMainPush} onError={roomFacade.onMainError}
                  onNetstatus={roomFacade.onMainStash}
                >
                  {children}
                </LivePusher>
              </View>
            </View>
          )}
          {/* Main screen for audience roles; under OBS-style lives the anchor also uses LivePlayer */}
          {(((roomRole === ROOM_ROLE.OWNER_ASSISTANT || roomRole === ROOM_ROLE.ASSISTANT) && linkStreams.length == 0) || ((roomRole === ROOM_ROLE.OWNER_ANCHOR || roomRole === ROOM_ROLE.ANCHOR) && (type === LIVE_TYPE.OBS || type === LIVE_TYPE.OBS_REC || type === LIVE_TYPE.VR)) || roomRole === ROOM_ROLE.AUDIENCE) && roomStatus <= ROOM_STATUS.ENDED && (
            <View>
              <View className='v-main-video'>
                <LivePlayer id='player' mode='live' minCache={0.1} maxCache={0.3} autoPauseIfNavigate={false} autoPauseIfOpenNavigate={false}
                  objectFit='fillCrop' src={liveStreams[0].flvPlayUrl} muted={ui_muted} backgroundMute={false} autoplay
                  onStateChange={roomFacade.onMainPlayState} pictureInPictureMode={roomStatus === ROOM_STATUS.STARTED ? ["push", "pop"] : []}
                >
                  {children}
                </LivePlayer>
              </View>
            </View>
          )}
        </View>
      )}
      {/* Small link-mic windows shown to anchor/assistant roles while the room has not ended */}
      {linkStreams.length > 0 && (roomRole === ROOM_ROLE.OWNER_ANCHOR || roomRole === ROOM_ROLE.ANCHOR || roomRole === ROOM_ROLE.LINK_ANCHOR || roomRole === ROOM_ROLE.OWNER_ASSISTANT || roomRole === ROOM_ROLE.ASSISTANT) && (roomStatus < ROOM_STATUS.ENDED) && (
        <View>
          <View className={roomRole === ROOM_ROLE.LINK_ANCHOR && currentLinkStream.status < ROOM_STATUS.ENDED ? '' : 'v-sub-video-list'}>
            {roomRole === ROOM_ROLE.LINK_ANCHOR && (
              <View className={roomRole === ROOM_ROLE.LINK_ANCHOR ? 'v-main-video' : 'v-sub-video'}>
                <LivePusher max-bitrate='1000' id='audience_pusher' mode='RTC' url={currentLinkStream.pushUrl} beauty={ui_beauty} autopush enableAgc enableAns
                  enableCamera={type == LIVE_TYPE.VIDEO} aspect='9:16' waiting-image='https://mc.qcloudimg.com/static/img/daeed8616ac5df256c0591c22a65c4d3/pause_publish.jpg'
                  backgroundMute onStateChange={roomFacade.onLinkPush} onError={roomFacade.onLinkError}
                >
                  {children}
                  <View className='' onClick={quitLink}>
                    <Image src='https://leconglive-wxapp-1301839528.file.myqcloud.com/wxIcon/mic_close.png' className='close-ico-self'></Image>
                  </View>
                </LivePusher>
              </View>
            )}
          </View>
          <View className='v-sub-video-list'>
            {!!linkStreamList && linkStreamList.map((stream, index) => (
              <View className='v-sub-video' key={index}>
                <LivePlayer id={stream.owner} autoplay mode='RTC' objectFit='fillCrop' minCache={0.1} maxCache={0.3} autoPauseIfNavigate={false} autoPauseIfOpenNavigate={false}
                  src={stream.rtmpPlayUrl} onStateChange={roomFacade.onLinkPlay}
                >
                  {/* Kick this participant out of the link-mic session */}
                  {(stream.owner !== ownerId && (roomRole === ROOM_ROLE.OWNER_ANCHOR || roomRole === ROOM_ROLE.ANCHOR || roomRole === ROOM_ROLE.OWNER_ASSISTANT || roomRole === ROOM_ROLE.ASSISTANT)) && (
                    <View className='' onClick={(e) => { kickoutAnchor(e, stream.owner) }} data-userid={stream.owner}>
                      <Image src='https://leconglive-wxapp-1301839528.file.myqcloud.com/wxIcon/mic_close.png' className='close-ico'></Image>
                    </View>
                  )}
                </LivePlayer>
              </View>
            ))
            }
          </View>
        </View>
      )}

      {/* Show the Video component once the live has ended (video replay) */}
      {(roomStatus === ROOM_STATUS.ENDED && type !== LIVE_TYPE.AUDIO) && (
        <View className='v-main-video'>
          <Video id='video' src={liveStreams[0].videoUrl} onTimeUpdate={eventHandle} onEnded={ended} enable-auto-rotation object-fit='cover' controls playBtnPosition='center' autoPauseIfNavigate={false} autoPauseIfOpenNative={false} show-center-play-btn picture-in-picture-mode={["push", "pop"]}>
            {children}
          </Video>
        </View>
      )}

      {/* Avatar + audio controls shown after an audio-only live has ended */}
      {type === LIVE_TYPE.AUDIO && roomStatus === ROOM_STATUS.ENDED && (
        <View className='v-main-video'>
          {children}
          {audioShow && (
            <View>
              <View className='type-audio'>
                <Image src={anchorId ? anchorLogo : ownerLogo} className='type-audio-img'></Image>
                <View className='type-audio-name'>{anchorId ? anchorName : ownerName}</View>
                <Image src='https://leconglive-wxapp-1301839528.file.myqcloud.com/wxIcon/mic.png' className='type-audio-icon'></Image>
              </View>
              <View className='slider'>
                <Image src={audioPaused ? 'https://leconglive-wxapp-1301839528.file.myqcloud.com/wxIcon/audioStop.png' : 'https://leconglive-wxapp-1301839528.file.myqcloud.com/wxIcon/audioPlay.png'} className='playIcon' onClick={playAudio}></Image>
                <AtSlider min={0} max={100} value={audioTime} activeColor='#ffffff' backgroundColor='#BDBDBD' blockColor='#ffffff' blockSize={13} onChange={sliderChange} onChanging={sliderChangeing}></AtSlider>
                <View className='audioTime'> {audioCurrent}/{audioAllTime}</View>
              </View>
            </View>
          )}
        </View>
      )}
    </View>
  )
}
// Memoize so the component skips re-rendering when the parent re-renders with identical props.
const MemoizedIndex = React.memo(Index);
export default MemoizedIndex;