import { RTCConnection } from "@/utils/RTCConnection";
import { getWsClient } from "@/getClient";
import { useClientStore } from '../pinia/stores/client';
import router from "@/route";
import { onMounted, onUnmounted, provide, ref, reactive, watch, Ref } from 'vue';
import mitt from "@/mitt";
import { users } from "@/shared/types/user";
import { courses } from "@/shared/types/courses";
import { VideoStreamGetter } from '@/utils/decorators';

/**
 * Composable driving the student side of a live-streaming session.
 *
 * Responsibilities (all wired over the project's websocket client):
 *  - receive the host's WebRTC offer and render the stream into `videoRef`;
 *  - act as a relay node: forward the host's tracks to downstream peers the
 *    server assigns to us (`afterConnections`);
 *  - manage a separate "talk" channel when this student is granted the
 *    microphone, including the persistent notification used to stop talking;
 *  - keep the student roster / course info in sync and clean everything up
 *    on unmount.
 *
 * NOTE(review): `ElMessage` / `ElNotification` / `ElMessageBox` are used as
 * globals — presumably provided by Element Plus auto-import; verify.
 *
 * @param courseId id of the course whose live session we join
 * @returns refs and actions consumed by the live-view component
 */
export function wsLiveReceive(courseId: string) {
  const wsclient = getWsClient();
  const { currentUser, client } = useClientStore();

  // Roster shown in the UI: each entry pairs the user record with its
  // online/offline status.
  const students = ref(
    new Array<{
      student: users;
      status: "online" | "offline";
    }>()
  );
  const courseInfo = ref<courses>({} as courses);

  // <video> element the component binds; receives the host's MediaStream.
  const videoRef = ref<HTMLVideoElement>();

  // Upstream connection towards the host (or the relay node feeding us).
  const rtcToHost = ref<RTCConnection>();
  provide("rtc", rtcToHost);

  // Downstream relay connections, keyed by the peer's user id.
  const afterConnections = new Map<string, RTCConnection>();

  // Talk-channel state: only set while this student holds the microphone.
  let talkChannel: RTCConnection | undefined;
  // Handle of the persistent "stop talking" notification. We only ever call
  // close() on it, so type it minimally instead of `any`.
  let notice: { close: () => void } | undefined;

  // Microphone stream captured for talking; undefined when not talking.
  const audioStream: Ref<MediaStream | undefined> = ref();

  // UI toggle mirroring whether our own audio track is enabled.
  const selfAudioOn = reactive({
    audio: true,
  });
  watch(selfAudioOn, (newValue) => {
    if (audioStream.value) {
      const track = audioStream.value.getAudioTracks()[0];
      track.enabled = newValue.audio;
    }
  });

  // Stop and release the captured microphone stream.
  const stopStream = () => {
    audioStream.value?.getAudioTracks().forEach((t) => t.stop());
    audioStream.value = undefined;
    selfAudioOn.audio = false;
  };

  // Tear down the talk channel, the mic stream and the notification.
  const stopTalk = () => {
    talkChannel?.closeConnection();
    talkChannel = undefined;
    stopStream();
    // FIX: `notice` is only assigned once a talk actually starts; the
    // original called `notice.close()` unconditionally and crashed on
    // unmount when the user never talked, aborting the rest of cleanup.
    notice?.close();
    notice = undefined;
  };

  // Ask the teacher for permission to talk.
  const talk = async () => {
    if (talkChannel) {
      stopTalk();
    }
    const ret = await client.callApi("AMessages/Live/TalkRequest", {
      course: courseId,
    });
    if (ret.isSucc) {
      ElMessage.success("申请成功，等待教师回应");
    }
  };

  // Open the RTC talk channel towards the host and attach our mic track.
  const talkConnect = async (host: string) => {
    try {
      audioStream.value = await VideoStreamGetter.getCameraStream({
        audio: true,
      });
      const audioTrack = audioStream.value.getAudioTracks()[0];
      audioTrack.enabled = true;
      selfAudioOn.audio = true;
      console.log("prepare for talking...");

      talkChannel = new RTCConnection(
        wsclient,
        currentUser._id,
        "live",
        courseId
      );
      talkChannel.target = host;
      talkChannel.audioProvider = talkChannel.peerConnection.addTrack(
        audioTrack,
        audioStream.value
      );
    } catch (error) {
      // Capture failed (no device / permission denied): roll back local
      // state and tell the server we are not talking after all.
      if (error instanceof Error) {
        stopTalk();
        client.callApi("AMessages/Live/StopTalk", {
          course: courseId,
        });
        return;
      }
    }
  };

  // --- websocket listeners -------------------------------------------------

  wsclient.listenMsg("AMessages/Live/LiveConn", (msg) => {
    if (msg.type === "video-offer") {
      // Upstream node offers us its stream: build the connection and answer.
      rtcToHost.value = new RTCConnection(
        wsclient,
        currentUser._id,
        "live",
        courseId
      );
      rtcToHost.value.handleVideoOffer(msg);
    } else if (msg.type === "new-ice-candidate") {
      // An ICE candidate may belong to the talk channel, the upstream
      // connection, or one of our downstream relay connections.
      if (talkChannel && talkChannel.target === msg.origin) {
        talkChannel.addCandidate(msg);
        return;
      }

      if (msg.origin === rtcToHost.value?.target) {
        console.log("和host沟通");
        rtcToHost.value?.addCandidate(msg);
      } else {
        console.log("和后节点沟通");
        const afterConnection = afterConnections.get(msg.origin);
        if (afterConnection === undefined) return console.log("没有这个后节点");
        afterConnection.addCandidate(msg);
      }
    } else {
      // video-answer: from the teacher it answers our talk channel;
      // otherwise it answers one of our downstream relay offers.
      if (courseInfo.value.teacher === msg.origin) {
        console.log("talk response");
        talkChannel?.handleVideoAnswer(msg);
        return;
      }

      const afterConnection = afterConnections.get(msg.origin);
      if (afterConnection === undefined) throw new Error("target出错");

      afterConnection.handleVideoAnswer(msg);
    }
  });

  // The server picked us as a relay node: open a connection to `target` and,
  // if we already hold the host stream, forward its tracks immediately.
  wsclient.listenMsg("AMessages/Live/LiveTarget", (msg) => {
    const target = msg.target;
    console.log(target);
    const afterConnection = new RTCConnection(
      wsclient,
      currentUser._id,
      "live",
      courseId
    );
    afterConnection.target = target;
    afterConnections.set(target, afterConnection);

    if (rtcToHost.value && rtcToHost.value.streams.length !== 0) {
      const hostStream = rtcToHost.value.streams[0];
      const videoTracks = hostStream.getVideoTracks();
      const audioTracks = hostStream.getAudioTracks(); // FIX: was `audioracks`

      afterConnection.videoProvider = afterConnection.peerConnection.addTrack(
        videoTracks[0],
        hostStream
      );
      afterConnection.audioProvider = afterConnection.peerConnection.addTrack(
        audioTracks[0],
        hostStream
      );
    }
  });

  // Full roster + course info snapshot.
  wsclient.listenMsg("AMessages/Live/StuList", (msg) => {
    students.value = msg.students.map((stu) => ({
      status: stu.status,
      student: stu,
    }));
    courseInfo.value = msg.course;
  });

  // A peer's connection state changed: update the roster and drop any relay
  // connection we held towards a disconnected peer.
  wsclient.listenMsg("AMessages/Live/StuStatus", (msg) => {
    if (msg.status !== "connect") {
      afterConnections.delete(msg.student);
    }
    const student = students.value.find(
      (stu) => stu.student._id === msg.student
    );
    // FIX: guard instead of `student!` — the message may reference a student
    // that is not (yet) present in the roster.
    if (student) {
      student.status = msg.status === "disconnect" ? "offline" : "online";
    }
    console.log(
      students.value.map((stu) => ({
        name: stu.student.name,
        status: stu.status,
      }))
    );
  });

  // The teacher accepted our talk request: connect and show a persistent
  // notification whose click ends the talk.
  wsclient.listenMsg("AMessages/Live/TalkRequest", (msg) => {
    talkConnect(courseInfo.value.teacher);
    notice = ElNotification({
      type: "warning",
      message: "点击结束发言...",
      duration: 0,
      showClose: false,
      onClick: async () => {
        try {
          await ElMessageBox.confirm("结束讲话");
          stopTalk();
          await client.callApi("AMessages/Live/StopTalk", {
            course: courseId,
          });
        } catch (error) {
          // User cancelled the confirm dialog — keep talking.
        }
      },
    });
  });

  // The host ended the live session.
  wsclient.listenMsg("AMessages/Live/LiveEnd", (msg) => {
    ElMessage.warning("直播结束");
    router.back();
  });

  onMounted(async () => {
    const v = await wsclient.connect();
    if (!v.isSucc) {
      ElMessage.error(v.errMsg);
      return;
    }

    // Announce ourselves to the session.
    wsclient.sendMsg("AMessages/Live/StuStatus", {
      student: currentUser._id,
      course: courseId,
      status: "connect",
    });

    // Fired by RTCConnection once upstream tracks arrive: render the stream
    // locally and re-feed it to every downstream relay connection.
    mitt.on("trackReady", () => {
      if (!rtcToHost.value) return;
      const newStream = rtcToHost.value.streams[0];
      const videoTrack = newStream.getVideoTracks()[0];
      const audioTrack = newStream.getAudioTracks()[0];

      (videoRef.value as HTMLVideoElement).srcObject = newStream;
      if (afterConnections.size !== 0) {
        console.log("readd tracks");
        afterConnections.forEach((afterConnection) => {
          afterConnection.videoProvider?.replaceTrack(videoTrack);
          afterConnection.audioProvider?.replaceTrack(audioTrack);
        });
      }
    });
  });

  onUnmounted(async () => {
    stopTalk();
    await client.callApi("AMessages/Live/StopTalk", {
      course: courseId,
    });

    mitt.off("trackReady");
    wsclient.disconnect();
    // Close the upstream connection…
    rtcToHost.value?.closeConnection();
    // …and FIX: also close every downstream relay connection, which the
    // original leaked on unmount.
    afterConnections.forEach((c) => c.closeConnection());
    afterConnections.clear();
  });

  return {
    wsclient,
    videoRef,
    students,
    courseInfo,
    talk,
    audioStream,
    selfAudioOn,
  };
}
