import { ref, onMounted, Ref, onUnmounted, reactive, watch, } from "vue";
import { useClientStore } from "../pinia/stores/client";
import { getWsClient } from "../getClient";
import { RTCConnection } from "@/utils/RTCConnection";
// import { wsLiveHost } from '@/hooks/wsLiveHost';
import { users } from "../shared/types/user";
import { courses } from '../shared/types/courses';
import { main } from "@popperjs/core";
import { VideoStreamGetter } from "@/utils/decorators";
import mitt from '@/mitt';
/**
 * Host (teacher) side of the live-class WebRTC broadcast.
 *
 * Captures the teacher's camera/screen + microphone, fans the tracks out to
 * one RTCConnection per connected viewer, keeps the student roster in sync,
 * and handles inbound "talk" (student speaking) sessions over a dedicated
 * connection.
 *
 * @param courseId id of the course being broadcast
 * @param vloumn   playback-volume ref for the local preview video element
 *                 (set to 100 while a student is talking, 0 otherwise)
 * @returns refs and actions consumed by the host UI
 */
export function wsLiveHost(courseId: string, vloumn: Ref<number>) {
  // Course metadata, filled from the server's StuList push.
  const courseInfo = ref<courses>({} as courses);
  // Roster with live online/offline status per student.
  const students = ref(
    new Array<{
      student: users;
      status: "online" | "offline";
    }>()
  );
  const { client, currentUser } = useClientStore();
  const wscli = getWsClient();

  // Sender side: one outbound RTCConnection per viewer, keyed by viewer id.
  const rtcConnections = new Map<string, RTCConnection>();

  // Dedicated inbound connection for a student who was granted the floor.
  let talkChannel: RTCConnection | undefined;

  // A student requests to speak; confirm with the teacher, then grant via API.
  wscli.listenMsg('AMessages/Live/TalkRequest', async msg => {
    try {
      await ElMessageBox.confirm(`${msg.studentName}申请发言`)
      const ret = await client.callApi('AMessages/Live/TalkRequest', {
        course: courseId,
        stu: msg.studentId
      })
      if (ret.isSucc) {
        ElMessage.success(`${msg.studentName}发言`)
      }
    } catch (error) {
      // Dialog dismissed by the teacher — the request is simply ignored.
    }
  })

  const videoRef = ref<HTMLVideoElement>();

  // A new viewer connected: build a peer connection and attach the current
  // outgoing video + audio tracks. (`cameraStream` is declared below; this
  // handler only fires after connect(), so the binding is initialized.)
  wscli.listenMsg("AMessages/Live/LiveTarget", (msg) => {
    const target = msg.target;
    console.log(target);

    const rtcConnection = new RTCConnection(
      wscli,
      currentUser._id,
      "live",
      courseId
    );
    rtcConnection.target = target;
    rtcConnections.set(target, rtcConnection);
    console.log(rtcConnections);

    const cameraVideoTracks = cameraStream.value.getVideoTracks();
    const audioTracks = cameraStream.value.getAudioTracks();

    // Prefer the camera track; fall back to the screen share when the
    // camera channel is not active.
    rtcConnection.videoProvider = rtcConnection.peerConnection.addTrack(
      cameraVideoTracks[0] || screenStream?.getVideoTracks()[0],
      cameraStream.value
    );
    rtcConnection.audioProvider = rtcConnection.peerConnection.addTrack(
      audioTracks[0],
      cameraStream.value
    );
  });

  // WebRTC signalling fan-in for both viewer connections and the talk channel.
  wscli.listenMsg("AMessages/Live/LiveConn", (msg) => {
    if (msg.type === "video-answer") {
      const rtcConnection = rtcConnections.get(msg.origin);
      rtcConnection?.handleVideoAnswer(msg);
    } else if (msg.type === "new-ice-candidate") {
      // Candidates from the currently-talking student belong to the talk
      // channel, not to the broadcast connection map.
      if (talkChannel && talkChannel.target === msg.origin) {
        talkChannel.addCandidate(msg)
        return
      }

      const rtcConnection = rtcConnections.get(msg.origin);
      rtcConnection?.addCandidate(msg);
    }
    else if (msg.type === 'video-offer') {
      // A student starts talking. Tear down any previous talk session first.
      if (talkChannel) {
        talkChannel.closeConnection()
        talkChannel = undefined
        vloumn.value = 0
        videoRef.value!.muted = true
        console.log('removeTrack');
        mainStream.getAudioTracks().forEach(t => {
          // BUGFIX: was `t.stop` (a no-op property access) — the previous
          // talk audio track was never actually stopped.
          t.stop()
          mainStream.removeTrack(t)
        })
      }

      console.log('talk request');
      talkChannel = new RTCConnection(
        wscli,
        currentUser._id,
        "live",
        courseId
      )
      talkChannel.target = msg.origin
      talkChannel.handleVideoOffer(msg)
    }
  });

  // Roster status updates; also drops the peer connection of a student who
  // disconnected.
  wscli.listenMsg("AMessages/Live/StuStatus", (msg) => {
    if (msg.status === "disconnect") {
      if (rtcConnections.has(msg.student)) {
        const studentConnection = rtcConnections.get(msg.student);
        studentConnection?.peerConnection.close();
        rtcConnections.delete(msg.student);
      }
      console.log(msg.student, " disconnect");
    } else {
      console.log(msg.student, " connected");
    }
    const student = students.value.find(
      (stu) => stu.student._id === msg.student
    );
    // Guard instead of `student!` — a status push may race the StuList push.
    if (student) {
      student.status = msg.status === "disconnect" ? "offline" : "online";
    }
    console.log(
      students.value.map((stu) => {
        return {
          name: stu.student.name,
          status: stu.status,
        };
      })
    );
  });

  // Initial roster + course info.
  wscli.listenMsg("AMessages/Live/StuList", (msg) => {
    students.value = msg.students.map((stu) => {
      return {
        status: "offline",
        student: stu,
      };
    });
    courseInfo.value = msg.course;
  });

  // Local capture state. `cameraStream` always holds the tracks currently
  // being sent; `mainStream` feeds the local preview <video>.
  let cameraStream: Ref<MediaStream> = ref(new MediaStream());
  let screenStream: MediaStream | undefined;
  const mainStream = new MediaStream()

  const supportedConstraints = navigator.mediaDevices.getSupportedConstraints();
  console.log(supportedConstraints);

  const liveOption = reactive({
    channel: "camera" as "camera" | "screen",
    videoRate: { width: 1280, height: 720 },

    video: false,
    audio: false,
    screenAudio: false,
    cameraConstraints: {
      video: {
        width: { min: 640, ideal: 1280, max: 1920 },
        height: { min: 480, ideal: 720, max: 1080 },
        aspectRatio: 16 / 9,
        facingMode: 'user'
      },
      audio: true,
    } as MediaStreamConstraints,
    screenConstraints: {
      // NOTE: getDisplayMedia rejects `min`/`exact` constraints, so only
      // `ideal` values are used here.
      video: {
        width: { ideal: 1920 },
        height: { ideal: 1080 },
        aspectRatio: 16 / 9
      },
      audio: true,
    } as MediaStreamConstraints,
  });
  const connected = ref(false);

  // Switch the broadcast to screen sharing; the channel watcher performs the
  // actual capture and track swapping.
  const shareScreen = async () => {
    liveOption.channel = "screen";
  };

  // Switch the broadcast back to the camera.
  const camera = async () => {
    liveOption.channel = "camera";
  };

  // Re-acquire the screen + microphone without changing the channel (e.g. to
  // pick a different window). Falls back to a silent AudioContext track when
  // no microphone is available so the audio sender always has a track.
  const reShare = async () => {
    let audioStreamTrack: MediaStreamTrack;
    try {
      audioStreamTrack = (await navigator.mediaDevices.getUserMedia({
        audio: true
      })).getAudioTracks()[0]
    } catch (error) {
      audioStreamTrack = new AudioContext().createMediaStreamDestination().stream.getAudioTracks()[0]
    }

    try {
      // Acquire a fresh screen capture.
      const newScreenStream = await navigator.mediaDevices.getDisplayMedia(
        liveOption.screenConstraints
      );
      const newVideoTrack = newScreenStream.getVideoTracks()[0];
      // Swap the outgoing video track on every viewer connection.
      rtcConnections.forEach((rtcConnection) => {
        rtcConnection.videoProvider?.replaceTrack(newVideoTrack);
      });

      // Update the local preview.
      mainStream.removeTrack(mainStream.getVideoTracks()[0])
      mainStream.addTrack(newVideoTrack)

      // Stop and drop the stale screen tracks, then adopt the new one.
      screenStream?.getVideoTracks().forEach((track) => {
        track.stop();
        screenStream?.removeTrack(track);
      });
      screenStream?.addTrack(newVideoTrack);
    } catch (error) {
      // Capture cancelled/failed — re-trigger the screen-channel flow.
      shareScreen();
    }

    // Swap the outgoing audio track; cameraStream remains its holder.
    rtcConnections.forEach((rtcConnection) => {
      rtcConnection.audioProvider?.replaceTrack(audioStreamTrack);
    });
    cameraStream.value.getAudioTracks().forEach(at => {
      at.stop()
      cameraStream.value.removeTrack(at)
    })
    cameraStream.value.addTrack(audioStreamTrack)
    audioStreamTrack.enabled = liveOption.audio
    mitt.emit('retransaudio')
  };

  // Mirror the mute toggles onto the currently-held tracks.
  watch(
    liveOption,
    async (newValue) => {
      const audioTracks = cameraStream.value.getAudioTracks();
      const videoTracks = cameraStream.value.getVideoTracks();

      if (audioTracks.length !== 0)
        cameraStream.value.getAudioTracks()[0].enabled = newValue.audio;

      if (videoTracks.length !== 0)
        cameraStream.value.getVideoTracks()[0].enabled = newValue.video;
    },
    { immediate: false }
  );

  // Channel watcher: performs the actual camera <-> screen switch. It is
  // stopped and re-created inside its own error paths so that reverting
  // `liveOption.channel` does not re-trigger it recursively.
  let watchStopHandler = watch(
    () => liveOption.channel,
    watcher
  );
  async function watcher(value: string) {
    // Always re-acquire the microphone (or a silent fallback track).
    let audioStreamTrack: MediaStreamTrack;
    try {
      audioStreamTrack = (await navigator.mediaDevices.getUserMedia({
        audio: true
      })).getAudioTracks()[0]
    } catch (error) {
      audioStreamTrack = new AudioContext().createMediaStreamDestination().stream.getAudioTracks()[0]
    }

    if (value === "camera") {
      try {
        // Acquire the camera stream.
        const newCameraStream = await VideoStreamGetter.getCameraStream(liveOption.cameraConstraints)
        if (!newCameraStream) return
        const newVideoTrack = newCameraStream.getVideoTracks()[0];

        // Swap the outgoing video track on every viewer connection.
        rtcConnections.forEach((rtcConnection) => {
          rtcConnection.videoProvider?.replaceTrack(newVideoTrack);
        });
        cameraStream.value.addTrack(newVideoTrack);
        mainStream.removeTrack(mainStream.getVideoTracks()[0])
        mainStream.addTrack(newVideoTrack)
        liveOption.video = true;
        // Stop the now-unused screen tracks.
        screenStream?.getVideoTracks().forEach((track) => {
          track.stop();
          // BUGFIX: was cameraStream.value.removeTrack(track) — a spec-level
          // no-op, since screen tracks were never added to cameraStream.
          screenStream?.removeTrack(track);
        });
      } catch (error) {
        // Camera unavailable: fall back to screen without re-triggering.
        watchStopHandler()
        shareScreen();
        watchStopHandler = watch(
          () => liveOption.channel,
          watcher
        );
      }
    } else {
      try {
        // Acquire the screen stream.
        screenStream = await VideoStreamGetter.getScreenStream(liveOption.screenConstraints)
        if (!screenStream) return
        const screenVideoTracks = screenStream.getVideoTracks();

        // Swap the outgoing video track on every viewer connection.
        rtcConnections.forEach((rtcConnection) => {
          rtcConnection.videoProvider?.replaceTrack(screenVideoTracks[0]);
        });
        mainStream.removeTrack(mainStream.getVideoTracks()[0])
        mainStream.addTrack(screenVideoTracks[0])
        liveOption.video = false;
        // Stop the now-unused camera tracks.
        cameraStream.value.getVideoTracks().forEach((track) => {
          track.stop();
          cameraStream.value.removeTrack(track);
        });
      } catch (error) {
        // Screen capture refused: fall back to camera without re-triggering.
        console.log("camera");
        watchStopHandler()
        camera();
        watchStopHandler = watch(
          () => liveOption.channel,
          watcher
        );
      }
    }

    // Swap the outgoing audio track regardless of the chosen channel.
    rtcConnections.forEach((rtcConnection) => {
      rtcConnection.audioProvider?.replaceTrack(audioStreamTrack);
    });
    cameraStream.value.getAudioTracks().forEach(at => {
      at.stop()
      cameraStream.value.removeTrack(at)
    })
    cameraStream.value.addTrack(audioStreamTrack)
    audioStreamTrack.enabled = liveOption.audio
    mitt.emit('retransaudio')
  }

  onMounted(async () => {
    try {
      cameraStream.value = await VideoStreamGetter.getCameraStream(liveOption.cameraConstraints) as MediaStream
    } catch (error) {
      // No camera/mic available: synthesize an empty stream so the senders
      // and the preview still have tracks to work with.
      console.log("fake stream");

      cameraStream.value = new MediaStream()

      // Silent audio track.
      const emptyAudioTrack = new AudioContext().createMediaStreamDestination().stream.getAudioTracks()[0]
      cameraStream.value.addTrack(emptyAudioTrack)

      // Blank video track from an off-screen canvas.
      const canvas = document.createElement('canvas')
      const ctx = canvas.getContext('2d')
      //@ts-ignore
      const emptyVideo = (canvas.captureStream(25))
      cameraStream.value.addTrack(emptyVideo.getVideoTracks()[0])
    }
    console.log(cameraStream.value);

    const video = cameraStream.value.getVideoTracks()[0]
    const audio = cameraStream.value.getAudioTracks()[0]

    // Note: the audio track is deliberately NOT added to the local preview
    // stream (would echo the teacher's own microphone).
    videoRef.value!.srcObject = mainStream
    if (video) {
      mainStream.addTrack(video)
      // BUGFIX: the enabled flags were swapped (video used liveOption.audio
      // and audio used liveOption.video).
      video.enabled = liveOption.video;
    }
    if (audio) {
      audio.enabled = liveOption.audio;
    }

    try {
      const v = await wscli.connect();
      if (!v.isSucc) {
        ElMessage.error(v.errMsg);
        return;
      }
      console.log('StartLive');
      await wscli.sendMsg("AMessages/Live/StartLive", {
        courseId: courseId,
        teacher: currentUser._id,
      });
      connected.value = true;
    } catch (error) {
      ElMessage.error((error as Error).message);
    }

    // Remote talk audio arrived: route it into the local preview stream and
    // unmute playback.
    mitt.on("trackReady", () => {
      if (!talkChannel) return
      const audioTrack = talkChannel.streams[0].getAudioTracks()[0]

      mainStream.addTrack(audioTrack)
      vloumn.value = 100
      videoRef.value!.srcObject = mainStream
    })
  });

  onUnmounted(() => {
    wscli.disconnect();
    // Stop every local track.
    cameraStream.value.getTracks().forEach((track) => {
      track.stop();
    });
    screenStream?.getTracks().forEach((track) => {
      track.stop();
    });

    mitt.off('trackReady')

    // Close every peer connection, including an active talk session
    // (previously leaked on unmount).
    talkChannel?.closeConnection()
    talkChannel = undefined
    rtcConnections.forEach((rtc) => {
      rtc.closeConnection();
    });
    rtcConnections.clear();
  });

  return {
    courseInfo, connected, liveOption, students, wscli, videoRef,
    shareScreen, camera, reShare,
    cameraStream
  };
}
