const CONFIG = require("./config");
const MediasoupClient = require("mediasoup-client");
const SocketClient = require("socket.io-client");
const SocketPromise = require("socket.io-promise").default;

// Global state
// A real application would store this in user session(s)
const global = {
  server: {
    // socket.io connection to the signaling server (set in connectSocket)
    socket: null
  },
  mediasoup: {
    // MediasoupClient.Device instance (set in startMediasoup)
    device: null,

    // WebRTC connection with mediasoup
    webrtc: {
      // Send transport created by device.createSendTransport() (startTransport)
      transport: null,
      // Producers created by transport.produce() (startProducer)
      audioProducer: null,
      videoProducer: null
    }
  },
  recording: {
    // Set true while waiting for the server's SERVER_PRODUCER_READY ack for
    // each kind; the recording buttons are enabled once both are false again
    waitForAudio: false,
    waitForVideo: false
  }
};

// ----------------------------------------------------------------------------

// HTML UI elements
// ================

const ui = {
  // <textarea> mirroring the recorder child-process output (SERVER_LOG_LINE)
  console: document.getElementById("uiConsole"),

  // <button>
  startWebRTC: document.getElementById("uiStartWebRTC"), // open camera & start WebRTC
  startRecording: document.getElementById("uiStartRecording"), // start recording
  stopRecording: document.getElementById("uiStopRecording"), // stop recording

  // <video>
  localVideo: document.getElementById("uiLocalVideo") // local playback element
};

// Click handlers for the three buttons
ui.startWebRTC.onclick = startWebRTC;
ui.startRecording.onclick = startRecording;
ui.stopRecording.onclick = stopRecording;

// ----------------------------------------------------------------------------

// Click handler for the "Start WebRTC" button.
// Wires the whole pipeline, from the signaling socket up to the media
// producers. Each step depends on the previous one, so they run sequentially:
//   1. open the signaling WebSocket,
//   2. load the mediasoup Device with the router's RTP capabilities,
//   3. create the client side of the WebRTC send transport
//      (built by device.createSendTransport(webrtcTransportOptions)),
//   4. start producing the selected audio/video tracks.
async function startWebRTC() {
  console.log("Start WebRTC transmission from browser to mediasoup");

  const signalingSocket = connectSocket();
  const mediasoupDevice = await startMediasoup(signalingSocket);
  const sendTransport = await startTransport(signalingSocket, mediasoupDevice);
  await startProducer(signalingSocket, sendTransport);

  // ... do something clever with all this!
}

// ----
// Open the signaling WebSocket and register all server-event handlers.
// Returns the socket.io client instance (also stored in global.server.socket).
function connectSocket() {
  const serverUrl = `https://${CONFIG.https.ip}:${CONFIG.https.port}`;

  // The WebSocket is established over the HTTPS endpoint
  const socket = SocketClient(serverUrl, {
    path: CONFIG.https.wsPath,
    transports: ["websocket"]
  });
  global.server.socket = socket;

  socket.on("connect", () => {
    console.log(`WebSocket connected to server: ${serverUrl}`);
  });

  socket.on("error", err => {
    console.error(`WebSocket error: ${err}`);
  });

  // The server emits SERVER_PRODUCER_READY after its webrtcTransport.produce()
  // call succeeds, i.e. it is ready to receive media of the given kind
  socket.on("SERVER_PRODUCER_READY", kind => {
    console.log(`Server producer is ready, kind: ${kind}`);
    if (kind === "audio") {
      global.recording.waitForAudio = false;
    } else if (kind === "video") {
      global.recording.waitForVideo = false;
    }

    // Enable the recording buttons once every requested kind is ready
    const stillWaiting =
      global.recording.waitForAudio || global.recording.waitForVideo;
    if (!stillWaiting) {
      ui.startRecording.disabled = false;
      ui.stopRecording.disabled = false;
    }
  });

  // Mirror the server-side recorder log into the on-page console textarea
  socket.on("SERVER_LOG_LINE", line => {
    ui.console.value += line + "\n";
    ui.console.scrollTop = ui.console.scrollHeight;
  });

  return socket;
}

// ----

// Load the mediasoup device.
// Asks the server to create its mediasoup router (CLIENT_START_MEDIASOUP with
// the UI-selected video codec), then loads a client-side Device with the
// router's RTP capabilities — this is how the device learns the allowed media
// codecs and other settings.
//
// @param {object} socket - Connected signaling socket (socket.io client).
// @returns {Promise<object|null>} The loaded MediasoupClient.Device, or null
//   when mediasoup-client does not support this browser.
// @throws Rethrows any unexpected error from Device construction or loading.
async function startMediasoup(socket) {
  // Video codec selection from the UI: vp8 / h264
  const uiVCodecName = document.querySelector(
    "input[name='uiVCodecName']:checked"
  ).value;

  // Promisified request/response over the socket
  const socketRequest = SocketPromise(socket);
  // Tell the server we are ready; it answers with its router's RTP
  // capabilities (supported audio/video codecs, RTP header extensions, ...)
  const response = await socketRequest({
    type: "CLIENT_START_MEDIASOUP",
    vCodecName: uiVCodecName
  });
  const rtpCapabilities = response.data;

  console.log("[Server] Created mediasoup router, rtpCapabilities is: ");
  console.log(JSON.stringify(rtpCapabilities));

  let device = null;
  try {
    device = new MediasoupClient.Device();
  } catch (err) {
    if (err.name === "UnsupportedError") {
      console.error("mediasoup-client doesn't support this browser");
      return null;
    }
    // FIX: unknown errors used to be swallowed here, leaving device === null
    // and causing a confusing TypeError on device.load() below; rethrow them
    throw err;
  }
  global.mediasoup.device = device;

  try {
    // Load the device with the router's RTP capabilities, so it knows which
    // media codecs and settings are allowed
    await device.load({ routerRtpCapabilities: rtpCapabilities });
  } catch (err) {
    if (err.name === "InvalidStateError") {
      console.warn("mediasoup device was already loaded");
    } else {
      // FIX: rethrow instead of silently ignoring load failures and then
      // returning a device that never loaded
      throw err;
    }
  }

  console.log(
    "Created mediasoup device, handlerName: %s, has audio: %s, has video: %s",
    device.handlerName,
    device.canProduce("audio"),
    device.canProduce("video")
  );

  // Uncomment for debug
  // console.log("rtpCapabilities: %s", JSON.stringify(device.rtpCapabilities, null, 2));

  return device;
}

// ----

// Create the client side of the WebRTC send transport.
// Sends CLIENT_START_TRANSPORT; the server calls router.createWebRtcTransport()
// and replies with the transport options (id, iceParameters, iceCandidates,
// dtlsParameters). The device then builds the matching send transport on this
// end, so client and server each hold one side of the connection.
async function startTransport(socket, device) {
  const socketRequest = SocketPromise(socket);

  const response = await socketRequest({ type: "CLIENT_START_TRANSPORT" });
  const webrtcTransportOptions = response.data;

  console.log("Create webrtc transport, 'CLIENT_START_TRANSPORT' message response: ");
  console.log(webrtcTransportOptions);

  console.log("[Server] Created mediasoup WebRTC transport");

  // See https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-createSendTransport
  // The server-side counterpart must already exist
  // (created via router.createWebRtcTransport() on the server)
  const transport = await device.createSendTransport(webrtcTransportOptions);
  global.mediasoup.webrtc.transport = transport;

  // Fired on the first produce(): signal the local DTLS parameters to the
  // server-side transport, then confirm with callback()
  transport.on("connect", ({ dtlsParameters }, callback, _errback) => {
    socket.emit("CLIENT_CONNECT_TRANSPORT", dtlsParameters);
    callback();
  });

  // The client is the sending end, the server the receiving end
  console.log(
    "Created mediasoup device transport, direction: %s",
    transport.direction
  );

  return transport;
}

// ----

// Start producing media.
// Captures the selected audio/video tracks with getUserMedia, plays them back
// in the local <video>, and hands each track to the send transport. Note that
// transport.produce() only signals track/RTP parameters to the router — the
// media itself flows over the transport.
async function startProducer(socket, transport) {
  // Which media the user selected in the UI: audio, video, or both
  const uiMedia = document.querySelector("input[name='uiMedia']:checked").value;
  const hasAudio = uiMedia.includes("audio");
  const hasVideo = uiMedia.includes("video");
  if (hasAudio) {
    global.recording.waitForAudio = true;
  }
  if (hasVideo) {
    global.recording.waitForVideo = true;
  }

  // Capture the local devices and show the stream locally
  const stream = await navigator.mediaDevices.getUserMedia({
    audio: hasAudio,
    video: hasVideo
  });
  ui.localVideo.srcObject = stream;

  // Start mediasoup-client's WebRTC producer(s)

  // Fired by transport.produce() below: forward the produce parameters to the
  // server (which creates its own producer) and resolve with the id the
  // server's callback returns.
  // NOTE(review): mediasoup-client docs show callback({ id }); verify the
  // { producerId } key against the server's CLIENT_START_PRODUCER handler.
  transport.on("produce", (produceParameters, callback, _errback) => {
    socket.emit("CLIENT_START_PRODUCER", produceParameters, producerId => {
      callback({ producerId });
    });
  });

  if (hasAudio) {
    const audioTrack = stream.getAudioTracks()[0];
    // Instruct the transport to send this audio track to the mediasoup router
    const audioProducer = await transport.produce({ track: audioTrack });
    global.mediasoup.webrtc.audioProducer = audioProducer;
  }

  if (hasVideo) {
    const videoTrack = stream.getVideoTracks()[0];
    // Extra producer options (e.g. simulcast encodings) come from the config
    const videoProducer = await transport.produce({
      track: videoTrack,
      ...CONFIG.mediasoup.client.videoProducer
    });
    global.mediasoup.webrtc.videoProducer = videoProducer;
  }
}

// ----------------------------------------------------------------------------

// {start,stop}Recording
// =====================

// Tell the server to start recording with the recorder selected in the UI
// (ffmpeg or gstreamer), i.e. to begin pushing media into its plain RTP
// transport towards the recorder process.
function startRecording() {
  const uiRecorder = document.querySelector(
    "input[name='uiRecorder']:checked"
  ).value;
  global.server.socket.emit("CLIENT_START_RECORDING", uiRecorder);
}

// Tell the server to stop the recorder process.
function stopRecording() {
  global.server.socket.emit("CLIENT_STOP_RECORDING");
}

// ----------------------------------------------------------------------------
