<template>
  <div class="container">
    <video id="rtc_media_player" width="1270" autoplay muted></video>
  </div>
</template>
<script>
export default {
  data() {
    return {
      sdk: null,
    };
  },
  methods: {
    startPublish() {
      document.getElementById("rtc_media_player").style.display = "block";

      // Close PC when user replay.

      // User should set the stream when publish is done, @see https://webrtc.org/getting-started/media-devices
      // However SRS SDK provides a consist API like https://webrtc.org/getting-started/remote-streams
      document.getElementById("rtc_media_player").srcObject = this.sdk.stream;
      // Optional callback, SDK will add track to stream.
      // sdk.ontrack = function (event) { console.log('Got track', event); sdk.stream.addTrack(event.track); };

      // https://developer.mozilla.org/en-US/docs/Web/Media/Formats/WebRTC_codecs#getting_the_supported_codecs

      // For example: webrtc://r.ossrs.net/live/livestream
      var url =
        "http://192.168.40.130:1985/rtc/v1/whip/?app=live&stream=livestream";
      this.sdk.publish(url).catch((reason) => {
        // Throw by sdk.
        if (reason instanceof SrsError) {
          if (reason.name === "HttpsRequiredError") {
            alert(
              `WebRTC推流必须是HTTPS或者localhost：${reason.name} ${reason.message}`
            );
          } else {
            alert(`${reason.name} ${reason.message}`);
          }
        }
        // See https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia#exceptions
        if (reason instanceof DOMException) {
          if (reason.name === "NotFoundError") {
            alert(
              `找不到麦克风和摄像头设备：getUserMedia ${reason.name} ${reason.message}`
            );
          } else if (reason.name === "NotAllowedError") {
            alert(
              `你禁止了网页访问摄像头和麦克风：getUserMedia ${reason.name} ${reason.message}`
            );
          } else if (
            [
              "AbortError",
              "NotAllowedError",
              "NotFoundError",
              "NotReadableError",
              "OverconstrainedError",
              "SecurityError",
              "TypeError",
            ].includes(reason.name)
          ) {
            alert(`getUserMedia ${reason.name} ${reason.message}`);
          }
        }

        this.sdk.close();
        document.getElementById("rtc_media_player").style.display = "none";
        console.error(reason);
      });
    },
  },
  mounted() {
    // Error type used by the SDK for non-DOM failures (e.g. HTTPS required).
    // NOTE(review): the original code threw `new SrsError(...)` below without
    // ever defining the class, so the throw itself raised a ReferenceError;
    // defining it here lets the intended error reach the caller.
    class SrsError extends Error {
      constructor(name, message) {
        super(message);
        this.name = name;
      }
    }

    // Minimal WHIP/WHEP async SDK (adapted from the SRS project). The
    // constructor returns a plain object (not a class instance) exposing:
    //   publish(url, options) - capture and publish via WHIP.
    //   play(url, options)    - subscribe via WHEP.
    //   close()               - release the RTCPeerConnection.
    //   ontrack(event)        - callback hook; default adds tracks to stream.
    //   stream                - local MediaStream mirroring the tracks.
    //   pc                    - the underlying RTCPeerConnection.
    class SrsRtcWhipWhepAsync {
      constructor() {
        var self = {};

        // Default capture constraints, see
        // https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
        self.constraints = {
          audio: true,
          video: {
            width: { ideal: 1280, max: 1600 },
          },
        };

        // See https://datatracker.ietf.org/doc/draft-ietf-wish-whip/
        // @url The WebRTC url to publish with, for example:
        //      http://localhost:1985/rtc/v1/whip/?app=live&stream=livestream
        // @options The options to control publishing, supports:
        //      camera: boolean, whether capture video from camera, default to true.
        //      screen: boolean, whether capture video from screen, default to false.
        //      audio: boolean, whether capture audio, default to true.
        self.publish = async function (url, options) {
          if (url.indexOf("/whip/") === -1)
            throw new Error(`invalid WHIP url ${url}`);
          const hasAudio = options?.audio ?? true;
          const useCamera = options?.camera ?? true;
          const useScreen = options?.screen ?? false;

          if (!hasAudio && !useCamera && !useScreen)
            throw new Error(
              `The camera, screen and audio can't be false at the same time`
            );

          if (hasAudio) {
            self.pc.addTransceiver("audio", { direction: "sendonly" });
          } else {
            self.constraints.audio = false;
          }

          if (useCamera || useScreen) {
            self.pc.addTransceiver("video", { direction: "sendonly" });
          }

          if (!useCamera) {
            self.constraints.video = false;
          }

          // getUserMedia is unavailable on insecure origins other than
          // localhost, see https://github.com/ossrs/srs/issues/2762.
          if (
            !navigator.mediaDevices &&
            window.location.protocol === "http:" &&
            window.location.hostname !== "localhost"
          ) {
            throw new SrsError(
              "HttpsRequiredError",
              `Please use HTTPS or localhost to publish, read https://github.com/ossrs/srs/issues/2762#issuecomment-983147576`
            );
          }

          if (useScreen) {
            const displayStream = await navigator.mediaDevices.getDisplayMedia({
              video: true,
            });
            // @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
            displayStream.getTracks().forEach(function (track) {
              self.pc.addTrack(track);
              // Notify about local track when stream is ok.
              self.ontrack && self.ontrack({ track: track });
            });
          }

          if (useCamera || hasAudio) {
            // Prefer an external (non-default) camera when one is present.
            // NOTE(review): device labels may be empty until getUserMedia has
            // been granted once, in which case no external camera is detected.
            await navigator.mediaDevices.enumerateDevices().then((devices) => {
              // Video input devices (cameras) only.
              const videoDevices = devices.filter(
                (device) => device.kind === "videoinput"
              );
              // An external camera is usually a labelled, non-default device.
              const externalCamera = videoDevices.find(
                (device) => device.label && device.label !== "Default Camera"
              );
              // Pin capture to the external camera. NOTE(review): the original
              // replaced the whole constraints object here (audio:true plus a
              // video object), silently re-enabling audio/video even when the
              // caller disabled them via options; merge the deviceId into the
              // existing video constraint instead.
              if (externalCamera && self.constraints.video) {
                const video =
                  self.constraints.video === true ? {} : self.constraints.video;
                self.constraints.video = Object.assign({}, video, {
                  deviceId: externalCamera.deviceId,
                });
              }
            });
            const userStream = await navigator.mediaDevices.getUserMedia(
              self.constraints
            );

            userStream.getTracks().forEach(function (track) {
              self.pc.addTrack(track);
              // Notify about local track when stream is ok.
              self.ontrack && self.ontrack({ track: track });
            });
          }

          // Standard WHIP exchange: POST the SDP offer, the response body is
          // the SDP answer.
          var offer = await self.pc.createOffer();
          await self.pc.setLocalDescription(offer);
          const answer = await new Promise(function (resolve, reject) {
            const xhr = new XMLHttpRequest();
            xhr.onload = function () {
              if (xhr.readyState !== xhr.DONE) return;
              if (xhr.status !== 200 && xhr.status !== 201) return reject(xhr);
              const data = xhr.responseText;
              // responseText is a string, so `data.code` is normally
              // undefined and this resolves; kept from the upstream SDK.
              return data.code ? reject(xhr) : resolve(data);
            };
            xhr.open("POST", url, true);
            xhr.setRequestHeader("Content-type", "application/sdp");
            xhr.send(offer.sdp);
          });
          await self.pc.setRemoteDescription(
            new RTCSessionDescription({ type: "answer", sdp: answer })
          );

          return self.__internal.parseId(url, offer.sdp, answer);
        };

        // See https://datatracker.ietf.org/doc/draft-ietf-wish-whip/
        // @url The WebRTC url to play with, for example:
        //      http://localhost:1985/rtc/v1/whep/?app=live&stream=livestream
        // @options The options to control playing, supports:
        //      videoOnly: boolean, whether only play video, default to false.
        //      audioOnly: boolean, whether only play audio, default to false.
        self.play = async function (url, options) {
          if (url.indexOf("/whip-play/") === -1 && url.indexOf("/whep/") === -1)
            throw new Error(`invalid WHEP url ${url}`);
          if (options?.videoOnly && options?.audioOnly)
            throw new Error(
              `The videoOnly and audioOnly in options can't be true at the same time`
            );

          if (!options?.videoOnly)
            self.pc.addTransceiver("audio", { direction: "recvonly" });
          if (!options?.audioOnly)
            self.pc.addTransceiver("video", { direction: "recvonly" });

          // Same offer/answer exchange as publish, against the WHEP endpoint.
          var offer = await self.pc.createOffer();
          await self.pc.setLocalDescription(offer);
          const answer = await new Promise(function (resolve, reject) {
            console.log(`Generated offer: ${offer.sdp}`);

            const xhr = new XMLHttpRequest();
            xhr.onload = function () {
              if (xhr.readyState !== xhr.DONE) return;
              if (xhr.status !== 200 && xhr.status !== 201) return reject(xhr);
              const data = xhr.responseText;
              console.log("Got answer: ", data);
              return data.code ? reject(xhr) : resolve(data);
            };
            xhr.open("POST", url, true);
            xhr.setRequestHeader("Content-type", "application/sdp");
            xhr.send(offer.sdp);
          });
          await self.pc.setRemoteDescription(
            new RTCSessionDescription({ type: "answer", sdp: answer })
          );

          return self.__internal.parseId(url, offer.sdp, answer);
        };

        // Close the publisher/player and drop the peer connection.
        self.close = function () {
          self.pc && self.pc.close();
          self.pc = null;
        };

        // The callback when got local stream.
        // @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
        self.ontrack = function (event) {
          // Add track to stream of SDK.
          self.stream.addTrack(event.track);
        };

        // Default configuration; ICE servers may be added by callers via pc.
        self.pc = new RTCPeerConnection();

        // To keep api consistent between player and publisher.
        // @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
        // @see https://webrtc.org/getting-started/media-devices
        self.stream = new MediaStream();

        // Internal APIs.
        self.__internal = {
          // Build a session id from the ICE ufrags of offer and answer
          // ("offerUfrag:answerUfrag"), plus the NACK simulator endpoint
          // derived from the server host of @url.
          parseId: (url, offer, answer) => {
            let sessionid = offer.substr(
              offer.indexOf("a=ice-ufrag:") + "a=ice-ufrag:".length
            );
            // SDP lines are CRLF-terminated; the -1 drops the trailing "\r".
            sessionid = sessionid.substr(0, sessionid.indexOf("\n") - 1) + ":";
            sessionid += answer.substr(
              answer.indexOf("a=ice-ufrag:") + "a=ice-ufrag:".length
            );
            sessionid = sessionid.substr(0, sessionid.indexOf("\n"));

            // Use an anchor element to parse protocol/host out of the url.
            const a = document.createElement("a");
            a.href = url;
            return {
              sessionid: sessionid, // ice-ufrag pair identifying the session.
              simulator: a.protocol + "//" + a.host + "/rtc/v1/nack/",
            };
          },
        };

        // https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/ontrack
        self.pc.ontrack = function (event) {
          if (self.ontrack) {
            self.ontrack(event);
          }
        };

        return self;
      }
    }

    // Tear down any previous session before creating a fresh SDK instance.
    if (this.sdk) this.sdk.close();
    this.sdk = new SrsRtcWhipWhepAsync();

    // Keep the player hidden (startPublish reveals it) and give it a border.
    const player = document.getElementById("rtc_media_player");
    player.style.display = "none";
    player.style.border = "1px solid #fff";

    this.startPublish();
    // Never play until window is loaded, @see https://github.com/ossrs/srs/issues/2732
  },
};
</script>
<style lang="less" scoped></style>
