<template>
  <div id="content">
    <!-- Hidden <video>: the WebRTC MediaStream is attached to it via
         srcObject in mounted(); frames are rendered onto the canvas
         below instead of being shown here. -->
    <video id="jswebrtc" controls autoplay style="display: none"></video>
    <!-- Output surface for OpenCV: cv.imshow("can", ...) paints here. -->
    <canvas id="can" style="border: 1px solid;margin-top: 70px;"></canvas>
  </div>
</template>
<script>
export default {
  data() {
    return {
      sdk: null,
      videoSrc: null,
      mediaPlayer: null,
    };
  },
  mounted() {
    class SrsRtcWhipWhepAsync {
      constructor(vueInstance) {
        var self = {};
        self.vueInstance = vueInstance; // 将 Vue 实例传递给 self 对象
        self.constraints = {
          audio: true,
          video: {
            width: { ideal: 320, max: 576 },
          },
        };

        self.publish = async function (url, options) {
          if (url.indexOf("/whip/") === -1)
            throw new Error(`invalid WHIP url ${url}`);
          const hasAudio = options?.audio ?? true;
          const useCamera = options?.camera ?? true;
          const useScreen = options?.screen ?? false;

          if (!hasAudio && !useCamera && !useScreen)
            throw new Error(
              `The camera, screen and audio can't be false at the same time`
            );

          if (hasAudio) {
            self.pc.addTransceiver("audio", { direction: "sendonly" });
          } else {
            self.constraints.audio = false;
          }

          if (useCamera || useScreen) {
            self.pc.addTransceiver("video", { direction: "sendonly" });
          }

          if (!useCamera) {
            self.constraints.video = false;
          }

          if (
            !navigator.mediaDevices &&
            window.location.protocol === "http:" &&
            window.location.hostname !== "localhost"
          ) {
            throw new SrsError(
              "HttpsRequiredError",
              `Please use HTTPS or localhost to publish, read https://github.com/ossrs/srs/issues/2762#issuecomment-983147576`
            );
          }

          if (useScreen) {
            const displayStream = await navigator.mediaDevices.getDisplayMedia({
              video: true,
            });
            // @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
            displayStream.getTracks().forEach(function (track) {
              self.pc.addTrack(track);
              // Notify about local track when stream is ok.
              self.ontrack && self.ontrack({ track: track });
            });
          }

          if (useCamera || hasAudio) {
            const userStream = await navigator.mediaDevices.getUserMedia(
              self.constraints
            );

            userStream.getTracks().forEach(function (track) {
              self.pc.addTrack(track);
              // Notify about local track when stream is ok.
              self.ontrack && self.ontrack({ track: track });
            });
          }

          var offer = await self.pc.createOffer();
          await self.pc.setLocalDescription(offer);
          const answer = await new Promise(function (resolve, reject) {
            console.log(`Generated offer: ${offer.sdp}`);

            const xhr = new XMLHttpRequest();
            xhr.onload = function () {
              if (xhr.readyState !== xhr.DONE) return;
              if (xhr.status !== 200 && xhr.status !== 201) return reject(xhr);
              const data = xhr.responseText;
              console.log("Got answer: ", data);
              return data.code ? reject(xhr) : resolve(data);
            };
            xhr.open("POST", url, true);
            xhr.setRequestHeader("Content-type", "application/sdp");
            xhr.send(offer.sdp);
          });
          await self.pc.setRemoteDescription(
            new RTCSessionDescription({ type: "answer", sdp: answer })
          );

          return self.__internal.parseId(url, offer.sdp, answer);
        };

        self.play = async function (url, options) {
          if (url.indexOf("/whip-play/") === -1 && url.indexOf("/whep/") === -1)
            throw new Error(`invalid WHEP url ${url}`);
          if (options?.videoOnly && options?.audioOnly)
            throw new Error(
              `The videoOnly and audioOnly in options can't be true at the same time`
            );

          if (!options?.videoOnly)
            self.pc.addTransceiver("audio", { direction: "recvonly" });
          if (!options?.audioOnly)
            self.pc.addTransceiver("video", { direction: "recvonly" });

          var offer = await self.pc.createOffer();
          await self.pc.setLocalDescription(offer);
          const answer = await new Promise(function (resolve, reject) {
            // console.log(`Generated offer: ${offer.sdp}`);

            const xhr = new XMLHttpRequest();
            xhr.onload = function () {
              if (xhr.readyState !== xhr.DONE) return;
              if (xhr.status !== 200 && xhr.status !== 201) return reject(xhr);
              const data = xhr.responseText;
              // console.log("Got answer: ", data);
              return data.code ? reject(xhr) : resolve(data);
            };
            xhr.open("POST", url, true);
            xhr.setRequestHeader("Content-type", "application/sdp");
            xhr.send(offer.sdp);
          });
          await self.pc.setRemoteDescription(
            new RTCSessionDescription({ type: "answer", sdp: answer })
          );

          return self.__internal.parseId(url, offer.sdp, answer);
        };

        // Close the publisher.
        self.close = function () {
          self.pc && self.pc.close();
          self.pc = null;
        };

        // The callback when got local stream.
        self.ontrack = (event) => {
          if (event.track.kind === "video") {
            console.log("video轨道已接入");
            // 创建 MediaStreamTrackProcessor
            const processor = new MediaStreamTrackProcessor({
              track: event.track,
            });

            // 处理视频帧  首先获取读取器
            const reader = processor.readable.getReader();

            // 使用异步迭代处理视频帧
            async function processVideo() {
              try {
                while (true) {
                  const { done, value: frame } = await reader.read(); // 从读取器读取一帧
                  if (done) return; // 如果没有更多数据了，就返回

                  // 假设 frame 是一个 VideoFrame 对象
                  const canvas = document.createElement("canvas");
                  const ctx = canvas.getContext("2d");

                  // 设置 canvas 尺寸为视频帧的宽高
                  canvas.width = frame.displayWidth;
                  canvas.height = frame.displayHeight;

                  // 将帧绘制到 canvas 上
                  ctx.drawImage(frame, 0, 0);

                  // 获取 canvas 上的图像数据
                  const imageData = ctx.getImageData(
                    0,
                    0,
                    canvas.width,
                    canvas.height
                  );

                  // 将 WebCodecs 的 VideoFrame 转为 OpenCV Mat
                  let mat = cv.matFromImageData(imageData);

                  // 获取图像的宽度和高度
                  const width = mat.cols;
                  const height = mat.rows;

                  // 计算左右两部分的分割点
                  const mid = Math.floor(width / 2);
                  const leftMat = mat.roi(new cv.Rect(0, 0, mid, height)); //左边部分
                  let resized = new cv.Mat();
                  //按当前窗口大小进行缩放
                  let ratio = width/2 / height;
                  let resizedWidth = 1000;
                  let resizedHeight = 1000;
                  if ((window.innerWidth -360) / (window.innerHeight-150) > ratio) {
                    resizedWidth = (window.innerHeight-150) * ratio;
                    resizedHeight = (window.innerHeight-150);
                  } else {
                    resizedWidth = window.innerWidth - 360;
                    resizedHeight = (window.innerWidth - 360) / ratio;
                  }
                  cv.resize(
                    leftMat,
                    resized,
                    new cv.Size(
                      // Math.min(resizedWidth, width / 2),
                      resizedWidth,
                      // Math.min(resizedHeight, height)
                      resizedHeight
                    )
                  );

                  // 显示左边部分的图像
                  cv.imshow("can", resized);

                  // 释放 OpenCV Mat 资源
                  mat.delete();
                  leftMat.delete();
                  resized.delete();
                  // 关闭帧对象
                  frame.close();

                  // 处理下一帧
                  // 使用 requestAnimationFrame 或 setTimeout 来避免堆栈溢出
                  // await new Promise((resolve) => setTimeout(resolve, 0)); // 通过异步等待继续处理
                  processVideo();
                }
              } catch (error) {
                console.error("Error processing video frame:", error);
              }
            }
            // 开始处理视频流
            processVideo();
          } else if (event.track.kind === "audio") {
            console.log("audio轨道已接入");
          }
          // Add track to stream of SDK.
          self.stream.addTrack(event.track);
        };

        self.pc = new RTCPeerConnection(null);

        self.stream = new MediaStream();

        // Internal APIs.
        self.__internal = {
          parseId: (url, offer, answer) => {
            let sessionid = offer.substr(
              offer.indexOf("a=ice-ufrag:") + "a=ice-ufrag:".length
            );
            sessionid = sessionid.substr(0, sessionid.indexOf("\n") - 1) + ":";
            sessionid += answer.substr(
              answer.indexOf("a=ice-ufrag:") + "a=ice-ufrag:".length
            );
            sessionid = sessionid.substr(0, sessionid.indexOf("\n"));

            const a = document.createElement("a");
            a.href = url;
            return {
              sessionid: sessionid, // Should be ice-ufrag of answer:offer.
              simulator: a.protocol + "//" + a.host + "/rtc/v1/nack/",
            };
          },
        };

        // https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/ontrack
        self.pc.ontrack = function (event) {
          if (self.ontrack) {
            self.ontrack(event);
          }
        };
        return self;
      }
    }
    this.sdk = new SrsRtcWhipWhepAsync(this);
    this.videoSrc =
      "http://192.168.130.40:1985/rtc/v1/whep/?app=live&stream=livestream";
    // "http://172.16.1.110:1985/rtc/v1/whep/?app=live&stream=livestream";
    this.mediaPlayer = document.getElementById("jswebrtc");
    this.mediaPlayer.srcObject = this.sdk.stream;
    this.sdk.play(this.videoSrc).catch(function (reason) {
      this.sdk.close();
      this.mediaPlayer.style.display="none";
      console.error(reason);
    });
  },
  beforeDestroy() {
    this.sdk.close();
  },
};
</script>
<style lang="less" scoped>
// Center the canvas within the component's area (flexbox centering).
#content {
  // height: 100vh;
  display: flex;
  justify-content: center;
  align-items: center;
}
</style>
