<template>
  <div class="content">
    <!-- The hidden <video> receives the raw WebRTC stream; frames are
         processed with OpenCV and rendered onto the canvas instead. -->
    <video id="jswebrtc" controls autoplay style="display: none"></video>
    <canvas id="can"></canvas>
  </div>
</template>
<script>
/* global cv */
export default {
  data() {
    return {
      sdk: null,
      videoSrc: null,
      mediaPlayer: null,
    };
  },
  mounted() {
    class SrsRtcWhipWhepAsync {
      constructor(vueInstance) {
        var self = {};
        self.vueInstance = vueInstance; // 将 Vue 实例传递给 self 对象
        self.constraints = {
          audio: true,
          video: {
            width: { ideal: 320, max: 576 },
          },
        };

        self.publish = async function (url, options) {
          if (url.indexOf("/whip/") === -1)
            throw new Error(`invalid WHIP url ${url}`);
          const hasAudio = options?.audio ?? true;
          const useCamera = options?.camera ?? true;
          const useScreen = options?.screen ?? false;

          if (!hasAudio && !useCamera && !useScreen)
            throw new Error(
              `The camera, screen and audio can't be false at the same time`
            );

          if (hasAudio) {
            self.pc.addTransceiver("audio", { direction: "sendonly" });
          } else {
            self.constraints.audio = false;
          }

          if (useCamera || useScreen) {
            self.pc.addTransceiver("video", { direction: "sendonly" });
          }

          if (!useCamera) {
            self.constraints.video = false;
          }

          if (
            !navigator.mediaDevices &&
            window.location.protocol === "http:" &&
            window.location.hostname !== "localhost"
          ) {
            throw new SrsError(
              "HttpsRequiredError",
              `Please use HTTPS or localhost to publish, read https://github.com/ossrs/srs/issues/2762#issuecomment-983147576`
            );
          }

          if (useScreen) {
            const displayStream = await navigator.mediaDevices.getDisplayMedia({
              video: true,
            });
            // @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
            displayStream.getTracks().forEach(function (track) {
              self.pc.addTrack(track);
              // Notify about local track when stream is ok.
              self.ontrack && self.ontrack({ track: track });
            });
          }

          if (useCamera || hasAudio) {
            const userStream = await navigator.mediaDevices.getUserMedia(
              self.constraints
            );

            userStream.getTracks().forEach(function (track) {
              self.pc.addTrack(track);
              // Notify about local track when stream is ok.
              self.ontrack && self.ontrack({ track: track });
            });
          }

          var offer = await self.pc.createOffer();
          await self.pc.setLocalDescription(offer);
          const answer = await new Promise(function (resolve, reject) {
            console.log(`Generated offer: ${offer.sdp}`);

            const xhr = new XMLHttpRequest();
            xhr.onload = function () {
              if (xhr.readyState !== xhr.DONE) return;
              if (xhr.status !== 200 && xhr.status !== 201) return reject(xhr);
              const data = xhr.responseText;
              console.log("Got answer: ", data);
              return data.code ? reject(xhr) : resolve(data);
            };
            xhr.open("POST", url, true);
            xhr.setRequestHeader("Content-type", "application/sdp");
            xhr.send(offer.sdp);
          });
          await self.pc.setRemoteDescription(
            new RTCSessionDescription({ type: "answer", sdp: answer })
          );

          return self.__internal.parseId(url, offer.sdp, answer);
        };

        self.play = async function (url, options) {
          if (url.indexOf("/whip-play/") === -1 && url.indexOf("/whep/") === -1)
            throw new Error(`invalid WHEP url ${url}`);
          if (options?.videoOnly && options?.audioOnly)
            throw new Error(
              `The videoOnly and audioOnly in options can't be true at the same time`
            );

          if (!options?.videoOnly)
            self.pc.addTransceiver("audio", { direction: "recvonly" });
          if (!options?.audioOnly)
            self.pc.addTransceiver("video", { direction: "recvonly" });

          var offer = await self.pc.createOffer();
          await self.pc.setLocalDescription(offer);
          const answer = await new Promise(function (resolve, reject) {
            // console.log(`Generated offer: ${offer.sdp}`);

            const xhr = new XMLHttpRequest();
            xhr.onload = function () {
              if (xhr.readyState !== xhr.DONE) return;
              if (xhr.status !== 200 && xhr.status !== 201) return reject(xhr);
              const data = xhr.responseText;
              // console.log("Got answer: ", data);
              return data.code ? reject(xhr) : resolve(data);
            };
            xhr.open("POST", url, true);
            xhr.setRequestHeader("Content-type", "application/sdp");
            xhr.send(offer.sdp);
          });
          await self.pc.setRemoteDescription(
            new RTCSessionDescription({ type: "answer", sdp: answer })
          );

          return self.__internal.parseId(url, offer.sdp, answer);
        };

        // Close the publisher.
        self.close = function () {
          self.pc && self.pc.close();
          self.pc = null;
        };

        self.ontrack = (event) => {
          // Add track to stream of SDK.
          // console.log("ontrack", event);
          if (event.track.kind === "video") {
            // 创建 MediaStreamTrackProcessor
            const processor = new MediaStreamTrackProcessor({
              track: event.track,
            });

            // 处理视频帧 首先获取读取器
            const reader = processor.readable.getReader();

            // 使用递归函数处理视频帧
            async function processVideo() {
              let start = performance.now();
              const { done, value: frame } = await reader.read(); // 从读取器读取一帧
              if (done) return; // 如果没有更多数据了，就返回
              // 假设 frame 是一个 VideoFrame 对象
              const canvas = document.createElement("canvas");
              const ctx = canvas.getContext("2d");

              // 设置 canvas 尺寸为视频帧的宽高
              canvas.width = frame.displayWidth;
              canvas.height = frame.displayHeight;

              // 将帧绘制到 canvas 上
              ctx.drawImage(frame, 0, 0);

              // 获取 canvas 上的图像数据
              const imageData = ctx.getImageData(
                0,
                0,
                canvas.width,
                canvas.height
              );
              // 将 WebCodecs 的 VideoFrame 转为 OpenCV Mat
              let mat = cv.matFromImageData(imageData);
              // 获取图像的宽度和高度
              const width = mat.cols;
              const height = mat.rows;
              // 计算左右两部分的分割点
              const mid = Math.floor(width / 2);
              const leftMat = mat.roi(new cv.Rect(0, 0, mid, height)); //左边部分
              const rightMat = mat.roi(
                new cv.Rect(mid, 0, width - mid, height)
              ); //右边部分

              // 使用 OpenCV.js 执行图像拼接和去重处理（假设 stitchAndRemoveDuplicates 是定义好的）
              let resultImage;
              // console.log(self.vueInstance.$options.method)
              if (self.vueInstance) {
                resultImage =
                  self.vueInstance.$options.method.stitchAndRemoveDuplicates(
                    // 调用 Vue 实例中的 methodName
                    leftMat,
                    rightMat
                  );
              }
              //按当前窗口大小进行缩放
              let ratio = width/2 / height;
              let resizedWidth = 1000;
              let resizedHeight = 1000;
              if ((window.innerWidth - 360) / (window.innerHeight-125) > ratio) {
                resizedWidth = (window.innerHeight-150) * ratio;
                resizedHeight = (window.innerHeight-150);
              } else {
                resizedWidth = window.innerWidth - 360;
                resizedHeight = (window.innerWidth - 360) / ratio;
              }
              let resized = new cv.Mat();
              cv.resize(
                resultImage,
                resized,
                new cv.Size(
                  // Math.min(resizedWidth, width / 2),
                  resizedWidth,
                  // Math.min(resizedHeight, height)
                  resizedHeight
                )
              );
              cv.imshow("can", resized);
              // 释放 OpenCV Mat 资源
              mat.delete();
              rightMat.delete();
              leftMat.delete();
              resultImage.delete();
              resized.delete();
              //关闭帧对象
              frame.close();
              // let end = performance.now();
              // const time = end - start;
              // console.log("处理一帧耗时：", time.toFixed(2));
              // 继续处理下一帧
              processVideo();
            }
            // 开始处理视频流
            processVideo();
            // self.stream.addTrack(generator);
          }
          self.stream.addTrack(event.track);
        };

        self.pc = new RTCPeerConnection(null);

        self.stream = new MediaStream();

        // Internal APIs.
        self.__internal = {
          parseId: (url, offer, answer) => {
            let sessionid = offer.substr(
              offer.indexOf("a=ice-ufrag:") + "a=ice-ufrag:".length
            );
            sessionid = sessionid.substr(0, sessionid.indexOf("\n") - 1) + ":";
            sessionid += answer.substr(
              answer.indexOf("a=ice-ufrag:") + "a=ice-ufrag:".length
            );
            sessionid = sessionid.substr(0, sessionid.indexOf("\n"));

            const a = document.createElement("a");
            a.href = url;
            return {
              sessionid: sessionid, // Should be ice-ufrag of answer:offer.
              simulator: a.protocol + "//" + a.host + "/rtc/v1/nack/",
            };
          },
        };

        // https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/ontrack
        self.pc.ontrack = (event) => {
          if (self.ontrack) {
            self.ontrack(event);
          }
        };

        return self;
      }
    }
    this.sdk = new SrsRtcWhipWhepAsync(this);
    this.videoSrc =
      "http://192.168.40.130:1985/rtc/v1/whep/?app=live&stream=livestream";
    // "http://172.16.1.110:1985/rtc/v1/whep/?app=live&stream=livestream";
    this.mediaPlayer = document.getElementById("jswebrtc");
    this.mediaPlayer.srcObject = this.sdk.stream;
    this.sdk.play(this.videoSrc).catch(function (reason) {
      this.sdk.close();
      this.mediaPlayer.hide();
      console.error(reason);
    });
  },
  method: {
    // 拼接图片并去重
    stitchAndRemoveDuplicates(mat1, mat2) {
      // 夜色空间转换
      let gray1 = new cv.Mat();
      let gray2 = new cv.Mat();
      cv.cvtColor(mat1, gray1, cv.COLOR_RGBA2GRAY);
      cv.cvtColor(mat2, gray2, cv.COLOR_RGBA2GRAY);

      // 使用高斯模糊进行去噪
      cv.GaussianBlur(gray1, gray1, new cv.Size(5, 5), 1.5, 1.5);
      cv.GaussianBlur(gray2, gray2, new cv.Size(5, 5), 1.5, 1.5);

      // ORB 特征提取
      let orb = new cv.ORB();
      orb.setEdgeThreshold(15); // 增加边缘阈值，可以改善特征提取
      // orb.setFastThreshold(20); // 控制ORB角点检测的敏感度
      let keypoints1 = new cv.KeyPointVector();
      let keypoints2 = new cv.KeyPointVector();
      let descriptors1 = new cv.Mat();
      let descriptors2 = new cv.Mat();

      orb.detectAndCompute(gray1, new cv.Mat(), keypoints1, descriptors1);
      orb.detectAndCompute(gray2, new cv.Mat(), keypoints2, descriptors2);

      // 使用暴力匹配器进行特征匹配
      let matcher = new cv.BFMatcher(cv.NORM_HAMMING, true);
      // let matcher = new cv.BFMatcher(cv.NORM_L2, true);
      let matches = new cv.DMatchVector();
      matcher.match(descriptors1, descriptors2, matches);

      //使用Low's ratio test过滤不好的匹配
      let goodMatches = [];
      let ratio_thresh = 0.7; // 比率阈值

      for (let i = 0; i < matches.size(); i++) {
        let match = matches.get(i);
        let dist1 = match.distance;
        let dist2 = matches.get(i + 1) ? matches.get(i + 1).distance : dist1;

        // Lowe's ratio test
        if (dist1 < ratio_thresh * dist2) {
          goodMatches.push(match);
        }
      }

      // 提取匹配点并存储为 cv.Mat 格式
      let matchedPoints1 = [];
      let matchedPoints2 = [];
      for (let i = 0; i < goodMatches.length; i++) {
        let match = goodMatches[i];
        matchedPoints1.push(keypoints1.get(match.queryIdx).pt); // 获取第一张图的匹配点
        matchedPoints2.push(keypoints2.get(match.trainIdx).pt); // 获取第二张图的匹配点
      }

      // 将匹配点转换为 Mat 类型
      let points1Array = matchedPoints1.map((p) => [p.x, p.y]);
      let points2Array = matchedPoints2.map((p) => [p.x, p.y]);

      let points1 = cv.matFromArray(
        points1Array.length,
        2,
        cv.CV_32FC1,
        points1Array.flat()
      );
      let points2 = cv.matFromArray(
        points2Array.length,
        2,
        cv.CV_32FC1,
        points2Array.flat()
      );

      // 计算透视变换矩阵
      let H = cv.findHomography(points1, points2, cv.RANSAC, 5.0);

      // 计算仿射变换矩阵
      // let M = cv.estimateAffinePartial2D(points1, points2);

      // 透视变换第二张图片
      let warpedImage2 = new cv.Mat(
        mat1.rows,
        mat1.cols + mat2.cols / 15,
        mat1.type()
      );
      cv.warpPerspective(
        mat2,
        warpedImage2,
        H,
        new cv.Size(mat1.cols + mat2.cols / 15, mat1.rows),
        // new cv.Size(mat2.cols , mat2.rows),
        cv.INTER_CUBIC // 使用线性插值  双三次插值
        // cv.BORDER_REFLECT // 使用反射边界处理模式
      );

      // 将创建拼接后的图像存到resultImage中
      let resultImage = warpedImage2.clone();
      // mat1.copyTo(resultImage.roi(new cv.Rect(0, 0, mat1.cols, mat1.rows)));
      // let resultImage=new cv.Mat()
      // cv.addWeighted(mat1, 0.3, warpedImage2, 0.7, 0, resultImage);

      // 释放资源
      gray1.delete();
      gray2.delete();
      descriptors1.delete();
      descriptors2.delete();
      matches.delete();
      points1.delete();
      points2.delete();
      H.delete();
      warpedImage2.delete();

      return resultImage;
    },
  },
  beforeDestroy() {
    this.sdk.close();
  },
};
</script>
<style lang="less" scoped>
.content {
  margin: 0;
  // FIX: `100vw-200px` is not a valid CSS length and Less cannot subtract
  // mixed units written this way; let the browser do the math with calc().
  width: calc(100vw - 200px);
  display: flex;
  justify-content: center;
  align-items: center;
}
</style>
