package top.wilsonlv.jaguar.webrtc.compnent;

import dev.onvoid.webrtc.PeerConnectionObserver;
import dev.onvoid.webrtc.RTCIceConnectionState;
import dev.onvoid.webrtc.RTCRtpTransceiver;
import dev.onvoid.webrtc.media.MediaStreamTrack;
import dev.onvoid.webrtc.media.video.I420Buffer;
import dev.onvoid.webrtc.media.video.VideoFrame;
import dev.onvoid.webrtc.media.video.VideoFrameBuffer;
import dev.onvoid.webrtc.media.video.VideoTrack;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;

import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.nio.ByteBuffer;

/**
 * Base {@link PeerConnectionObserver} that previews incoming remote video in a Swing window.
 *
 * <p>Only {@link #onTrack(RTCRtpTransceiver)} is implemented here: a sink is attached to every
 * received {@link VideoTrack}, each delivered I420 frame is converted to a {@link BufferedImage},
 * and the image is painted into a {@link JLabel} on the Swing Event Dispatch Thread. Subclasses
 * supply the remaining observer callbacks.
 *
 * @author lvws
 * @since 2025/1/10
 */
@Slf4j
public abstract class VideoStreamPeerConnectionObserver implements PeerConnectionObserver {

    /** Preview window; exposed so callers can reposition, resize, or dispose it. */
    @Getter
    private final JFrame jFrame = new JFrame("Preview");

    /** Frames are rendered by swapping this label's icon. */
    private final JLabel label = new JLabel();


    // NOTE(review): Swing components are created here off the EDT, and EXIT_ON_CLOSE terminates
    // the whole JVM when the preview window is closed — confirm both are intended for this
    // application before reusing this class elsewhere.
    {
        jFrame.setSize(800, 600);
        jFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        jFrame.add(label, BorderLayout.CENTER);
        jFrame.setVisible(true);
    }


    /**
     * Attaches a rendering sink to the incoming track if it is a video track.
     *
     * @param transceiver transceiver whose receiver carries the newly negotiated track
     */
    @Override
    public void onTrack(RTCRtpTransceiver transceiver) {
        log.info("onTrack");
        MediaStreamTrack track = transceiver.getReceiver().getTrack();
        if (track instanceof VideoTrack videoTrack) {
            videoTrack.addSink(frame -> {
                VideoFrameBuffer buffer = frame.buffer;
                // Per-frame messages are logged at debug: info level would emit
                // several lines per frame, dozens of times per second.
                log.debug("width: {}, height: {}", buffer.getWidth(), buffer.getHeight());

                // Convert synchronously, while the native frame buffer is still valid
                // for the duration of this sink callback.
                BufferedImage bufferedImage = frame2Image(frame);
                log.debug("转化成功");

                // The sink callback runs on a native WebRTC thread; Swing components are
                // not thread-safe and must only be mutated on the Event Dispatch Thread.
                // The BufferedImage is freshly allocated, so handing it off is safe.
                SwingUtilities.invokeLater(() -> {
                    label.setIcon(new ImageIcon(bufferedImage));
                    log.debug("渲染成功");
                });
            });
        }
    }

    /**
     * Converts an I420 (YUV 4:2:0) video frame into a {@code TYPE_3BYTE_BGR}
     * {@link BufferedImage} using BT.601 full-range YUV→RGB coefficients.
     *
     * @param frame frame delivered by the video sink; its buffer is converted via
     *              {@link VideoFrameBuffer#toI420()} and the I420 copy is released here
     * @return a freshly allocated image, safe to publish to another thread
     */
    private BufferedImage frame2Image(VideoFrame frame) {
        I420Buffer i420Buffer = frame.buffer.toI420();
        try {
            int width = i420Buffer.getWidth();
            int height = i420Buffer.getHeight();
            ByteBuffer yBuffer = i420Buffer.getDataY();
            ByteBuffer uBuffer = i420Buffer.getDataU();
            ByteBuffer vBuffer = i420Buffer.getDataV();
            int yStride = i420Buffer.getStrideY();
            int uStride = i420Buffer.getStrideU();
            int vStride = i420Buffer.getStrideV();

            BufferedImage bufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);

            // Write pixels straight into the image's backing byte array.
            byte[] bgrData = ((DataBufferByte) bufferedImage.getRaster().getDataBuffer()).getData();
            for (int y = 0; y < height; y++) {
                // Hoist the loop-invariant row offsets out of the inner pixel loop.
                // U and V planes are subsampled 2x2, hence the y / 2 row index.
                int yRow = y * yStride;
                int uRow = (y / 2) * uStride;
                int vRow = (y / 2) * vStride;
                int outRow = y * width * 3;
                for (int x = 0; x < width; x++) {
                    int yValue = yBuffer.get(yRow + x) & 0xFF;
                    int uValue = uBuffer.get(uRow + (x / 2)) & 0xFF;
                    int vValue = vBuffer.get(vRow + (x / 2)) & 0xFF;

                    // BT.601 YUV -> RGB, clamped to [0, 255].
                    int r = Math.max(0, Math.min(255, (int) (yValue + 1.402 * (vValue - 128))));
                    int g = Math.max(0, Math.min(255, (int) (yValue - 0.34414 * (uValue - 128) - 0.71414 * (vValue - 128))));
                    int b = Math.max(0, Math.min(255, (int) (yValue + 1.772 * (uValue - 128))));

                    // TYPE_3BYTE_BGR stores components in B, G, R order.
                    int i = outRow + x * 3;
                    bgrData[i] = (byte) b;
                    bgrData[i + 1] = (byte) g;
                    bgrData[i + 2] = (byte) r;
                }
            }
            return bufferedImage;
        } finally {
            // toI420() returns a retained buffer (possibly a converted copy);
            // it must always be released to avoid leaking native memory.
            i420Buffer.release();
        }
    }

}
