//
// Created by pengx on 2025/11/19.
//

#include "rtp_sender.hpp"

#include <android/log.h>

#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>

#include <cerrno>
#include <cstdio>
#include <cstdlib>
#include <cstring>

#define TAG "JNI-RtpSender"
#define LGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define LGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)

/**
 * Append an MPEG-2 PS Pack Header (ISO/IEC 13818-1 §2.5.3.1), 14 bytes total.
 *
 * @param buf output buffer (bytes are appended)
 * @param scr System Clock Reference in 90 kHz ticks (same clock as the PTS)
 */
static void writePsPackHeader(std::vector<uint8_t> &buf, uint64_t scr) {
    // pack_start_code: 0x000001BA
    buf.push_back(0x00);
    buf.push_back(0x00);
    buf.push_back(0x01);
    buf.push_back(0xBA);

    // The caller supplies a 90 kHz timestamp, which IS the 33-bit SCR base.
    // (The former /300 split made the SCR run 300x too slow: dividing by 300
    // is only correct when the input is a 27 MHz clock, with the 9-bit
    // extension holding the remainder.)
    uint64_t scr_base = scr & 0x1FFFFFFFFULL; // 33 bits
    uint16_t scr_ext = 0;                     // no 27 MHz sub-tick resolution

    // SCR field, exactly 6 bytes per spec:
    // '01' + base[32..30] + marker + base[29..28]
    buf.push_back(static_cast<uint8_t>(
            0x44 | ((scr_base >> 27) & 0x38) | ((scr_base >> 28) & 0x03)));
    // base[27..20]
    buf.push_back(static_cast<uint8_t>((scr_base >> 20) & 0xFF));
    // base[19..15] + marker + base[14..13]
    buf.push_back(static_cast<uint8_t>(
            ((scr_base >> 12) & 0xF8) | 0x04 | ((scr_base >> 13) & 0x03)));
    // base[12..5]
    buf.push_back(static_cast<uint8_t>((scr_base >> 5) & 0xFF));
    // base[4..0] + marker + ext[8..7]
    buf.push_back(static_cast<uint8_t>(
            ((scr_base << 3) & 0xF8) | 0x04 | ((scr_ext >> 7) & 0x03)));
    // ext[6..0] + marker
    buf.push_back(static_cast<uint8_t>(((scr_ext << 1) & 0xFE) | 0x01));

    // program_mux_rate: 22 bits + two marker bits — all ones = unconstrained.
    buf.push_back(0xFF);
    buf.push_back(0xFF);
    buf.push_back(0xFF);

    // reserved(5)='11111' + pack_stuffing_length(3)=0. The previous 0xE1
    // declared one stuffing byte that was never written, which made parsers
    // eat the first byte of the following header.
    buf.push_back(0xF8);
}

/**
 * Append an MPEG-2 PS System Header (ISO/IEC 13818-1 §2.5.3.5).
 *
 * @param buf      output buffer (bytes are appended)
 * @param hasVideo emit a P-STD entry for the video stream (id 0xE0)
 * @param hasAudio emit a P-STD entry for the audio stream (id 0xC0)
 */
static void writeSystemHeader(std::vector<uint8_t> &buf, bool hasVideo = true,
                              bool hasAudio = false) {
    std::vector<uint8_t> sys;

    // system_header_start_code: 0x000001BB
    sys.push_back(0x00);
    sys.push_back(0x00);
    sys.push_back(0x01);
    sys.push_back(0xBB);

    // header_length placeholder (filled in at the end).
    size_t lenPos = sys.size();
    sys.push_back(0x00);
    sys.push_back(0x00);

    // marker(1) + rate_bound(22) + marker(1): all ones = rate unconstrained.
    sys.push_back(0xFF);
    sys.push_back(0xFF);
    sys.push_back(0xFF);

    // The previous version jumped straight from rate_bound to the stream
    // entries, omitting the three mandatory bytes below.
    // audio_bound(6) + fixed_flag(0) + CSPS_flag(0).
    uint8_t audioBound = hasAudio ? 1 : 0;
    sys.push_back(static_cast<uint8_t>(audioBound << 2));

    // system_audio_lock_flag(1) + system_video_lock_flag(1) + marker(1) + video_bound(5).
    uint8_t videoBound = hasVideo ? 1 : 0;
    sys.push_back(static_cast<uint8_t>(0xE0 | (videoBound & 0x1F)));

    // packet_rate_restriction_flag(0) + reserved(7) = all ones.
    sys.push_back(0x7F);

    // Per-stream P-STD entry: stream_id + '11' + buffer_bound_scale(1) + buffer_size_bound(13).
    auto addStream = [&](uint8_t stream_id, bool isVideo) {
        sys.push_back(stream_id);

        // scale=1 → units of 1024 bytes (video); scale=0 → 128 bytes (audio).
        uint8_t scale = isVideo ? 1 : 0;

        // Advisory decode-buffer sizes: video 0x1000×1024 = 4 MB,
        // audio 0x200×128 = 64 KB (same values as before).
        uint16_t bufferSizeBound = isVideo ? 0x1000 : 0x200;

        // '11'(2) + scale(1) + bound(13): the marker bits sit at the TOP of
        // the 16-bit field per spec (the old code put them at the bottom).
        uint16_t info = 0xC000
                | (static_cast<uint16_t>(scale) << 13)
                | (bufferSizeBound & 0x1FFF);

        sys.push_back((info >> 8) & 0xFF);
        sys.push_back(info & 0xFF);
    };

    if (hasVideo) {
        addStream(0xE0, true);
    }
    if (hasAudio) {
        addStream(0xC0, false);
    }

    // header_length counts every byte after the length field.
    auto headerLength = static_cast<uint16_t>(sys.size() - lenPos - 2);
    sys[lenPos] = (headerLength >> 8) & 0xFF;
    sys[lenPos + 1] = headerLength & 0xFF;

    buf.insert(buf.end(), sys.begin(), sys.end());
}

/**
 * Append a Program Stream Map (PSM, ISO/IEC 13818-1 §2.5.4.1) to buf.
 *
 * @param buf      output buffer (bytes are appended)
 * @param isH265   true = H.265 (HEVC, stream_type 0x24), false = H.264 (AVC, 0x1B)
 * @param hasAudio include a G.711 audio entry (stream_type 0x90)
 *
 * stream_type reference: 0x1B=H.264, 0x24=H.265, 0x90=G.711, 0x0F=AAC.
 */
static void writeProgramStreamMap(std::vector<uint8_t> &buf, bool isH265 = false,
                                  bool hasAudio = false) {
    std::vector<uint8_t> psm;

    // packet_start_code_prefix + map_stream_id (0xBC for PSM).
    psm.push_back(0x00);
    psm.push_back(0x00);
    psm.push_back(0x01);
    psm.push_back(0xBC);

    // program_stream_map_length placeholder.
    size_t lenPos = psm.size();
    psm.push_back(0x00);
    psm.push_back(0x00);

    // current_next_indicator=1 is the MSB: 0xE0 (the old 0x7F had it cleared,
    // marking the map "not yet applicable").
    psm.push_back(0xE0); // current_next=1, reserved, version=0
    psm.push_back(0xFF); // reserved(7) + marker_bit(1)

    // program_stream_info_length = 0 (no program descriptors) — this 16-bit
    // field is mandatory and was previously missing.
    psm.push_back(0x00);
    psm.push_back(0x00);

    // elementary_stream_map_length placeholder.
    size_t mapLenPos = psm.size();
    psm.push_back(0x00);
    psm.push_back(0x00);

    // Each entry: stream_type + elementary_stream_id + es_info_length(=0).
    // The 16-bit es_info_length is mandatory and was previously missing.
    auto addStream = [&](uint8_t stream_type, uint8_t stream_id) {
        psm.push_back(stream_type);
        psm.push_back(stream_id);
        psm.push_back(0x00);
        psm.push_back(0x00);
    };

    addStream(isH265 ? 0x24 : 0x1B, 0xE0); // video
    if (hasAudio) {
        addStream(0x90, 0xC0);             // G.711
    }

    // elementary_stream_map_length is a plain 16-bit byte count of the
    // entries (the old code wrongly OR'd 0xE0 into the high byte).
    auto elemMapLen = static_cast<uint16_t>(psm.size() - mapLenPos - 2);
    psm[mapLenPos] = (elemMapLen >> 8) & 0xFF;
    psm[mapLenPos + 1] = elemMapLen & 0xFF;

    // CRC_32: left at 0 — most GB28181 platforms do not verify it.
    psm.push_back(0x00);
    psm.push_back(0x00);
    psm.push_back(0x00);
    psm.push_back(0x00);

    // program_stream_map_length: all bytes after the field, CRC included.
    auto totalLen = static_cast<uint16_t>(psm.size() - lenPos - 2);
    psm[lenPos] = (totalLen >> 8) & 0xFF;
    psm[lenPos + 1] = totalLen & 0xFF;

    buf.insert(buf.end(), psm.begin(), psm.end());
}

/**
 * Append a PES packet header (ISO/IEC 13818-1 §2.4.3.6) to buf.
 *
 * @param buf          output buffer (bytes are appended)
 * @param stream_id    0xE0 = video, 0xC0 = audio
 * @param payload_size number of PES payload bytes that will follow this header
 * @param pts          33-bit presentation timestamp (90 kHz clock)
 * @param has_pts      write the 5-byte PTS field (usually true for every frame)
 */
static void writePesHeader(std::vector<uint8_t> &buf, uint8_t stream_id, size_t payload_size,
                           uint64_t pts, bool has_pts = true) {
    // NOTE: the old version first inserted a stray {00 00 01 E0 00 00} block
    // ("暂填长度 0") and then wrote the real start code again, corrupting
    // every PES packet with 6 junk bytes. That insert is removed.

    // packet_start_code_prefix + stream_id.
    buf.push_back(0x00);
    buf.push_back(0x00);
    buf.push_back(0x01);
    buf.push_back(stream_id);

    size_t headerDataLen = has_pts ? 5 : 0;

    // PES_packet_length counts every byte AFTER this field: the 3 fixed
    // flag/length bytes + optional header data + the payload itself
    // (the old code counted only the payload). 0 = unbounded (video only)
    // when the true length does not fit in 16 bits.
    size_t lenAfterField = 3 + headerDataLen + payload_size;
    uint16_t pesLen = (lenAfterField <= 0xFFFF) ? static_cast<uint16_t>(lenAfterField) : 0;
    buf.push_back((pesLen >> 8) & 0xFF);
    buf.push_back(pesLen & 0xFF);

    // '10' + scrambling(00) + priority(0) + alignment(0) + copyright(0) + original(0).
    buf.push_back(0x80);

    // PTS_DTS_flags='10' when a PTS follows; all other optional fields absent.
    buf.push_back(has_pts ? 0x80 : 0x00);

    // PES_header_data_length: bytes of optional header data that follow.
    buf.push_back(static_cast<uint8_t>(headerDataLen));

    if (has_pts) {
        pts &= 0x1FFFFFFFFULL; // keep 33 bits

        // '0010' + PTS[32..30] + marker, then two (15 bits + marker) groups.
        buf.push_back(static_cast<uint8_t>(0x21 | ((pts >> 29) & 0x0E)));
        buf.push_back(static_cast<uint8_t>((pts >> 22) & 0xFF));
        buf.push_back(static_cast<uint8_t>(0x01 | ((pts >> 14) & 0xFE)));
        buf.push_back(static_cast<uint8_t>((pts >> 7) & 0xFF));
        buf.push_back(static_cast<uint8_t>(0x01 | ((pts << 1) & 0xFE)));
    }
}

/**
 * Create and configure the UDP socket used for RTP output.
 *
 * Binds an ephemeral local port (written back into sdp.local_port), parses
 * the optional SSRC string, and resolves the remote address from the SDP.
 *
 * @param sdp negotiated session parameters; local_port is updated on success
 * @return true on success; on failure the socket is closed and reset to -1
 */
bool RtpSender::initRtpSocket(SdpStruct &sdp) {
    if (sdp.remote_ip.empty() || sdp.remote_port == 0) {
        LGE("Invalid SDP parameters");
        return false;
    }

    // Create the UDP socket.
    _rtp_socket = socket(AF_INET, SOCK_DGRAM, 0);
    if (_rtp_socket < 0) {
        LGE("Failed to create UDP socket: %s", strerror(errno));
        return false;
    }

    // Bind to port 0 so the kernel picks a free local port.
    struct sockaddr_in local_addr{};
    memset(&local_addr, 0, sizeof(local_addr));
    local_addr.sin_family = AF_INET;
    local_addr.sin_addr.s_addr = htonl(INADDR_ANY);
    local_addr.sin_port = htons(0);

    if (bind(_rtp_socket, (struct sockaddr *) &local_addr, sizeof(local_addr)) < 0) {
        LGE("Failed to bind local port: %s", strerror(errno));
        close(_rtp_socket);
        _rtp_socket = -1; // prevent a double close in the destructor
        return false;
    }

    // Read back the kernel-assigned local port for the SDP answer.
    socklen_t addr_len = sizeof(local_addr);
    if (getsockname(_rtp_socket, (struct sockaddr *) &local_addr, &addr_len) == 0) {
        sdp.local_port = ntohs(local_addr.sin_port);
    } else {
        LGE("getsockname failed: %s", strerror(errno)); // non-fatal, keep going
    }

    // Parse the SSRC defensively: std::stoul would throw (and abort the JNI
    // process, since nothing catches here) on a malformed string.
    if (!sdp.ssrc.empty()) {
        char *end = nullptr;
        errno = 0;
        unsigned long v = strtoul(sdp.ssrc.c_str(), &end, 10);
        if (errno == 0 && end != nullptr && *end == '\0') {
            _ssrc = static_cast<uint32_t>(v);
        } else {
            LGE("Invalid SSRC string: %s", sdp.ssrc.c_str());
        }
    }

    // Resolve the remote RTP endpoint.
    memset(&_remote_addr, 0, sizeof(_remote_addr));
    _remote_addr.sin_family = AF_INET;
    _remote_addr.sin_port = htons(sdp.remote_port);
    if (inet_pton(AF_INET, sdp.remote_ip.c_str(), &_remote_addr.sin_addr) <= 0) {
        LGE("Invalid IP address: %s", sdp.remote_ip.c_str());
        close(_rtp_socket);
        _rtp_socket = -1; // prevent a double close in the destructor
        return false;
    }

    LGD("initRtpSocket - Local Port: %d, Remote IP: %s, Port: %d, SSRC: %s",
        sdp.local_port, sdp.remote_ip.c_str(), sdp.remote_port, sdp.ssrc.c_str());

    return true;
}

/**
 * Cache the H.264 SPS/PPS parameter sets for later injection before key frames.
 *
 * Either pointer may be null (that parameter set is left unchanged). The
 * currently cached sets are hex-dumped to the debug log.
 *
 * @param sps SPS NALU bytes (without start code), may be null
 * @param sps_len length of sps in bytes
 * @param pps PPS NALU bytes (without start code), may be null
 * @param pps_len length of pps in bytes
 */
void RtpSender::initSpsPps(const uint8_t *sps, size_t sps_len, const uint8_t *pps, size_t pps_len) {
    if (sps && sps_len > 0) {
        _sps.assign(sps, sps + sps_len);
    }
    if (pps && pps_len > 0) {
        _pps.assign(pps, pps + pps_len);
    }

    // Hex-dump helper; snprintf bounds the 3-char ("XX ") + NUL write,
    // replacing the unbounded sprintf used before.
    auto logHex = [](const char *label, const std::vector<uint8_t> &data) {
        if (data.empty()) return;
        std::string line = std::string(label) + " (" + std::to_string(data.size()) + " bytes): ";
        for (uint8_t byte : data) {
            char hex_str[4];
            snprintf(hex_str, sizeof(hex_str), "%02X ", byte);
            line += hex_str;
        }
        LGD("%s", line.c_str());
    };

    logHex("SPS Data", _sps);
    logHex("PPS Data", _pps);
}

// Packetize one H.264 access unit into a PS pack and send it over RTP.
// pts_90khz is the presentation timestamp on the 90 kHz RTP/PS clock.
void RtpSender::pushVideoFrame(const uint8_t *h264, size_t h264_length, uint64_t pts_90khz,
                               bool is_key_frame) {
    // Reject null/empty buffers before touching the packetizer.
    if (h264 == nullptr || h264_length == 0) {
        LGE("Invalid H.264 data");
        return;
    }
    buildPsPacket(h264, h264_length, pts_90khz, /*is_video=*/true, is_key_frame);
}

// Packetize one G.711 A-law audio frame into a PS pack and send it over RTP.
void RtpSender::pushAudioFrame(const uint8_t *alaw, size_t alaw_length, uint64_t pts_90khz) {
    // Reject null/empty buffers before touching the packetizer.
    if (alaw == nullptr || alaw_length == 0) {
        LGE("Invalid Alaw data");
        return;
    }
    // Audio is never a key frame.
    buildPsPacket(alaw, alaw_length, pts_90khz, /*is_video=*/false, /*is_key_frame=*/false);
}

bool _first_packet = true;

/**
 * Wrap one elementary-stream frame into an MPEG-2 PS pack and send it as one
 * or more RTP packets (GB28181 PS-over-RTP).
 *
 * @param payload      elementary-stream bytes (Annex-B H.264 or G.711)
 * @param len          payload length in bytes
 * @param pts_90khz    presentation timestamp, 90 kHz clock (also used as SCR
 *                     and RTP timestamp)
 * @param is_video     true → stream_id 0xE0, false → audio stream_id 0xC0
 * @param is_key_frame key frames additionally get System Header + PSM and
 *                     cached SPS/PPS prepended
 */
void RtpSender::buildPsPacket(const uint8_t *payload, size_t len, uint64_t pts_90khz, bool is_video,
                              bool is_key_frame) {
    if (!payload || len == 0) return;

    std::vector<uint8_t> psPacket;
    psPacket.reserve(len + 64); // headers are small; avoid reallocations

    // 1. Pack Header — one per PS pack.
    writePsPackHeader(psPacket, pts_90khz);

    // 2. System Header + PSM precede every key frame (and the very first
    //    pack) so late-joining receivers can resynchronize the stream layout.
    if (is_key_frame || _first_packet) {
        writeSystemHeader(psPacket, true, true);
        writeProgramStreamMap(psPacket, false, true);
        _first_packet = false;
    }

    // 3. Assemble the PES payload. Key video frames get SPS/PPS (with
    //    Annex-B start codes) prepended so each IDR is independently decodable.
    std::vector<uint8_t> fullPayload;
    if (is_video && is_key_frame && !_sps.empty() && !_pps.empty()) {
        static const uint8_t startCode[4] = {0x00, 0x00, 0x00, 0x01};
        fullPayload.reserve(_sps.size() + _pps.size() + len + 12);
        fullPayload.insert(fullPayload.end(), startCode, startCode + 4);
        fullPayload.insert(fullPayload.end(), _sps.begin(), _sps.end());
        fullPayload.insert(fullPayload.end(), startCode, startCode + 4);
        fullPayload.insert(fullPayload.end(), _pps.begin(), _pps.end());
        fullPayload.insert(fullPayload.end(), startCode, startCode + 4);
        fullPayload.insert(fullPayload.end(), payload, payload + len);
    } else {
        // Non-key video / audio: forward the frame as-is (caller supplies
        // start codes where needed).
        fullPayload.assign(payload, payload + len);
    }

    // 4. PES packet: header + payload.
    uint8_t stream_id = is_video ? 0xE0 : 0xC0;
    writePesHeader(psPacket, stream_id, fullPayload.size(), pts_90khz, true);
    psPacket.insert(psPacket.end(), fullPayload.begin(), fullPayload.end());

    // NOTE: MPEG_program_end_code (00 00 01 B9) is deliberately NOT appended
    // here. It marks end-of-stream; the old per-frame emission made decoders
    // treat every frame as a terminated program. Send it once at teardown if
    // the platform requires it.

    // 5. Fragment across RTP packets; all fragments of one PS pack share the
    //    same timestamp, and the marker bit flags the last fragment.
    const size_t MAX_RTP_PAYLOAD = 1400; // keeps UDP datagrams under typical MTU
    size_t offset = 0;
    size_t total = psPacket.size();

    while (offset < total) {
        size_t left = total - offset;
        size_t sendSize = (left < MAX_RTP_PAYLOAD) ? left : MAX_RTP_PAYLOAD;
        bool marker = (offset + sendSize >= total); // last fragment

        sendRtpPacket(&psPacket[offset], sendSize, pts_90khz, marker);
        offset += sendSize;
    }
}

void RtpSender::sendRtpPacket(const uint8_t *payload, size_t len, uint64_t timestamp, bool marker) {
    uint8_t rtp_hdr[12];
    rtp_hdr[0] = 0x80;
    rtp_hdr[1] = ((_payload_type & 0x7F) | (marker ? 0x80 : 0x00));
    rtp_hdr[2] = (_seq >> 8) & 0xFF;
    rtp_hdr[3] = _seq & 0xFF;
    rtp_hdr[4] = (timestamp >> 24) & 0xFF;
    rtp_hdr[5] = (timestamp >> 16) & 0xFF;
    rtp_hdr[6] = (timestamp >> 8) & 0xFF;
    rtp_hdr[7] = timestamp & 0xFF;
    rtp_hdr[8] = (_ssrc >> 24) & 0xFF;
    rtp_hdr[9] = (_ssrc >> 16) & 0xFF;
    rtp_hdr[10] = (_ssrc >> 8) & 0xFF;
    rtp_hdr[11] = _ssrc & 0xFF;

    iovec iov[2];
    iov[0].iov_base = rtp_hdr;
    iov[0].iov_len = 12;
    iov[1].iov_base = (void *) payload;
    iov[1].iov_len = len;

    msghdr msg = {};
    msg.msg_name = (struct sockaddr *) &_remote_addr;
    msg.msg_namelen = sizeof(_remote_addr);
    msg.msg_iov = iov;
    msg.msg_iovlen = 2;

    sendmsg(_rtp_socket, &msg, 0);
    _seq++;
}

// Close the RTP socket on destruction. Compares against -1 (not > 0) because
// fd 0 is a valid descriptor on POSIX.
// NOTE(review): assumes _rtp_socket is initialized to -1 in the header when
// no socket has been created — confirm against rtp_sender.hpp.
RtpSender::~RtpSender() {
    if (_rtp_socket >= 0) {
        close(_rtp_socket);
        _rtp_socket = -1;
    }
}