//
// Created by haoy on 2017/3/16.
//
#include "rtp_reader.hpp"

#include <h264/h264_nal.hpp>

#include "packet.hpp"
#include "tlv.hpp"
#include "mod/rtp/RTPRedundantPacket.h"



// Logging helpers: `event` is a short tag, `fmt` a FMT() key/value chain.
#define READER_DEBUG(event, fmt) LOG4CPLUS_DEBUG("", "reader: " << event << fmt)
#define READER_INFO(event, fmt) LOG4CPLUS_INFO("", "reader: " << event << fmt)
#define READER_WARN(event, fmt) LOG4CPLUS_WARN("", "reader: " << event << fmt)
#define READER_ERROR(event, fmt) LOG4CPLUS_ERROR("", "reader: " << event << fmt)

// Demultiplex a UDP payload by its first byte (RFC 7983-style ranges):
// 128..191 -> RTP/RTCP, 20..63 -> DTLS, 0..1 -> STUN (narrower than the
// RFC's 0..3 — presumably intentional here; anything else is "unknown").
#define IS_RTPRTCP(x) ((x) > 127 && (x) < 192)
#define IS_DTLS(x) ((x) > 19 && (x) < 64)
#define IS_STUN(x) ((x) < 2)

namespace {
/// Translate a Video/AudioCodec enum value into the generic mediacodec id
/// used by the packet layer; unrecognized values map to MCODEC_ID_UNKNOWN.
mediacodec_id_t codecToMCodec(uint32_t codec) {
    switch (codec) {
        case VideoCodec::VP8:  return MCODEC_ID_VP8;
        case VideoCodec::H264: return MCODEC_ID_H264;
        case AudioCodec::OPUS: return MCODEC_ID_OPUS;
        case AudioCodec::AAC:  return MCODEC_ID_AAC;
        default:               return MCODEC_ID_UNKNOWN;
    }
}
}

// Heuristic check whether a demuxed RTP/RTCP-range packet is RTCP:
// needs at least the 4-byte fixed header, version bits must be 2, and the
// packet type must fall in the compound-RTCP range [200, 206].
static
bool IsRTCP(const uint8_t *data, size_t size){
    if (size < 4)
        return false;               // too short for an RTCP header

    const uint8_t version = data[0] >> 6;
    const uint8_t packet_type = data[1];

    return version == 2 && packet_type >= 200 && packet_type <= 206;
}

// Allocates the shared TLV read buffer (BUF_SIZE bytes) reused for every
// record in run(); released in the destructor.
// NOTE(review): the malloc() result is not checked — an allocation failure
// would only surface as a crash on first use.
RTPReader::RTPReader()
{
    _buf = (uint8_t*) malloc(BUF_SIZE);
}

// Releases the input file, the TLV buffer, and every DTLS-SRTP context.
RTPReader::~RTPReader() {
    if (_input_fp) {
        fclose(_input_fp);
        _input_fp = nullptr;
    }
    free(_buf);
    _buf = NULL;

    // Destroy every DTLS context that was created, not only the "ready" ones:
    // onSRTPKey() may create channel.dtls and then fail in
    // xdtls_srtp_set_keys(), leaving ready == false with dtls != nullptr.
    // The previous `ready && dtls` condition leaked those contexts.
    for(auto&& channel : _channels) {
        if(channel.second.dtls) {
            xdtls_srtp_destroy(channel.second.dtls);
            channel.second.dtls = nullptr;
            channel.second.ready = false;
        }
    }
    _channels.clear();
}

// Opens the recorded TLV file for reading; run() consumes it.
// NOTE(review): terminates the process on failure — acceptable for a
// standalone tool, but consider reporting the error to the caller instead.
void RTPReader::input(const std::string& input_fn) {
    _input_fp = fopen(input_fn.c_str(), "rb");
    if (_input_fp == nullptr) {
        dbge("reader: cannot open input file %s", input_fn.c_str());
        exit(-1);
    }
}

// Restricts output to the given time ranges; run() consumes them in order.
void RTPReader::range(const std::list<TimeRange>& ranges) {
    _time_ranges = ranges;
}

// Enables/disables delivery of audio packets (the -A option, see onRTP()).
void RTPReader::audio(bool audio) {
    _read_audio = audio;
}

// Enables/disables delivery of video packets (the -V option, see onRTP()).
void RTPReader::video(bool video) {
    _read_video = video;
}

// Sets the sink that receives decoded codec registrations and packets.
// Returns *this to allow fluent configuration chaining.
RTPReader &RTPReader::handler(PacketHandler *handler) {
    _handler = handler;
    return *this;
}

void RTPReader::run() {
    int type = 0;
    int length = 0;

    if (_time_ranges.empty()) {
        _cur_time_range = TimeRange {.from = 0, .to = (uint64_t)-1};
    } else {
        _cur_time_range = _time_ranges.front();
        _time_ranges.pop_front();
    }

    while(true) {
        int ret = read_tlv(_input_fp, _buf, BUF_SIZE, &length, &type);
        if(ret < 0){
            break;
        }

        READER_DEBUG("tlv", FMT().kv("type", type).kv("len", length));

        if(type == TLV_TYPE_FILE_START) {
        } else if(type == TLV_TYPE_CODEC) {
            onCodec(_buf, length);
        } else if(type == TLV_TYPE_SDP
        || TLV_TYPE_REMOTE_SDP == type
        || TLV_TYPE_LOCAL_SDP == type) {
            onSDP(_buf, length);
        } else if(type == TLV_TYPE_RTP) {
            uint64_t u64 = be_get_u32(_buf);
            uint64_t rcv_ts = (u64 << 32) + be_get_u32(_buf + 4);
            onRTP(rcv_ts, _buf+8, length-8);
        } else if (type == TLV_TYPE_RTCP) {
            onRTCP(_buf+8, length-8);
        } else if (type == TLV_TYPE_SRTP_REMOTE_KEY) {
            onSRTPKey(true, _buf, length);
        } else if (type == TLV_TYPE_SRTP_LOCAL_KEY) {
        } else if (type == TLV_TYPE_RECV_UDP) {
            onSRTP(false, _buf, length);
        } else if (type == TLV_TYPE_SEND_UDP) {
        } else if (type == TLV_TYPE_END) {
            break;
        } else {
            // could be caused by reading a file being write to
            READER_ERROR("unknown tlv type", FMT().kv("type", type).kv("length", length));
            break;
        }
        type = 0;
        length = 0;
    }
    onEnd();

}

void RTPReader::onCodec(uint8_t *buf, size_t len) {
    // 1. read
    uint32_t codec_id = be_get_u32(buf);
    uint32_t payload_type = be_get_u32(buf+4);
    uint32_t sample_rate = be_get_u32(buf+8);
    uint32_t channels = be_get_u32(buf+12);
    uint32_t codec_name_len = be_get_u32(buf+16);
    char codec_name[32] = {0};
    memcpy(codec_name, buf+20, codec_name_len < 31 ? codec_name_len : 31);

    // 2. audio or video
    MediaFrame::Type media_type = MediaFrame::Audio;

    int codec = VideoCodec::GetCodecForName(codec_name);
    if (codec != VideoCodec::UNKNOWN) {
        media_type = MediaFrame::Video;
    } else {
        codec = AudioCodec::GetCodecForName(codec_name);
        if (codec != AudioCodec::UNKNOWN) {
            media_type = MediaFrame::Audio;
        }
    }

    // 3. filling rtp map
    _rtp_map[(uint8_t)payload_type] = (uint8_t)codec;
    _codecs.addCodec(payload_type, media_type, sample_rate, channels);

    if (_handler) {
        _handler->onCodecRegistry(_codecs, payload_type, std::string(codec_name));
    }
}

// DTLS send callback required by xdtls_srtp_create(). The reader only
// decrypts traffic replayed from a file, so there is never anything to
// actually send — report success without doing anything.
static
int on_sending_dtls_data(xdtls_srtp_t dtls,  void * cb_context, const unsigned char * data, int length){
    return 0;
}

// Handles SDP TLV records. The SDP body is only logged; codec information
// comes from TLV_TYPE_CODEC records instead (see onCodec()).
void RTPReader::onSDP(uint8_t *buf, size_t len) {
    READER_DEBUG("found sdp", FMT().kv("sdp", std::string((char*)buf, len)));
}

void RTPReader::onSRTPKey(bool is_remote,  uint8_t *buf, size_t len) {
    if(is_remote) {
        
        uint32_t mlineindex = get4(buf, 8);
        auto search = _channels.find(mlineindex);
        if(search == _channels.end()) {
            _channels[mlineindex] = Channel();
        }

        Channel& channel = _channels[mlineindex];

        if(!channel.ready) {
            buf += 16;
            len -= 16;

            if(!channel.dtls) {
                channel.dtls = xdtls_srtp_create(nullptr, nullptr, XDTLS_HASH_SHA256, XDTLS_ROLE_CLIENT, on_sending_dtls_data, nullptr);
                if(!channel.dtls){
                    READER_ERROR("create dtls-srtp fail!", FMT());       
                    return ;
                }
            }


            READER_DEBUG("got remote dtls key.", FMT());
            int ret = xdtls_srtp_set_keys(channel.dtls, buf, len, buf, len);
            if(!ret) {
                channel.ready = true;
            }
        }

    } else {
        READER_DEBUG("got local dtls key.", FMT());
    }
}

void RTPReader::onSRTP(bool is_remote, uint8_t *buf, size_t len) {
    if(is_remote) {
        return ;
    }

    uint64_t timestamp = get8(buf, 0);
    uint32_t mlineindex = get4(buf, 8);
    if(_channels.find(mlineindex) == _channels.end()
    || !_channels[mlineindex].ready) {
        return ;
    }
    Channel& channel = _channels[mlineindex];
    
    buf += 16;
    len -= 16 ;
    uint8_t first_byte = buf[0];
    
    if(IS_RTPRTCP(first_byte)){ // rtp/rtcp
        bool is_rtcp = IsRTCP(buf, len);
        int slen = 0;
        if(is_rtcp){
            slen = xdtls_srtp_unprotect_rtcp(channel.dtls, buf, len);
            if(slen <= 0) return ; // TODO: print log
            onRTCP(buf, slen);
        }else{
            slen = xdtls_srtp_unprotect(channel.dtls, buf, len);
            if(slen <= 0) return ; // TODO: print log
            onRTP(timestamp, buf, slen);
        }
    }else if(IS_DTLS(first_byte)){ // dtls
        READER_DEBUG("got dtls packet.", FMT());
    }else if(IS_STUN(first_byte)){
        READER_DEBUG("got stun packet. ", FMT().kv("first_byte", first_byte));
    }else{
        READER_DEBUG("got unknown packet. ", FMT().kv("first_byte", first_byte));
    }
}

// Processes one plaintext RTP packet: parses the header/extension, builds an
// RTPPacket for the right media type, restores RTX/RED payloads, reorders by
// sequence number, applies the time-range filter, and hands the packet to
// the handler. `rcv_ts` is the wall-clock receive time from the TLV record.
// NOTE(review): rcv_ts is taken by non-const reference but never modified —
// it could be passed by value; confirm no caller depends on the signature.
void RTPReader::onRTP(uint64_t& rcv_ts, uint8_t *buf, size_t len) {
    RTPHeader header;
    RTPHeaderExtension extension;
    // 1. parse header and extension

    int header_len = header.Parse(buf, len);
    if (header_len <= 0) {
        READER_WARN("corrupted tlv, skipped", FMT().kv("len", len));
        return;
    }
    if (header.extension) {
        int extension_len = extension.Parse(_rtp_map, buf+header_len, len-header_len);
        header_len += extension_len;
    }

    // 2. is video or audio ?
    RTPPacket* packet = nullptr;
    PacketReorder<RTPPacket>* reorder = NULL;
    uint8_t codec = _rtp_map.GetCodecForType(header.payloadType);
    // Per-media sequence bookkeeping, selected by the branches below.
    uint32_t* last_seq = nullptr;
    uint16_t* cycles = nullptr;

    if (_codecs.getMediaTypeForPT(header.payloadType) == MediaFrame::Audio) {
        if (!_read_audio) {
            READER_DEBUG("audio ignored due to option -A", FMT().kv("seq", header.sequenceNumber));
            return;
        }
        // remember the first audio SSRC seen (used by onRTCP SR matching)
        if(!_audio_ssrc) {
            _audio_ssrc = header.ssrc;
        }
        packet = new RTPPacket(MediaFrame::Audio, codec, header, extension);
        uint32_t padding_len = 0;
        if (header.padding) {
            // last payload byte holds the padding size (RFC 3550 §5.1)
            // NOTE(review): padding_len is not validated against
            // len - header_len; a malformed packet could wrap the payload
            // size passed to SetPayload — confirm upstream guarantees.
            padding_len = buf[len-1];
        }
        packet->SetMCodec(codecToMCodec(codec));
        packet->SetPayload(buf + header_len, len - header_len - padding_len);
        reorder = &_audio_reorder;
        last_seq = &_audio_last_seq;
        cycles = &_audio_cycles;
    } else if (_codecs.getMediaTypeForPT(header.payloadType) == MediaFrame::Video) {
        if (!_read_video) {
            READER_DEBUG("video ignored due to option -V", FMT().kv("seq", header.sequenceNumber));
            return;
        }
        if(!_video_ssrc) {
            _video_ssrc = header.ssrc;
        }

        packet = new RTPPacket(MediaFrame::Video, codec, header, extension);
        uint32_t padding_len = 0;
        if (header.padding) {
            // see padding NOTE in the audio branch above
            padding_len = buf[len-1];
        }
        packet->SetPayload(buf + header_len, len - header_len - padding_len);
        if (packet->GetMediaLength() == 0) {
            READER_INFO("dropped empty packet", FMT().kv("packet", packet));
            delete packet;
            return;
        }
        packet->SetMCodec(codecToMCodec(codec));
        // restore primary from redundant packet
        if (codec == VideoCodec::RTX_VP8 || codec == VideoCodec::RTX_RED) {
            // first two payload bytes carry the original sequence number
            // (OSN) of the retransmitted packet (RFC 4588)
            uint16_t osn = be_get_u16(packet->GetMediaData());
            // ssrc is not restored, which is not a problem, since we dont use it
            // packet->SetSSRC();
            switch (codec) {
                case VideoCodec::RTX_VP8:
                    READER_DEBUG("rtx+vp8 packet, restoring", FMT().kv("packet", packet).kv("osn", osn));
                    packet->SetSeqNum(osn);
                    packet->SetType(_rtp_map.GetTypeForCodec(VideoCodec::VP8));
                    packet->SetCodec(VideoCodec::VP8);
                    codec = VideoCodec::VP8;
                    packet->SetMCodec(codecToMCodec(codec));
                    break;
                case VideoCodec::RTX_RED:
                    READER_DEBUG("rtx+red packet, restoring", FMT().kv("packet", packet).kv("osn", osn));
                    packet->SetSeqNum(osn);
                    packet->SetType(_rtp_map.GetTypeForCodec(VideoCodec::RED));
                    packet->SetCodec(VideoCodec::RED);
                    codec = VideoCodec::RED;
                    packet->SetMCodec(codecToMCodec(codec));
                    break;
            }
            // strip the 2-byte OSN prefix so the payload is the primary data
            packet->SkipPayload(2);
            packet->SetMediaLength(packet->GetMediaLength() - 2);
        }

        if (codec == VideoCodec::RED) {
            // chrome contains only final block
            // low 7 bits of the first RED byte = actual payload type
            uint8_t apt = packet->GetMediaData()[0] & 0x7F;
            READER_DEBUG("red packet, restoring", FMT().kv("packet", packet).kv("apt", (int)apt));
            codec = _rtp_map.GetCodecForType(apt);
            packet->SetType(apt);
            packet->SetCodec(codec);
            packet->SetMCodec(codecToMCodec(codec));
            packet->SkipPayload(1);
            packet->SetMediaLength(packet->GetMediaLength() - 1);
        }

        if (codec == VideoCodec::ULPFEC) {
            // todo: restore packet from fec
            READER_DEBUG("fec packet, not handled", FMT().kv("packet", packet));
            delete packet;
            return;
        }

        reorder = &_video_reorder;
        last_seq = &_video_last_seq;
        cycles = &_video_cycles;
    } else {
        READER_INFO("unknown rtp header, dropping", FMT().kv("header", header));
        return;
    }

    // overwrite packet.time with ts from file
    packet->SetTime(rcv_ts);
    packet->SetClockRate(_codecs.getSampleRateForPT(header.payloadType));

    // 3. reorder packet
    std::vector<RTPPacket*> ordered_packets;
    bool added = reorder->add(ordered_packets, packet);
    if (!added) {
        // duplicate/too-late packet was rejected by the reorder buffer
        delete packet;
        packet = NULL;
    }
    // note: this loop variable intentionally shadows the outer `packet`
    for (auto packet : ordered_packets) {
        // STAP-A aggregates carrying SPS/PPS must survive range filtering so
        // the decoder always receives its configuration
        bool is_sps_pps = false;
        if (packet->GetCodec() == VideoCodec::H264) {
            h264::nal_packet nalp;
            int nalp_read = nalp.read(packet->GetMediaData(), packet->GetMediaLength());
            if (nalp_read && nalp.header.type == h264::STAP_A) {
                for (auto& unit : nalp.stap_a.units) {
                    if (unit.header.type == h264::SPS || unit.header.type == h264::PPS) {
                        is_sps_pps = true;
                        // make sure depacketizer wont ignore it
                        packet->SetMark(true);
                        break;
                    }
                }
            }
        }

        // deals wrapping: seq jumped from near 0xFFFF back to near 0 means
        // the 16-bit sequence number wrapped — bump the cycle counter
        if (packet->GetSeqNum() <0x0FFF && (*last_seq & 0xFFFF)>0xF000) {
            READER_INFO("rtp sequence wrapping detected", FMT().kv("ssrc", header.ssrc)
                    .kv("cycles", *cycles)
                    .kv("seq", packet->GetSeqNum())
                    .kv("last", *last_seq));
            (*cycles)++;
        }
        packet->SetSeqCycles(*cycles);
        *last_seq = packet->GetSeqNum();
        READER_DEBUG("rtp packet created", FMT().kv("packet", packet).kv("rcv_ts", rcv_ts));

        // time-range filtering (same policy as onEnd())
        uint64_t pts = packet->GetTime();
        if (pts > _cur_time_range.to) {
            next_time_range(pts);
        }
        if (pts < _cur_time_range.from && !is_sps_pps) {
            READER_DEBUG("rtp packet dropped: out of time range (cur < begin)", FMT().kv("cur", pts).kv("begin", _cur_time_range.from));
            delete packet;
            continue;
        } else if (pts > _cur_time_range.to) {
            READER_DEBUG("rtp packet dropped: out of time range (cur > end)", FMT().kv("cur", pts).kv("end", _cur_time_range.to));
            delete packet;
            continue;
        }

        // optional pts remapping when an external Timestamp was supplied
        if (_time_set) {
            if(_timestamps.calculatePts(packet, pts) < 0) {
                delete packet;
                packet = nullptr;
                continue;
            }
        }

        // hand over to the handler; the packet is freed afterwards, so the
        // handler must not retain the pointer
        if (packet->GetMedia() == MediaFrame::Audio) {
            if (_handler) {
                _handler->onAudioPacket(packet, pts);
            }
        } else if (packet->GetMedia() == MediaFrame::Video) {
            if (_handler) {
                _handler->onVideoPacket(packet, pts);
            }
        }
        delete packet;
        packet = NULL;
    }
    ordered_packets.clear();

    return;
}

void RTPReader::onRTCP(uint8_t *buf, size_t len) {
    RTCPCompoundPacket* rtcp = RTCPCompoundPacket::Parse(buf, len);
    if (!rtcp) {
        READER_WARN("rtcp packet dropped: can't parse it", "");
        return;
    }

    for (int i = 0; i < rtcp->GetPacketCount(); ++i) {
        RTCPPacket* packet = rtcp->GetPacket(i);
        switch (packet->GetType()) {
            case RTCPPacket::SenderReport: {
                //READER_INFO("NTP disabled due to lip sync problem with *-extract-*.tlv files", FMT());
                // todo: NTP caused lip sync failed on those *-extract-wave.tlv
                if (!_time_set) {
                    RTCPSenderReport* sr = (RTCPSenderReport*) packet;
                    uint32_t ssrc = sr->GetSSRC();
                    if(sr->GetSSRC() == 1111) {
                        ssrc = _audio_ssrc;
                    } else if(sr->GetSSRC() == 2222) {
                        ssrc = _video_ssrc;
                    }
                    READER_DEBUG("RTCP SR", FMT().kv("ssrc", ssrc));
                    // _timestamps.updateNTP(ssrc, sr->GetRTPTimestamp(), sr->GetTimestamp());
                }
            } break;
            case RTCPPacket::ReceiverReport: {
                //READER_INFO("found rtcp rr", FMT());
            } break;
            case RTCPPacket::SDES: {
                //READER_INFO("found rtcp sdes", FMT());
            } break;
            case RTCPPacket::Bye: {
                //READER_INFO("found rtcp bye", FMT());
            } break;
            case RTCPPacket::App: {
                //READER_INFO("found rtcp app", FMT());
            } break;
            case RTCPPacket::FullIntraRequest: {
                //READER_INFO("found rtcp full intra request", FMT());
            } break;
            case RTCPPacket::NACK: {
                //READER_INFO("found rtcp nack", FMT());
            } break;
            case RTCPPacket::ExtendedJitterReport: {
                //READER_INFO("found rtcp extended jitter report", FMT());
            } break;
            case RTCPPacket::RTPFeedback: {
                //READER_INFO("found rtcp rtp feedback", FMT());
            } break;
            case RTCPPacket::PayloadFeedback: {
                //READER_INFO("found rtcp payload feedback", FMT());
            } break;
        }
    }
}

// Flushes both reorder buffers at end of input, applying the same
// time-range / pts filtering as onRTP(), then resets codec state.
// NOTE(review): the filtering logic here duplicates onRTP()'s packet loop —
// a shared helper would keep the two policies from drifting apart.
void RTPReader::onEnd() {
    READER_INFO("rtp reader end", FMT());
    // 1. squeeze audio reorder
    std::vector<RTPPacket*> audio_packets;
    _audio_reorder.reset(audio_packets);
    for (int i = 0; i < audio_packets.size(); ++i) {
        if (_handler) {
            RTPPacket* a = audio_packets[i];
            uint64_t pts = a->GetTime();
            if (pts > _cur_time_range.to) {
                next_time_range(pts);
            }
            if (pts < _cur_time_range.from) {
                READER_DEBUG("rtp packet dropped: out of time range (cur < begin)", FMT().kv("cur", pts).kv("begin", _cur_time_range.from));
                delete(a);
                continue;
            } else if (pts > _cur_time_range.to) {
                READER_DEBUG("rtp packet dropped: out of time range (cur > end)", FMT().kv("cur", pts).kv("end", _cur_time_range.to));
                delete(a);
                continue;
            }
            if (_time_set) {
                if(_timestamps.calculatePts(a, pts) < 0) {
                    delete(a);
                    continue;
                }
            }
            _handler->onAudioPacket(a, pts);
        }
        // frees the delivered packet; dropped packets were freed above and
        // their loop iterations skipped via continue
        delete audio_packets[i];
    }
    audio_packets.clear();

    // 2. squeeze video reorder
    std::vector<RTPPacket*> video_packets;
    _video_reorder.reset(video_packets);
    for (int i = 0; i < video_packets.size(); ++i) {
        if (_handler) {
            RTPPacket* v = video_packets[i];
            // STAP-A aggregates with SPS/PPS bypass the lower range bound so
            // the decoder always gets its configuration (same as onRTP())
            bool is_sps_pps = false;
            if (v->GetCodec() == VideoCodec::H264) {
                h264::nal_packet nalp;
                int nalp_read = nalp.read(v->GetMediaData(), v->GetMediaLength());
                if (nalp_read && nalp.header.type == h264::STAP_A) {
                    for (auto& unit : nalp.stap_a.units) {
                        if (unit.header.type == h264::SPS || unit.header.type == h264::PPS) {
                            is_sps_pps = true;
                            // make sure depacketizer wont ignore it
                            v->SetMark(true);
                            break;
                        }
                    }
                }
            }
            uint64_t pts = v->GetTime();
            if (pts > _cur_time_range.to) {
                next_time_range(pts);
            }
            if (pts < _cur_time_range.from && !is_sps_pps) {
                READER_DEBUG("rtp packet dropped: out of time range (cur < begin)", FMT().kv("cur", pts).kv("begin", _cur_time_range.from));
                delete v;
                continue;
            } else if (pts > _cur_time_range.to) {
                READER_DEBUG("rtp packet dropped: out of time range (cur > end)", FMT().kv("cur", pts).kv("end", _cur_time_range.to));
                delete v;
                continue;
            }
            if (_time_set) {
                if(_timestamps.calculatePts(v, pts) < 0) {
                    delete v;
                    continue;
                }
            }

            _handler->onVideoPacket(v, pts);
        }
        delete video_packets[i];
    }
    video_packets.clear();

    // reset per-file codec state so the reader could be reused
    _codecs.clear();
    _rtp_map.clear();
    return;
}

//void RTPReader::onOpusEncoded(uint8_t* buf, size_t len) {
//    // too many classes dedicated to rtp
//    // todo: deal rtp and raw stream in a function
//    if (_codecs.getSampleRateForPT(111) == 0 && _handler) {
//        _codecs.addCodec(111, MediaFrame::Audio, 16000, 2);
//        _handler->onCodecRegistry(_codecs);
//    }
//
//    uint64_t u64 = be_get_u32(buf);
//    uint64_t rcv_ts = (u64 << 32) + be_get_u32(buf + 4);
//    buf += 8;
//    len -= 8;
//    if (rcv_ts > _cur_time_range.to) {
//        next_time_range(rcv_ts);
//    }
//    if (rcv_ts < _cur_time_range.from) {
//        READER_DEBUG("opus frame dropped: out of time range (cur < begin)", FMT().kv("cur", rcv_ts).kv("begin", _cur_time_range.from));
//        return;
//    } else if (rcv_ts > _cur_time_range.to) {
//        READER_DEBUG("opus frame dropped: out of time range (cur > end)", FMT().kv("cur", rcv_ts).kv("end", _cur_time_range.to));
//        return;
//    }
//
//    if (_handler) {
//
//        RTPHeader rtpHeader;
//        rtpHeader.payloadType = 111;
//        RTPHeaderExtension rtpHeaderExtension;
//        auto packet = new RTPPacket(MediaFrame::Audio, AudioCodec::OPUS, rtpHeader, rtpHeaderExtension);
//        packet->SetTime(rcv_ts);
//        packet->SetPayload(buf, len);
//        _handler->onAudioPacket(packet, rcv_ts);
//    }
//}

// Installs an external timestamp mapping; once set, onRTP()/onEnd() run
// packets through _timestamps.calculatePts() before delivery.
// Returns *this for fluent configuration chaining.
RTPReader &RTPReader::time(const Timestamp &t) {
    _timestamps = t;
    _time_set = true;
    return *this;
}

// Read accessor for the installed timestamp mapping.
const Timestamp &RTPReader::time() const {
    return _timestamps;
}

// return false if no more time range
void RTPReader::next_time_range(uint64_t cur) {
    while (_time_ranges.size()) {
        TimeRange r = _time_ranges.front();
        _time_ranges.pop_front();
        if (cur <= r.from || (cur > r.from && cur <= r.to)) {
            _cur_time_range.from = r.from;
            _cur_time_range.to = r.to;
            return;
        }
    }
}