//
// Created by haoy on 2017/3/16.
//
#include "rtp_reader.hpp"

#include <h264/h264_nal.hpp>

#include "packet.hpp"
#include "tlv.hpp"
#include "rtp/RTPRedundantPacket.h"
#include "rtp_analyzer.hpp"


// Logging helpers: route reader events through log4cplus with a common
// "reader: <event><details>" prefix. `fmt` is a stream expression
// (typically FMT().kv(...)) that is appended to the message.
#define READER_DEBUG(event, fmt) LOG4CPLUS_DEBUG("", "reader: " << event << fmt)
#define READER_INFO(event, fmt) LOG4CPLUS_INFO("", "reader: " << event << fmt)
#define READER_WARN(event, fmt) LOG4CPLUS_WARN("", "reader: " << event << fmt)
#define READER_ERROR(event, fmt) LOG4CPLUS_ERROR("", "reader: " << event << fmt)

// Allocate the shared TLV read buffer. All record payloads are parsed in
// place out of this buffer, so allocation failure is fatal.
RTPReader::RTPReader()
{
    _buf = (uint8_t*) malloc(BUF_SIZE);
    // fix: malloc was unchecked — every later read_tlv()/memcpy would
    // dereference a null buffer on allocation failure.
    if (_buf == nullptr) {
        READER_ERROR("failed to allocate read buffer", FMT().kv("size", BUF_SIZE));
        exit(-1);
    }
}

// Release the input file handle (if still open) and the read buffer.
RTPReader::~RTPReader() {
    if (_input_fp != nullptr) {
        fclose(_input_fp);
        _input_fp = nullptr;
    }
    // free(nullptr) is a no-op, so no guard is needed here.
    free(_buf);
    _buf = nullptr;
}

// Set the verbosity bitmask (Filters::LogLv flags) used to filter log output.
void RTPReader::setLogLevel(const uint32_t lv) { _lv = lv; }

// Limit the number of per-packet "processing" debug lines emitted (see onRTP).
void RTPReader::setRollNumber(const int roll) { _roll_number = roll; }

// Open the TLV capture file to read from. Exits the process on failure,
// matching the reader's fail-fast convention.
void RTPReader::input(const std::string& input_fn) {
    // fix: close any previously opened file so repeated calls to input()
    // do not leak the old FILE handle.
    if (_input_fp != nullptr) {
        fclose(_input_fp);
        _input_fp = nullptr;
    }
    _input_fp = fopen(input_fn.c_str(), "rb");
    if (_input_fp == nullptr) {
        dbge("reader: cannot open input file %s", input_fn.c_str());
        exit(-1);
    }
}

// void RTPReader::range(const std::list<TimeRange>& ranges) {
//     _time_ranges = ranges;
// }

// Enable/disable processing of audio RTP packets (option -A).
void RTPReader::audio(bool audio) { _read_audio = audio; }

// Enable/disable processing of video RTP packets (option -V).
void RTPReader::video(bool video) { _read_video = video; }

// Install the packet handler that receives decoded codec/audio/video events.
// Returns *this so calls can be chained. Ownership is NOT taken.
RTPReader &RTPReader::handler(PacketHandler *handler) {
    _handler = handler;
    return *this;
}

// Main read loop: pull TLV records from the input file one at a time and
// dispatch them by record type until EOF, an explicit TLV_TYPE_END record,
// or an unrecognized type (which can happen when reading a file that is
// still being written). Always finishes by flushing state via onEnd().
void RTPReader::run() {
    int type = 0;
    int length = 0;

    // if (_time_ranges.empty()) {
    //     _cur_time_range = TimeRange {.from = 0, .to = (uint64_t)-1};
    // } else {
    //     _cur_time_range = _time_ranges.front();
    //     _time_ranges.pop_front();
    // }

    while(true) {
        // read_tlv fills _buf with the record payload; negative return = EOF/error
        int ret = read_tlv(_input_fp, _buf, BUF_SIZE, &length, &type);
        if(ret < 0){
            break;
        }

        // READER_DEBUG("tlv", FMT().kv("type", type).kv("len", length));

        if(type == TLV_TYPE_FILE_START) {
            // file header record — no payload to process
        } else if(type == TLV_TYPE_CODEC) {
            onCodec(_buf, length);
        } else if(type == TLV_TYPE_SDP
        || TLV_TYPE_REMOTE_SDP == type
        || TLV_TYPE_LOCAL_SDP == type) {
            onSDP(_buf, length);
        } else if(type == TLV_TYPE_RTP) {
            // first 8 bytes of the record are a big-endian 64-bit receive
            // timestamp; the rest is handed to onRTP.
            // NOTE(review): onRTP skips another 8 bytes before the RTP header
            // — presumably a second per-packet prefix in the capture format;
            // confirm against the tlv writer.
            uint64_t u64 = be_get_u32(_buf);
            uint64_t rcv_ts = (u64 << 32) + be_get_u32(_buf + 4);
            onRTP(rcv_ts, _buf+8, length-8);
        } else if (type == TLV_TYPE_RTCP) {
            // RTCP records carry the same 8-byte timestamp prefix, dropped here
            onRTCP(_buf+8, length-8);
        // } else if (type == TLV_TYPE_SRTP_REMOTE_KEY) {
        //     onSRTPKey(true, _buf, length);
        // } else if (type == TLV_TYPE_SRTP_LOCAL_KEY) {
        // } else if (type == TLV_TYPE_RECV_UDP) {
        //     onSRTP(false, _buf, length);
        // } else if (type == TLV_TYPE_SEND_UDP) {
        } else if (type == TLV_TYPE_END) {
            break;
        } else {
            // could be caused by reading a file being write to
            READER_ERROR("unknown tlv type", FMT().kv("type", type).kv("length", length));
            break;
        }
        type = 0;
        length = 0;
    }
    // flush reorder buffers and notify the handler even on early break
    onEnd();

}

void RTPReader::onCodec(uint8_t *buf, size_t len) {
    // 1. read
    uint32_t codec_id = be_get_u32(buf);
    uint32_t payload_type = be_get_u32(buf+4);
    uint32_t sample_rate = be_get_u32(buf+8);
    uint32_t channels = be_get_u32(buf+12);
    uint32_t codec_name_len = be_get_u32(buf+16);
    char codec_name[32] = {0};
    memcpy(codec_name, buf+20, codec_name_len < 31 ? codec_name_len : 31);

    // 2. audio or video
    MediaFrame::Type media_type = MediaFrame::Audio;

    int codec = VideoCodec::GetCodecForName(codec_name);
    if (codec != VideoCodec::UNKNOWN) {
        media_type = MediaFrame::Video;
    } else {
        codec = AudioCodec::GetCodecForName(codec_name);
        if (codec != AudioCodec::UNKNOWN) {
            media_type = MediaFrame::Audio;
        }
    }

    // 3. filling rtp map
    _rtp_map[(uint8_t)payload_type] = (uint8_t)codec;
    _codecs.addCodec(payload_type, codec_name, media_type, sample_rate, channels);

    if (_handler) {
        _handler->onCodecRegistry(_codecs, (uint8_t)payload_type);
    }
    // READER_INFO("found codec pt mapping", FMT().kv("codec", std::string(codec_name))
    //         .kv("pt", payload_type)
    //         .kv("clockrate", sample_rate)
    //         .kv("channels", channels));
}

// Dump an SDP record body to the debug log when SDP logging is enabled.
void RTPReader::onSDP(uint8_t *buf, size_t len) {
    if ((_lv & Filters::LogLv::SDP) == 0) {
        return;
    }
    READER_DEBUG("found sdp", FMT().kv("sdp", std::string((char*)buf, len)));
}

// Process one RTP record: parse the header/extension, restore primary
// packets from RTX/RED redundancy, track sequence-number wrap cycles, push
// the packet through the reorder buffer, and deliver ordered packets to the
// handler. The handler borrows each packet only for the duration of the
// callback — this function deletes every packet it creates.
// NOTE(review): rcv_ts is taken by non-const reference but only read.
void RTPReader::onRTP(uint64_t& rcv_ts, uint8_t *buf, size_t len) {

    RTPHeader header;
    RTPHeaderExtension extension;
    // 1. parse header and extension
    // NOTE(review): run() already stripped the 8-byte timestamp, yet another
    // 8 bytes are skipped here before the RTP header — presumably a second
    // per-packet prefix in the capture format; confirm against the writer.

    int header_len = header.Parse(buf+8, len-8);
    if (header_len <= 0) {
        READER_WARN("corrupted tlv, skipped", FMT().kv("len", len));
        return;
    }
    if (header.extension) {
        int extension_len = extension.Parse(_rtp_map, buf+8+header_len, len-8-header_len);
        header_len += extension_len;
    }

    // 2. is video or audio ?
    RTPPacket* packet = nullptr;
    PacketReorder<RTPPacket>* reorder = NULL;
    uint8_t codec = _rtp_map.GetCodecForType(header.payloadType);
    // per-media wrap-tracking state, selected below
    uint32_t* last_seq = nullptr;
    uint16_t* cycles = nullptr;

    if (_codecs.getMediaTypeForPT(header.payloadType) == MediaFrame::Audio) {
        if (!_read_audio) {
            // READER_DEBUG("audio ignored due to option -A", FMT().kv("seq", header.sequenceNumber));
            return;
        }
        packet = new RTPPacket(MediaFrame::Audio, codec, header, extension);
        // RFC 3550: when the padding bit is set, the last payload byte holds
        // the padding length (including itself)
        uint32_t padding_len = 0;
        if (header.padding) {
            padding_len = buf[len-1];
        }
        packet->SetPayload(buf + 8 + header_len, len - 8 - header_len - padding_len);
        reorder = &_audio_reorder;
        last_seq = &_audio_last_seq;
        cycles = &_audio_cycles;
    } else if (_codecs.getMediaTypeForPT(header.payloadType) == MediaFrame::Video) {
        if (!_read_video) {
            // READER_DEBUG("video ignored due to option -V", FMT().kv("seq", header.sequenceNumber));
            return;
        }
        packet = new RTPPacket(MediaFrame::Video, codec, header, extension);
        uint32_t padding_len = 0;
        if (header.padding) {
            padding_len = buf[len-1];
        }
        packet->SetPayload(buf + 8 + header_len, len - 8 - header_len - padding_len);
        if (packet->GetMediaLength() == 0) {
            READER_INFO("dropped empty packet", FMT().kv("packet", packet));
            delete packet;
            return;
        }
        // restore primary from redundant packet
        // (RFC 4588 rtx: payload starts with the 2-byte original sequence number)
        if (codec == VideoCodec::RTX_VP8 || codec == VideoCodec::RTX_RED) {
            uint16_t osn = be_get_u16(packet->GetMediaData());
            // ssrc is not restored, which is not a problem, since we dont use it
            // packet->SetSSRC();
            switch (codec) {
                case VideoCodec::RTX_VP8:
                    READER_DEBUG("rtx+vp8 packet, restoring", FMT().kv("packet", packet).kv("osn", osn));
                    packet->SetSeqNum(osn);
                    packet->SetType(_rtp_map.GetTypeForCodec(VideoCodec::VP8));
                    packet->SetCodec(VideoCodec::VP8);
                    codec = VideoCodec::VP8;
                    break;
                case VideoCodec::RTX_RED:
                    READER_DEBUG("rtx+red packet, restoring", FMT().kv("packet", packet).kv("osn", osn));
                    packet->SetSeqNum(osn);
                    packet->SetType(_rtp_map.GetTypeForCodec(VideoCodec::RED));
                    packet->SetCodec(VideoCodec::RED);
                    codec = VideoCodec::RED;
                    break;
            }
            // drop the 2-byte OSN prefix to expose the original payload
            packet->SkipPayload(2);
            packet->SetMediaLength(packet->GetMediaLength() - 2);
        }

        // RED (RFC 2198): unwrap to the inner payload type; falls through from
        // the RTX_RED case above when both layers are present
        if (codec == VideoCodec::RED) {
            // chrome contains only final block
            uint8_t apt = packet->GetMediaData()[0] & 0x7F;
            READER_DEBUG("red packet, restoring", FMT().kv("packet", packet).kv("apt", (int)apt));
            codec = _rtp_map.GetCodecForType(apt);
            packet->SetType(apt);
            packet->SetCodec(codec);
            packet->SkipPayload(1);
            packet->SetMediaLength(packet->GetMediaLength() - 1);
        }

        if (codec == VideoCodec::ULPFEC) {
            // todo: restore packet from fec
            READER_DEBUG("fec packet, not handled", FMT().kv("packet", packet));
            delete packet;
            return;
        }

        reorder = &_video_reorder;
        last_seq = &_video_last_seq;
        cycles = &_video_cycles;
    } else {
        READER_INFO("unknown rtp header, dropping", FMT().kv("header", header));
        return;
    }

    // overwrite packet.time with ts from file
    packet->SetTime(rcv_ts);
    packet->SetClockRate(_codecs.getSampleRateForPT(header.payloadType));
    // deals wrapping
    // heuristic: a small new seq right after a near-max last seq means the
    // 16-bit sequence number wrapped, so bump the cycle counter
    if (packet->GetSeqNum() <0x0FFF && (*last_seq & 0xFFFF)>0xF000) {
        READER_INFO("rtp sequence wrapping detected", FMT().kv("ssrc", header.ssrc)
                .kv("cycles", *cycles)
                .kv("seq", packet->GetSeqNum())
                .kv("last", *last_seq));
        (*cycles)++;
    }
    packet->SetSeqCycles(*cycles);
    *last_seq = packet->GetSeqNum();
    // READER_DEBUG("rtp packet created", FMT().kv("packet", packet).kv("rcv_ts", rcv_ts));

    // 3. reorder packet
    // the reorder buffer takes ownership when add() returns true; packets it
    // releases come back in ordered_packets and are deleted below
    std::vector<RTPPacket*> ordered_packets;
    bool added = reorder->add(ordered_packets, packet);
    if (!added) {
        delete packet;
        packet = NULL;
    }
    // note: this loop variable shadows the outer `packet`
    for (auto packet : ordered_packets) {
        bool is_sps_pps = false;
        if (packet->GetCodec() == VideoCodec::H264) {
            // STAP-A aggregates may carry SPS/PPS; force the marker bit so
            // the depacketizer keeps them
            h264::nal_packet nalp;
            int nalp_read = nalp.read(packet->GetMediaData(), packet->GetMediaLength());
            if (nalp_read && nalp.header.type == h264::STAP_A) {
                for (auto& unit : nalp.stap_a.units) {
                    if (unit.header.type == h264::SPS || unit.header.type == h264::PPS) {
                        is_sps_pps = true;
                        // make sure depacketizer wont ignore it
                        packet->SetMark(true);
                        break;
                    }
                }
            }
        }

        uint64_t pts = packet->GetTime();
        // if (pts > _cur_time_range.to) {
        //     next_time_range(pts);
        // }
        // if (pts < _cur_time_range.from && !is_sps_pps) {
        //     READER_DEBUG("rtp packet dropped: out of time range (cur < begin)", FMT().kv("cur", pts).kv("begin", _cur_time_range.from));
        //     delete packet;
        //     continue;
        // } else if (pts > _cur_time_range.to) {
        //     READER_DEBUG("rtp packet dropped: out of time range (cur > end)", FMT().kv("cur", pts).kv("end", _cur_time_range.to));
        //     delete packet;
        //     continue;
        // }

        // if (_time_set) {
        //     pts = _timestamps.calculatePts(packet);
        // }
        ++_rtp_pkt_num;
        if(_lv&Filters::LogLv::RTP && _rtp_pkt_num <= _roll_number) {
            READER_DEBUG("processing", FMT().kv("packet", packet));
        }
        if (packet->GetMedia() == MediaFrame::Audio) {
            if (_handler) {
                _handler->onAudioPacket(packet, pts);
            }
        } else if (packet->GetMedia() == MediaFrame::Video) {
            if (_handler) {
                _handler->onVideoPacket(packet, pts);
            }
        }
        delete packet;
        packet = NULL;
    }
    ordered_packets.clear();
    
    return;
}

void RTPReader::onRTCP(uint8_t *buf, size_t len) {
    RTCPCompoundPacket* rtcp = RTCPCompoundPacket::Parse(buf+8, len-8);
    if (!rtcp) {
        READER_WARN("rtcp packet dropped: can't parse it", "");
        return;
    }
    for (int i = 0; i < rtcp->GetPacketCount(); ++i) {
        if(!(_lv&Filters::LogLv::RTCP)) {
            break;
        }
        RTCPPacket* packet = rtcp->GetPacket(i);
        switch (packet->GetType()) {
            case RTCPPacket::SenderReport: {
                READER_INFO("found rtcp sr", FMT());
                READER_INFO("NTP disabled due to lip sync problem with *-extract-*.tlv files", FMT());
                // RTCPSenderReport* sr = (RTCPSenderReport*) packet;
                // todo: NTP caused lip sync failed on those *-extract-wave.tlv
                // if (!_time_set) {
                //     _timestamps.updateNTP(sr->GetSSRC(), sr->GetRTPTimestamp(), sr->GetTimestamp());
                // }
            } break;
            case RTCPPacket::ReceiverReport: {
                READER_INFO("found rtcp rr", FMT());
            } break;
            case RTCPPacket::SDES: {
                READER_INFO("found rtcp sdes", FMT());
            } break;
            case RTCPPacket::Bye: {
                READER_INFO("found rtcp bye", FMT());
            } break;
            case RTCPPacket::App: {
                READER_INFO("found rtcp app", FMT());
            } break;
            case RTCPPacket::FullIntraRequest: {
                READER_INFO("found rtcp full intra request", FMT());
            } break;
            case RTCPPacket::NACK: {
                READER_INFO("found rtcp nack", FMT());
            } break;
            case RTCPPacket::ExtendedJitterReport: {
                READER_INFO("found rtcp extended jitter report", FMT());
            } break;
            case RTCPPacket::RTPFeedback: {
                READER_INFO("found rtcp rtp feedback", FMT());
            } break;
            case RTCPPacket::PayloadFeedback: {
                READER_INFO("found rtcp payload feedback", FMT());
            } break;
        }
    }
    ++_rtcp_pkt_num;
}

void RTPReader::onEnd() {
    READER_INFO("rtp reader end", FMT());
    // 1. squeeze audio reorder
    std::vector<RTPPacket*> audio_packets;
    _audio_reorder.reset(audio_packets);
    for (int i = 0; i < audio_packets.size(); ++i) {
        if (_handler) {
            RTPPacket* a = audio_packets[i];
            uint64_t pts = a->GetTime();
            // if (pts > _cur_time_range.to) {
            //     next_time_range(pts);
            // }
            // if (pts < _cur_time_range.from) {
            //     READER_DEBUG("rtp packet dropped: out of time range (cur < begin)", FMT().kv("cur", pts).kv("begin", _cur_time_range.from));
            //     delete(a);
            //     continue;
            // } else if (pts > _cur_time_range.to) {
            //     READER_DEBUG("rtp packet dropped: out of time range (cur > end)", FMT().kv("cur", pts).kv("end", _cur_time_range.to));
            //     delete(a);
            //     continue;
            // }
            // if (_time_set) {
            //     pts = _timestamps.calculatePts(a);
            // }
            _handler->onAudioPacket(a, pts);
        }
        delete audio_packets[i];
    }
    audio_packets.clear();

    // 2. squeeze video reorder
    std::vector<RTPPacket*> video_packets;
    _video_reorder.reset(video_packets);
    for (int i = 0; i < video_packets.size(); ++i) {
        if (_handler) {
            RTPPacket* v = video_packets[i];
            bool is_sps_pps = false;
            if (v->GetCodec() == VideoCodec::H264) {
                h264::nal_packet nalp;
                int nalp_read = nalp.read(v->GetMediaData(), v->GetMediaLength());
                if (nalp_read && nalp.header.type == h264::STAP_A) {
                    for (auto& unit : nalp.stap_a.units) {
                        if (unit.header.type == h264::SPS || unit.header.type == h264::PPS) {
                            is_sps_pps = true;
                            // make sure depacketizer wont ignore it
                            v->SetMark(true);
                            break;
                        }
                    }
                }
            }
            uint64_t pts = v->GetTime();
            // if (pts > _cur_time_range.to) {
            //     next_time_range(pts);
            // }
            // if (pts < _cur_time_range.from && !is_sps_pps) {
            //     READER_DEBUG("rtp packet dropped: out of time range (cur < begin)", FMT().kv("cur", pts).kv("begin", _cur_time_range.from));
            //     delete v;
            //     continue;
            // } else if (pts > _cur_time_range.to) {
            //     READER_DEBUG("rtp packet dropped: out of time range (cur > end)", FMT().kv("cur", pts).kv("end", _cur_time_range.to));
            //     delete v;
            //     continue;
            // }

            // if (_time_set) {
            //     pts = _timestamps.calculatePts(v);
            // }

            _handler->onVideoPacket(v, pts);
        }
        delete video_packets[i];
    }
    video_packets.clear();

    _codecs.clear();
    _rtp_map.clear();

    _handler->onEnd();

    return;
}

//void RTPReader::onOpusEncoded(uint8_t* buf, size_t len) {
//    // too many classes dedicated to rtp
//    // todo: deal rtp and raw stream in a function
//    if (_codecs.getSampleRateForPT(111) == 0 && _handler) {
//        _codecs.addCodec(111, "opus", MediaFrame::Audio, 16000, 2);
//        _handler->onCodecRegistry(_codecs, 111);
//    }
//
//    uint64_t u64 = be_get_u32(buf);
//    uint64_t rcv_ts = (u64 << 32) + be_get_u32(buf + 4);
//    buf += 8;
//    len -= 8;
//    if (rcv_ts > _cur_time_range.to) {
//        next_time_range(rcv_ts);
//    }
//    if (rcv_ts < _cur_time_range.from) {
//        READER_DEBUG("opus frame dropped: out of time range (cur < begin)", FMT().kv("cur", rcv_ts).kv("begin", _cur_time_range.from));
//        return;
//    } else if (rcv_ts > _cur_time_range.to) {
//        READER_DEBUG("opus frame dropped: out of time range (cur > end)", FMT().kv("cur", rcv_ts).kv("end", _cur_time_range.to));
//        return;
//    }
//
//    if (_handler) {
//
//        RTPHeader rtpHeader;
//        rtpHeader.payloadType = 111;
//        RTPHeaderExtension rtpHeaderExtension;
//        auto packet = new RTPPacket(MediaFrame::Audio, AudioCodec::OPUS, rtpHeader, rtpHeaderExtension);
//        packet->SetTime(rcv_ts);
//        packet->SetPayload(buf, len);
//        _handler->onAudioPacket(packet, rcv_ts);
//    }
//}

// RTPReader &RTPReader::time(const Timestamp &t) {
//     _timestamps = t;
//     _time_set = true;
//     return *this;
// }

// const Timestamp &RTPReader::time() const {
//     return _timestamps;
// }

// return false if no more time range
// void RTPReader::next_time_range(uint64_t cur) {
//     while (_time_ranges.size()) {
//         TimeRange r = _time_ranges.front();
//         _time_ranges.pop_front();
//         if (cur <= r.from || (cur > r.from && cur <= r.to)) {
//             _cur_time_range.from = r.from;
//             _cur_time_range.to = r.to;
//             return;
//         }
//     }
// }