#include "H264VideoServerMediaSubsession.hh"
#include "T32H264LiveSource.hh"

#include <cstdio>   // snprintf
#include <cstdlib>  // malloc, free
#include <cstring>  // memcpy, strlen

#include <Base64.hh>
#include <H264VideoRTPSink.hh>
#include <H264VideoStreamFramer.hh>

#include "../service/Logger.hh"

/// Factory for H264VideoServerMediaSubsession.
/// Refuses to construct when no StreamReplicator is supplied, since every
/// stream source is created as a replica of it.
/// @return a new subsession, or nullptr (after reporting an error) when
///         replicator is null.
H264VideoServerMediaSubsession* H264VideoServerMediaSubsession::createNew(UsageEnvironment& env,
                                                                         StreamReplicator* replicator,
                                                                         IMediaStreamHandler* handler,
                                                                         Boolean reuseFirstSource) {
    if (replicator != nullptr) {
        return new H264VideoServerMediaSubsession(env, replicator, handler, reuseFirstSource);
    }
    ErrorHandler::reportError(VideoErrorCode::RTSP_ERROR, "StreamReplicator is null");
    return nullptr;
}

// Constructs the subsession around an existing StreamReplicator.
// The handler (may be null) is only consulted later, in getAuxSDPLine(),
// to obtain SPS/PPS for the SDP description. fAuxSDPLine starts empty and
// is built lazily on the first getAuxSDPLine() call.
// NOTE(review): initializer order must match the member declaration order
// in the header (not visible here) — confirm before reordering.
H264VideoServerMediaSubsession::H264VideoServerMediaSubsession(UsageEnvironment& env,
                                                               StreamReplicator* replicator,
                                                               IMediaStreamHandler* handler,
                                                               Boolean reuseFirstSource)
    : OnDemandServerMediaSubsession(env, reuseFirstSource),
      fHandler(handler),
      fReplicator(replicator),
      fAuxSDPLine(nullptr),
      fReuseFirstSourceFlag(reuseFirstSource) {
    ErrorHandler::logInfo("H264VideoServerMediaSubsession constructed");
}

// Releases the cached auxiliary SDP line built by getAuxSDPLine().
// free(nullptr) is a defined no-op, so no explicit null guard is needed.
H264VideoServerMediaSubsession::~H264VideoServerMediaSubsession() {
    free(const_cast<char*>(fAuxSDPLine));
    fAuxSDPLine = nullptr;
}

/// Builds the per-client source pipeline: a replica of the shared live
/// source, wrapped in an H264VideoStreamFramer (which parses the H.264
/// elementary stream into NAL units for the RTP sink).
/// @param clientSessionId  live555 session id, used here only for logging.
/// @param estBitrate       out: estimated stream bitrate handed to the base
///                         class (set to 500; presumably kbps per live555
///                         convention — confirm against the header).
/// @return the framer on success, or nullptr after reporting an error.
FramedSource* H264VideoServerMediaSubsession::createNewStreamSource(unsigned clientSessionId,
                                                                    unsigned& estBitrate) {
    char logBuf[128];
    snprintf(logBuf, sizeof(logBuf), "Creating new stream source for client: %u", clientSessionId);
    ErrorHandler::logInfo(logBuf);

    if (fReplicator == nullptr) {
        ErrorHandler::reportError(VideoErrorCode::RTSP_ERROR,
                                  "StreamReplicator not available when creating stream source");
        return nullptr;
    }

    estBitrate = 500;

    // Each client gets its own replica of the single shared capture source.
    FramedSource* const replicaSource = fReplicator->createStreamReplica();
    if (replicaSource == nullptr) {
        ErrorHandler::reportError(VideoErrorCode::RTSP_ERROR,
                                  "Failed to create stream replica from StreamReplicator");
        return nullptr;
    }

    FramedSource* const nalFramer = H264VideoStreamFramer::createNew(envir(), replicaSource);
    if (nalFramer == nullptr) {
        ErrorHandler::reportError(VideoErrorCode::RTSP_ERROR,
                                  "Failed to create H264VideoStreamFramer");
        // Don't leak the replica if the framer could not take ownership of it.
        Medium::close(replicaSource);
        return nullptr;
    }

    ErrorHandler::logInfo("Stream source created successfully");
    return nalFramer;
}

/// Creates the H.264 RTP sink that packetizes NAL units onto the given
/// groupsock using the dynamic payload type chosen by live555.
/// The input source parameter is unused: the sink is connected to the
/// source by the base class.
/// @return the sink on success, or nullptr after reporting an error.
RTPSink* H264VideoServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,
                                                          unsigned char rtpPayloadTypeIfDynamic,
                                                          FramedSource* /*inputSource*/) {
    char logBuf[128];
    snprintf(logBuf, sizeof(logBuf), "Creating new RTP sink with payload type: %u", rtpPayloadTypeIfDynamic);
    ErrorHandler::logInfo(logBuf);

    RTPSink* const videoSink =
        H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
    if (videoSink != nullptr) {
        ErrorHandler::logInfo("RTP sink created successfully");
        return videoSink;
    }

    ErrorHandler::reportError(VideoErrorCode::RTSP_ERROR, "Failed to create H.264 RTP sink");
    return nullptr;
}

/// Builds (and caches in fAuxSDPLine) the "a=fmtp:96 ..." auxiliary SDP
/// line advertising the H.264 parameter sets. SPS/PPS come from fHandler
/// when available; otherwise a generic fallback line is used.
///
/// Memory contract: fAuxSDPLine is ALWAYS allocated with malloc() here,
/// because the destructor releases it with free().
/// BUG FIX: the previous fallback path used live555's strDup(), which
/// allocates with new[] — freeing that with free() is undefined behavior.
///
/// Also added: profile-level-id derived from the SPS (the three bytes
/// following the NAL header: profile_idc, constraint flags, level_idc —
/// RFC 6184 §8.1), matching what live555's own H264VideoRTPSink emits.
char const* H264VideoServerMediaSubsession::getAuxSDPLine(RTPSink* /*rtpSink*/,
                                                          FramedSource* /*inputSource*/) {
    ErrorHandler::logInfo("Generating auxiliary SDP line");

    // Already built on an earlier call — reuse the cached line.
    if (fAuxSDPLine != nullptr) {
        return fAuxSDPLine;
    }

    const unsigned char* sps = nullptr;
    const unsigned char* pps = nullptr;
    size_t spsSize = 0;
    size_t ppsSize = 0;

    if (fHandler != nullptr) {
        sps = fHandler->getSPS();
        spsSize = fHandler->getSPSSize();
        pps = fHandler->getPPS();
        ppsSize = fHandler->getPPSSize();
        LOG_INFO("Got SPS/PPS from handler: SPS=%zu bytes, PPS=%zu bytes", spsSize, ppsSize);
    } else {
        LOG_WARNING("Video handler is NULL, cannot get SPS/PPS");
    }

    if (sps != nullptr && spsSize > 0 && pps != nullptr && ppsSize > 0) {
        // live555's base64Encode() allocates with new[]; release with delete[].
        char* spsB64 = base64Encode((u_int8_t const*)sps, (unsigned)spsSize);
        char* ppsB64 = base64Encode((u_int8_t const*)pps, (unsigned)ppsSize);
        if (spsB64 && ppsB64) {
            // profile-level-id: SPS bytes 1..3 (after the NAL header byte).
            // Assumes sps points at the raw NAL unit — TODO confirm against
            // IMediaStreamHandler::getSPS().
            unsigned profileLevelId = 0;
            if (spsSize >= 4) {
                profileLevelId = ((unsigned)sps[1] << 16) |
                                 ((unsigned)sps[2] << 8) |
                                 (unsigned)sps[3];
            }
            // 96 bytes comfortably covers the fixed text plus the 6-hex-digit
            // profile-level-id, in addition to the two base64 payloads.
            size_t totalLen = 96 + strlen(spsB64) + strlen(ppsB64);
            char* line = (char*)malloc(totalLen);  // free()d by the destructor
            if (line) {
                if (profileLevelId != 0) {
                    snprintf(line, totalLen,
                             "a=fmtp:96 packetization-mode=1;profile-level-id=%06X;sprop-parameter-sets=%s,%s",
                             profileLevelId, spsB64, ppsB64);
                } else {
                    snprintf(line, totalLen,
                             "a=fmtp:96 packetization-mode=1;sprop-parameter-sets=%s,%s",
                             spsB64, ppsB64);
                }
                fAuxSDPLine = line;
            } else {
                LOG_ERROR("Failed to allocate memory for SDP line");
            }
        } else {
            LOG_WARNING("Failed to base64 encode SPS/PPS");
        }
        if (spsB64) delete[] spsB64;
        if (ppsB64) delete[] ppsB64;
    }

    if (fAuxSDPLine == nullptr) {
        // Fallback when SPS/PPS are unavailable. Allocated with malloc()
        // (NOT strDup) so the destructor's free() matches the allocator.
        static const char kDefaultLine[] =
            "a=fmtp:96 packetization-mode=1;profile-level-id=42e01f";
        char* fallback = (char*)malloc(sizeof(kDefaultLine));
        if (fallback != nullptr) {
            memcpy(fallback, kDefaultLine, sizeof(kDefaultLine));
        }
        fAuxSDPLine = fallback;
        // Guard the %s argument: malloc can fail, and passing NULL to %s is UB.
        LOG_WARNING("Using default SDP line (no SPS/PPS available): %s",
                    fAuxSDPLine ? fAuxSDPLine : "(allocation failed)");
    }

    return fAuxSDPLine;
}

// Never delay session start. Presumably safe because SPS/PPS for the SDP
// come from fHandler in getAuxSDPLine() rather than by probing the live
// stream — NOTE(review): confirm clients receive parameter sets before the
// first IDR frame.
Boolean H264VideoServerMediaSubsession::shouldDelayBeginningOfSession() const {
    return False;
}

// Tears down a per-client source chain. No subclass-specific cleanup is
// needed: the replica and framer created in createNewStreamSource() are
// closed by the base-class implementation.
void H264VideoServerMediaSubsession::closeStreamSource(FramedSource* inputSource) {
    OnDemandServerMediaSubsession::closeStreamSource(inputSource);
}
