#include "stream_server_g711a.h"

namespace El {
namespace StreamServer {

G711aAudioSource *G711aAudioSource::createNew(UsageEnvironment &env, int32_t ch, int32_t type)
{
    // Factory following the live555 createNew() convention: instances are
    // heap-allocated and reclaimed through the Medium reference machinery.
    G711aAudioSource *source = new G711aAudioSource(env, ch, type);
    return source;
}

G711aAudioSource::G711aAudioSource(UsageEnvironment &env, int32_t ch, int32_t type)
    : FramedSource(env), fCh(ch), fType(type)
{
    // Create the frame queue BEFORE registering the stream callback: the
    // callback (onFrame) pushes into queue_, so starting the stream first
    // opened a window in which an early frame dereferenced a null queue_.
    queue_ = std::unique_ptr<Base::BlockingQueue<Media::MediaFramePtr>>(
        new Base::BlockingQueue<Media::MediaFramePtr>("audio data", 10));

    // Subscribe to the media stream for this channel/type; fHandle is used
    // to unsubscribe in the destructor.
    fHandle = Media::StreamSource::GetInstance(fCh, fType)
                  .Start(std::bind(&G711aAudioSource::onFrame, this, std::placeholders::_1));
}

G711aAudioSource::~G711aAudioSource()
{
    // Cancel any pending deliverFrame0 retry task; deliverFrame() schedules
    // one when no data is available, and if it fires after destruction the
    // scheduler would call into a freed object (use-after-free).
    envir().taskScheduler().unscheduleDelayedTask(nextTask());

    // Deactivate the queue so any blocked producer/consumer wakes up, then
    // unsubscribe from the media stream before tearing the queue down.
    queue_->SetActive(false);
    Media::StreamSource::GetInstance(fCh, fType).Stop(fHandle);

    queue_->Clear();
    queue_.reset();
}

void G711aAudioSource::bufferExcessData(const uint8_t *data, size_t size)
{
    // Append [data, data + size) to the carry-over buffer; these bytes are
    // delivered ahead of any newly popped frame on the next deliverFrame().
    const uint8_t *end = data + size;
    excessDataBuffer.insert(excessDataBuffer.end(), data, end);
}

void G711aAudioSource::deliverFrame0(void *clientData)
{
    ((G711aAudioSource *)clientData)->deliverFrame();
}

void G711aAudioSource::deliverFrame()
{
    // Bytes carried over from a previous oversized delivery, plus (possibly)
    // one new frame popped from the queue.
    size_t totalDataSize = excessDataBuffer.size();
    Media::MediaFramePtr frame = queue_->Pop(0); // non-blocking pop
    if (frame) {
        totalDataSize += frame->GetLength();
        currentFrame_ = frame; // keep alive; its PTS stamps this delivery
    }

    if (totalDataSize > 0) {
        // live555 contract: write at most fMaxSize bytes into fTo.
        fFrameSize = (totalDataSize > fMaxSize) ? fMaxSize : totalDataSize;
        // Overflow bytes are carried over in excessDataBuffer rather than
        // discarded, so nothing is actually truncated from the stream.
        fNumTruncatedBytes = 0;

        size_t copiedSize = 0;

        // 1) Drain previously buffered excess data first (preserves order).
        if (!excessDataBuffer.empty()) {
            size_t sizeToCopy = std::min(excessDataBuffer.size(), static_cast<size_t>(fMaxSize));
            memcpy(fTo, excessDataBuffer.data(), sizeToCopy);
            copiedSize += sizeToCopy;

            // Remove copied data from excessDataBuffer
            excessDataBuffer.erase(excessDataBuffer.begin(), excessDataBuffer.begin() + sizeToCopy);
        }

        // 2) Copy as much of the new frame as fits; buffer the remainder.
        //    NOTE: previously a frame popped while the excess buffer already
        //    filled fMaxSize was silently dropped (the `copiedSize < fMaxSize`
        //    guard skipped it without buffering); now its bytes are kept.
        if (frame) {
            const size_t frameLen = static_cast<size_t>(frame->GetLength());
            const size_t remainingSpace = (copiedSize < fMaxSize) ? fMaxSize - copiedSize : 0;
            const size_t sizeToCopy = std::min(frameLen, remainingSpace);
            if (sizeToCopy > 0) {
                memcpy(fTo + copiedSize, frame->GetBuffer(), sizeToCopy);
                copiedSize += sizeToCopy;
            }
            if (sizeToCopy < frameLen) {
                bufferExcessData(frame->GetBuffer() + sizeToCopy, frameLen - sizeToCopy);
            }
        }

        // Presentation time from the most recent frame's PTS (microseconds).
        fPresentationTime.tv_sec = currentFrame_->GetPts() / 1000000;
        fPresentationTime.tv_usec = currentFrame_->GetPts() % 1000000;

        // G.711 A-law: 8000 samples per second, 1 byte per sample.
        fDurationInMicroseconds = (fFrameSize * 1000000) / (8000 * 1);

        // Hand the buffer back to the sink.
        FramedSource::afterGetting(this);
    } else {
        // No data yet: retry after 100 ms (argument is in microseconds; the
        // previous comment incorrectly said "10 ms").
        nextTask() = envir().taskScheduler().scheduleDelayedTask(100000, // 100 ms
                                                                 (TaskFunc *)deliverFrame0, this);
    }
}

void G711aAudioSource::doGetNextFrame()
{
    // live555 entry point invoked when the sink wants more data. Attempt an
    // immediate delivery; deliverFrame() reschedules itself when the queue
    // is empty and no excess data is buffered.
    deliverFrame();
}

void G711aAudioSource::onFrame(const Media::MediaFramePtr &frame)
{
    // Media-stream callback: accept only G.711 A-law frames, ignore the rest.
    if (frame->GetFrameType() != MEDIA_FRAME_G711A) {
        return;
    }
    queue_->Push(frame);
}

G711aAudioSubsession *G711aAudioSubsession::createNew(UsageEnvironment &env,
                                                      int32_t ch,
                                                      int32_t type,
                                                      bool reuseFirstSource)
{
    // Factory following the live555 createNew() convention.
    G711aAudioSubsession *subsession = new G711aAudioSubsession(env, ch, type, reuseFirstSource);
    return subsession;
}

// Stores the channel/type used later by createNewStreamSource(); delegates
// session lifecycle management to OnDemandServerMediaSubsession.
G711aAudioSubsession::G711aAudioSubsession(UsageEnvironment &env, int32_t ch, int32_t type, bool reuseFirstSource)
    : OnDemandServerMediaSubsession(env, reuseFirstSource), fCh(ch), fType(type)
{
}

// No owned resources beyond what the base class manages.
G711aAudioSubsession::~G711aAudioSubsession() = default;

FramedSource *G711aAudioSubsession::createNewStreamSource(unsigned clientSessionId, unsigned &estBitrate)
{
    (void)clientSessionId; // no per-session state needed for this source
    estBitrate = 64;       // G.711: 64 kbps
    FramedSource *source = G711aAudioSource::createNew(envir(), fCh, fType);
    return source;
}

RTPSink *G711aAudioSubsession::createNewRTPSink(Groupsock *rtpGroupsock,
                                                unsigned char rtpPayloadTypeIfDynamic,
                                                FramedSource *inputSource)
{
    (void)rtpPayloadTypeIfDynamic; // PCMA uses static RTP payload type 8
    (void)inputSource;
    // Payload type 8 ("PCMA"), 8 kHz clock, 1 channel; no RTP 'M' bit,
    // sink does not treat each buffer as a complete frame boundary.
    RTPSink *sink = SimpleRTPSink::createNew(envir(), rtpGroupsock, 8, 8000, "audio", "PCMA", 1, False, False);
    return sink;
}

} // namespace StreamServer
} // namespace El