#include "stream_gst_rtmp.h"

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include "utils_log.h"
#include "common_config_manager.h"
#include "common_product_definition.h"

namespace El {
namespace StreamService {

// Process-wide singleton accessor.
// Meyers singleton: the local static is initialized exactly once and the
// initialization is thread-safe since C++11.
StreamGstRtmp &StreamGstRtmp::GetInstance()
{
    static StreamGstRtmp instance;
    return instance;
}

// Destructor: tears down all active sinks and unregisters the /rtmp config
// listener via Stop() (Stop() is safe to call even if Start() never ran).
StreamGstRtmp::~StreamGstRtmp()
{
    Stop();
}

bool StreamGstRtmp::Start()
{
    auto &configManager = Common::ConfigManager::GetInstance();

    {
        std::lock_guard<std::mutex> lock(mutex_);
        if (started_) {
            EL_INFO("RTMP push already started");
            return true;
        }

        configHandle_ = configManager.Register(
            "/rtmp",
            [this](const nlohmann::json &config) {
                EL_INFO("/rtmp config updated, applying RTMP pipeline changes");
                return this->ApplyConfig(config);
            },
            Common::ConfigPriority::POST_APPLY);

        if (configHandle_ < 0) {
            EL_ERROR("Failed to register /rtmp config listener");
            return false;
        }

        started_ = true;
    }

    nlohmann::json rtmpConfig = configManager.GetConfig("/rtmp");
    if (!ApplyConfig(rtmpConfig)) {
        EL_ERROR("Initial /rtmp config apply failed");
        return false;
    }

    return true;
}

/// Stops the RTMP push service: destroys all sinks, clears the started flag
/// and unregisters the /rtmp config listener. Safe to call when not started.
void StreamGstRtmp::Stop()
{
    // Serialize against a concurrently running ApplyConfig().
    std::lock_guard<std::mutex> reconfigLock(reconfigMutex_);

    ClearSinks();

    // Take the handle out under mutex_, but perform the actual unregister
    // after releasing it to avoid re-entering ConfigManager under our lock.
    int registeredHandle = -1;
    {
        std::lock_guard<std::mutex> lock(mutex_);
        registeredHandle = configHandle_;
        configHandle_ = -1;
        started_ = false;
    }

    if (registeredHandle >= 0) {
        Common::ConfigManager::GetInstance().Unregister("/rtmp", registeredHandle);
    }
}

void StreamGstRtmp::ClearSinks()
{
    std::vector<PushDataPtr> sinks;
    {
        std::lock_guard<std::mutex> lock(mutex_);
        sinks.swap(sinks_);
    }

    for (auto &it : sinks) {
        if (it)
            it->Cleanup();
    }
}

/// Applies an /rtmp configuration: tears down existing sinks, refreshes the
/// cached audio-encode parameters and creates one push sink per enabled entry
/// in rtmpConfig["sinks"]. Called both from Start() and from the config
/// listener callback.
/// @param rtmpConfig  the /rtmp JSON subtree (may be null when absent).
/// @return true — sink creation failures are logged but not fatal.
bool StreamGstRtmp::ApplyConfig(const nlohmann::json &rtmpConfig)
{
    // One reconfiguration at a time; Stop() takes the same lock.
    std::lock_guard<std::mutex> lock(reconfigMutex_);

    // Rebuild from scratch: drop whatever sinks the previous config created.
    ClearSinks();

    // Restore cached audio parameters to their defaults under mutex_.
    auto resetAudioState = [this]() {
        std::lock_guard<std::mutex> guard(mutex_);
        audioEnable_ = false;
        audioCodec_ = "aac";
        audioSampleRate_ = 8000;
        audioChannels_ = 1;
        audioBitWidth_ = 16;
    };

    if (rtmpConfig.is_null()) {
        EL_INFO("/rtmp config not found, RTMP push disabled");
        resetAudioState();
        return true;
    }

    if (!rtmpConfig.value("enabled", false)) {
        EL_INFO("RTMP push disabled by config");
        resetAudioState();
        return true;
    }

    // Read audio-encode settings, falling back to defaults when the
    // /audio_encode/0 subtree is missing.
    bool nextAudioEnable = false;
    std::string nextAudioCodec = "aac";
    int nextSampleRate = 8000;
    int nextChannels = 1;
    int nextBitWidth = 16;

    const auto audioCfg = Common::ConfigManager::GetInstance().GetConfig("/audio_encode/0");
    if (!audioCfg.is_null()) {
        nextAudioEnable = audioCfg.value("enable", false);
        nextAudioCodec = audioCfg.value("codec", std::string("aac"));
        nextSampleRate = audioCfg.value("sampleRate", 8000);
        nextChannels = audioCfg.value("channel", 1);
        nextBitWidth = audioCfg.value("bitWidth", 16);
    }

    // Publish the snapshot for SetupSink()/PushFrame() readers.
    {
        std::lock_guard<std::mutex> guard(mutex_);
        audioEnable_ = nextAudioEnable;
        audioCodec_ = nextAudioCodec;
        audioSampleRate_ = nextSampleRate;
        audioChannels_ = nextChannels;
        audioBitWidth_ = nextBitWidth;
    }

    if (!rtmpConfig.contains("sinks") || !rtmpConfig["sinks"].is_array()) {
        EL_WARN("/rtmp.sinks is missing or not array; nothing to push");
        return true;
    }

    size_t createdCount = 0;
    for (const auto &sinkCfg : rtmpConfig["sinks"]) {
        // Skip malformed entries and entries explicitly disabled.
        if (!sinkCfg.is_object() || !sinkCfg.value("enabled", true)) {
            continue;
        }

        const std::string url = sinkCfg.value("url", std::string());
        const uint32_t channel = sinkCfg.value("channel", 0u);
        const uint32_t streamType = sinkCfg.value("stream_type", 0u);

        if (url.empty()) {
            EL_WARN("Skip RTMP sink with empty url (ch:{} type:{})", channel, streamType);
            continue;
        }

        if (SetupSink(url, channel, streamType)) {
            ++createdCount;
        }
    }

    EL_INFO("RTMP started, sinks created: {}", createdCount);
    return true;
}

bool StreamGstRtmp::SetupSink(const std::string &url, uint32_t channel, uint32_t streamType)
{
    // 查询视频编码参数
    auto videoCfg = Common::ConfigManager::GetInstance().GetConfig("/video_encode/" + std::to_string(channel) + "/" +
                                                                   std::to_string(streamType));
    if (videoCfg.is_null()) {
        EL_ERROR("Failed to get /video_encode/{}/{} config", channel, streamType);
        return false;
    }
    uint32_t width = videoCfg.value("width", 0u);
    uint32_t height = videoCfg.value("height", 0u);
    uint32_t fps = videoCfg.value("fps", 0u);
    if (!width || !height || !fps) {
        EL_ERROR("Invalid video params for ch:{} type:{} ({}x{} @{}fps)", channel, streamType, width, height, fps);
        return false;
    }

    // 仅在启用且为AAC时推送音频
    bool enableAudio = audioEnable_ && (audioCodec_ == "aac");

    // 使用配置中的原始音频参数，避免不必要的重采样和转码
    int sourceSampleRate = audioSampleRate_;
    int sourceChannels = audioChannels_;

    // 构建管线，使用 rtmp2sink 输出
    auto buildPipeline = [&]() -> std::string {
        std::string pipe = "";
        // video branch
        pipe += "appsrc name=video_src ! queue leaky=2 ! h264parse config-interval=-1 ! mux. ";
        // audio branch - 直接透传AAC，不做重采样转码
        if (enableAudio) {
            pipe += "appsrc name=audio_src ! queue leaky=2 ! aacparse ! mux. ";
        }
        // mux and sink
        pipe += fmt::format("flvmux streamable=true name=mux ! queue leaky=2 ! rtmp2sink location={} sync=false ", url);
        return pipe;
    };

    GError *error = nullptr;
    GstElement *pipeline = nullptr;
    const char *sinkName = "rtmp2sink";

    std::string pipeStr = buildPipeline();
    pipeline = gst_parse_launch(pipeStr.c_str(), &error);
    if (!pipeline || error) {
        if (error) {
            EL_ERROR("rtmp2sink pipeline parse error: {}", error->message);
            g_clear_error(&error);
        }
        if (pipeline) {
            gst_object_unref(pipeline);
        }
        return false;
    }

    auto data = std::make_shared<PushData>();
    data->url = url;
    data->channel = channel;
    data->streamType = streamType;
    data->pipeline = pipeline;
    data->videoFps = fps;
    if (enableAudio) {
        data->audioSampleRate = static_cast<uint32_t>(sourceSampleRate);
        data->audioChannels = static_cast<uint32_t>(sourceChannels);
    }

    // 创建流控管理器（视频20帧，音频50帧）
    std::string flowControlName = fmt::format("RTMP-ch{}-type{}", channel, streamType);
    data->flowControl = std::make_shared<StreamFlowControl>(flowControlName, 20, 50);

    // 获取appsrc元素
    data->appsrcVideo = gst_bin_get_by_name(GST_BIN(data->pipeline), "video_src");
    if (!data->appsrcVideo) {
        EL_ERROR("Failed to get video_src from RTMP pipeline");
        data->Cleanup();
        return false;
    }
    if (enableAudio) {
        data->appsrcAudio = gst_bin_get_by_name(GST_BIN(data->pipeline), "audio_src");
        if (!data->appsrcAudio) {
            EL_ERROR("Failed to get audio_src from RTMP pipeline when audio enabled");
            data->Cleanup();
            return false;
        }
    }

    // 配置appsrc通用属性
    g_object_set(G_OBJECT(data->appsrcVideo), "format", GST_FORMAT_TIME, "is-live", TRUE, "do-timestamp", TRUE,
                 "stream-type", GST_APP_STREAM_TYPE_STREAM, "emit-signals", FALSE, NULL);

    // 设置视频caps
    GstCaps *videoCaps =
        gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "byte-stream", "alignment", G_TYPE_STRING,
                            "au", "width", G_TYPE_INT, (gint)width, "height", G_TYPE_INT, (gint)height, "framerate",
                            GST_TYPE_FRACTION, (guint)fps, 1, NULL);
    if (!videoCaps) {
        EL_ERROR("Failed to create video caps for RTMP");
        data->Cleanup();
        return false;
    }
    gst_app_src_set_caps(GST_APP_SRC(data->appsrcVideo), videoCaps);
    gst_caps_unref(videoCaps);

    // 设置音频caps (AAC ADTS 格式,直接透传)
    if (enableAudio && data->appsrcAudio) {
        g_object_set(G_OBJECT(data->appsrcAudio), "format", GST_FORMAT_TIME, "is-live", TRUE, "do-timestamp", TRUE,
                     "stream-type", GST_APP_STREAM_TYPE_STREAM, "emit-signals", FALSE, NULL);

        // ADTS格式的AAC,让aacparse自动解析采样率和声道数
        GstCaps *audioCaps =
            gst_caps_new_simple("audio/mpeg", "mpegversion", G_TYPE_INT, 4, "framed", G_TYPE_BOOLEAN, FALSE,
                                "stream-format", G_TYPE_STRING, "adts", NULL);

        if (!audioCaps) {
            EL_ERROR("Failed to create audio caps for RTMP");
            data->Cleanup();
            return false;
        }
        gst_app_src_set_caps(GST_APP_SRC(data->appsrcAudio), audioCaps);
        gst_caps_unref(audioCaps);
    }

    // 订阅媒体帧
    data->streamSource = Media::StreamSource::Create(data->channel, data->streamType);
    data->streamSource->SetInfo(60, "rtmp frame data");
    data->streamHandle =
        data->streamSource->Register([this, data](const Media::MediaFramePtr &frame) { this->PushFrame(frame, data); });
    data->streamSource->Start();

    // 开始推流
    gst_element_set_state(data->pipeline, GST_STATE_PLAYING);

    {
        std::lock_guard<std::mutex> lock(mutex_);
        sinks_.push_back(data);
    }

    EL_INFO("RTMP sink started: url:{} (ch:{} type:{}, sink:{} )", url, channel, streamType, sinkName);
    return true;
}

/// Pushes one media frame into the sink's GStreamer pipeline.
/// Runs on the StreamSource delivery thread; the frame buffer is wrapped
/// zero-copy and kept alive by the shared_ptr captured in the release context.
/// @param frame  incoming video or audio frame (skipped if null).
/// @param data   the sink this frame belongs to (skipped if null/uninitialized).
void StreamGstRtmp::PushFrame(const Media::MediaFramePtr &frame, PushDataPtr data)
{
    if (!frame || !data || !data->flowControl)
        return;

    GstAppSrc *appsrc = nullptr;
    bool isVideo = frame->IsVideoFrame();
    bool isAudio = frame->IsAudioFrame();

    if (isVideo) {
        appsrc = GST_APP_SRC(data->appsrcVideo);
    } else if (isAudio && data->appsrcAudio) {
        // appsrcAudio is only created when AAC audio was enabled at setup
        // time, so its presence is the audio-enabled signal. Checking the
        // audioEnable_/audioCodec_ members here (as before) raced with
        // ApplyConfig() writing them under mutex_ on another thread.
        appsrc = GST_APP_SRC(data->appsrcAudio);
    } else {
        return;
    }

    if (!appsrc)
        return;

    // Flow control: decide whether to accept or drop this frame.
    if (data->flowControl->ShouldAcceptFrame(frame) == StreamFlowControl::Decision::DROP) {
        return; // drop the frame
    }

    // Wrap the frame buffer zero-copy; the destroy callback releases the
    // reference and notifies flow control when GStreamer is done with it.
    auto *bufData = new BufferReleaseContext{frame, data->flowControl, isVideo};
    GstBuffer *buffer = gst_buffer_new_wrapped_full(
        GST_MEMORY_FLAG_READONLY, (gpointer)frame->GetBuffer(), frame->GetLength(), 0, frame->GetLength(), bufData,
        [](gpointer user_data) {
            auto *buf = static_cast<BufferReleaseContext *>(user_data);
            // Notify flow control when GStreamer releases the buffer.
            if (buf->flowControl) {
                buf->flowControl->OnFrameReleased(buf->isVideo);
            }
            delete buf;
        });

    if (!buffer) {
        EL_ERROR("Failed to create wrapped GstBuffer for {}", frame->ToString().c_str());
        delete bufData;
        return;
    }

    // Mark keyframes: flvmux/rtmp need the delta flag cleared on IDR frames.
    if (isVideo) {
        if (frame->GetFrameType() == MEDIA_FRAME_I) {
            GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
        } else {
            GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
        }
    }

    // Hand the buffer to GStreamer. push_buffer takes ownership even on
    // failure, so the destroy callback above still fires — no manual unref.
    GstFlowReturn flowRet = gst_app_src_push_buffer(appsrc, buffer);
    if (G_UNLIKELY(flowRet != GST_FLOW_OK)) {
        EL_ERROR("RTMP push buffer failed: {}", gst_flow_get_name(flowRet));
        return;
    }

    // Push succeeded — account for it in flow control.
    data->flowControl->OnFramePushed(isVideo);
}

} // namespace StreamService
} // namespace El
