#include "stream_gst_rtsp.h"
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <gst/sdp/gstsdpmessage.h>
#include <gst/rtsp-server/rtsp-sdp.h>
#include "utils_log.h"
#include "common_media_define.h"
#include "network.h"
#include <memory>
#include "common_config_manager.h"
#include "common_product_definition.h"
#include "media_speak.h"

namespace El {
namespace StreamService {

// Constants
static const char *DEFAULT_RTSP_SERVICE_PORT = "554"; // Standard RTSP port
static const char *DEFAULT_RTSP_INTERFACE = "eth0";   // NIC whose address is used in logged stream URLs
static const int DEFAULT_VIDEO_PT = 96;               // RTP payload type for H.264 video
static const int DEFAULT_AUDIO_PT = 97;               // RTP payload type for AAC/Opus audio
static const int DEFAULT_AUDIO_SAMPLE_RATE = 8000;    // Fallback audio sample rate (Hz)
static const int DEFAULT_AUDIO_CHANNELS = 1;          // Mono audio
static const int DEFAULT_AUDIO_VOLUME = 100;          // Talkback playback volume
static const int DEFAULT_SESSION_TIMEOUT = 60;        // Stream source session timeout (seconds)

// Forward declarations of the static GObject signal callbacks defined at the bottom of this file.
static GstFlowReturn OnClientOpusAudioSampleCallback(GstAppSink *appsink, gpointer userData);
static void OnRecordMediaConfigure(GstRTSPMediaFactory *factory, GstRTSPMedia *media, gpointer userData);
static void OnNewMedia(GstRTSPMediaFactory *factory, GstRTSPMedia *media, gpointer userData);
static void OnClientConnected(GstRTSPServer *server, GstRTSPClient *client, gpointer userData);
static void OnClientDisconnected(GstRTSPClient *client, gpointer userData);
static void OnRecordMediaUnprepared(GstRTSPMedia *media, gpointer userData);

/**
 * @brief Access the process-wide StreamGstRtsp singleton.
 * @return StreamGstRtsp& Reference to the lazily constructed instance.
 */
StreamGstRtsp &StreamGstRtsp::GetInstance()
{
    // Meyers singleton: initialization is thread-safe since C++11.
    static StreamGstRtsp singleton;
    return singleton;
}

/**
 * @brief Constructor: leaves the GLib loop and server unset and defaults the
 *        service port until configuration is applied by Start().
 */
StreamGstRtsp::StreamGstRtsp() : loop_(nullptr), server_(nullptr)
{
    servicePort_ = DEFAULT_RTSP_SERVICE_PORT;
}

/**
 * @brief Destructor: stops the server, the GLib loop thread and releases
 *        all per-client media state via Stop().
 */
StreamGstRtsp::~StreamGstRtsp()
{
    Stop();
}

/**
 * @brief 启动RTSP服务
 * @return bool 成功返回true，失败返回false
 */
bool StreamGstRtsp::Start()
{
    std::lock_guard<std::mutex> lock(lifecycleMutex_);

    if (initialized_) {
        EL_INFO("RTSP service already initialized");
        return true;
    }

    auto &configManager = Common::ConfigManager::GetInstance();

    if (configListenerId_ < 0) {
        configListenerId_ = configManager.Register(
            "/rtsp", [this](const nlohmann::json &config) -> bool { return OnRtspConfigChanged(config); });
        if (configListenerId_ < 0) {
            EL_WARN("Failed to register RTSP configuration listener");
        }
    }

    nlohmann::json config;
    try {
        config = configManager.GetConfig("/rtsp");
    } catch (const std::exception &e) {
        EL_WARN("Failed to load RTSP configuration: {}", e.what());
    }

    if (!ApplyRtspConfigLocked(config)) {
        EL_ERROR("Failed to apply initial RTSP configuration");
        return false;
    }

    initialized_ = true;
    return true;
}

/**
 * @brief Stop the RTSP service: tear down the server and detach the
 *        configuration listener registered in Start().
 */
void StreamGstRtsp::Stop()
{
    std::lock_guard<std::mutex> guard(lifecycleMutex_);

    // Nothing to do when the service never started.
    if (!initialized_) {
        return;
    }

    StopServerLocked();

    if (configListenerId_ >= 0) {
        Common::ConfigManager::GetInstance().Unregister("/rtsp", configListenerId_);
        configListenerId_ = -1;
    }

    initialized_ = false;
}

/**
 * @brief Configuration-manager callback for "/rtsp" changes.
 * @param config New configuration payload.
 * @return bool Result of applying the configuration.
 */
bool StreamGstRtsp::OnRtspConfigChanged(const nlohmann::json &config)
{
    std::lock_guard<std::mutex> guard(lifecycleMutex_);
    const bool applied = ApplyRtspConfigLocked(config);
    return applied;
}

/**
 * @brief Apply an "/rtsp" configuration payload (caller holds lifecycleMutex_).
 *
 * Extracts the enable flag and service port, then stops, restarts or starts
 * the server as needed to match the requested state.
 *
 * @param config Configuration JSON; null means "use defaults".
 * @return bool false when parsing fails or the server cannot be started.
 */
bool StreamGstRtsp::ApplyRtspConfigLocked(const nlohmann::json &config)
{
    bool enable = true;
    std::string port = servicePort_;

    try {
        if (config.is_null()) {
            port = DEFAULT_RTSP_SERVICE_PORT;
        } else {
            // Accept both "enabled" and the legacy "enable" key.
            if (config.contains("enabled") && config["enabled"].is_boolean()) {
                enable = config["enabled"].get<bool>();
            } else if (config.contains("enable") && config["enable"].is_boolean()) {
                enable = config["enable"].get<bool>();
            }

            // The port may appear under several key spellings and be either a
            // string or an integer; the first key present wins.
            const char *portKeys[] = {"port", "service_port", "servicePort"};
            for (const char *key : portKeys) {
                if (!config.contains(key)) {
                    continue;
                }
                const nlohmann::json &value = config[key];
                if (value.is_string()) {
                    port = value.get<std::string>();
                } else if (value.is_number_integer()) {
                    port = std::to_string(value.get<int>());
                }
                break;
            }
        }
    } catch (const std::exception &e) {
        EL_ERROR("Failed to parse RTSP configuration: {}", e.what());
        return false;
    }

    if (port.empty()) {
        port = DEFAULT_RTSP_SERVICE_PORT;
    }

    const bool portChanged = (server_ != nullptr) && (port != servicePort_);
    const bool previouslyEnabled = rtspEnabled_;

    rtspEnabled_ = enable;
    servicePort_ = port;

    // Disabled: tear down a running server; otherwise just note the change.
    if (!rtspEnabled_) {
        if (server_) {
            EL_INFO("RTSP service disabled by configuration, stopping server");
            StopServerLocked();
        } else if (previouslyEnabled) {
            EL_INFO("RTSP service disabled by configuration");
        }
        return true;
    }

    // Port changed while running: restart on the new port.
    if (portChanged) {
        EL_INFO("RTSP configuration changed, restarting server on port {}", servicePort_);
        StopServerLocked();
    }

    if (!server_ && !StartServerLocked()) {
        EL_ERROR("Failed to start RTSP server on port {}", servicePort_);
        return false;
    }

    return true;
}

/**
 * @brief Create, configure and attach the RTSP server (caller holds lifecycleMutex_).
 *
 * Reads the audio-encode configuration, registers /live/<ch>/<st> playback
 * mounts and /record/<ch> talkback mounts, then runs a dedicated GLib main
 * loop on a private GMainContext inside a worker thread.
 *
 * @return bool true on success (also when disabled or already running).
 */
bool StreamGstRtsp::StartServerLocked()
{
    if (!rtspEnabled_) {
        return true;
    }

    if (server_) {
        return true; // Already running.
    }

    // Audio settings decide whether (and how) an audio branch is appended.
    auto audioConfig = Common::ConfigManager::GetInstance().GetConfig("/audio_encode/0");
    if (audioConfig.is_null()) {
        EL_ERROR("get audio_encode failed");
        return false;
    }
    audioEnable_ = audioConfig.value("enable", false);
    audioCodec_ = audioConfig.value("codec", "aac");
    audioSampleRate_ = audioConfig.value("sampleRate", DEFAULT_AUDIO_SAMPLE_RATE);

    std::string audioPipe;
    if (audioEnable_) {
        if (audioCodec_ == "aac") {
            audioPipe = fmt::format("appsrc name=audio_src ! queue leaky=2 ! aacparse ! rtpmp4apay name=pay1 pt={}",
                                    DEFAULT_AUDIO_PT);
        } else if (audioCodec_ == "opus") {
            audioPipe = fmt::format("appsrc name=audio_src ! queue leaky=2 ! opusparse ! rtpopuspay name=pay1 pt={}",
                                    DEFAULT_AUDIO_PT);
        } else {
            EL_ERROR("not support {}", audioCodec_.c_str());
            return false;
        }
    }

    server_ = gst_rtsp_server_new();
    if (!server_) {
        EL_ERROR("Failed to create RTSP server");
        return false;
    }
    g_object_set(server_, "service", servicePort_.c_str(), nullptr);

    GstRTSPMountPoints *mounts = gst_rtsp_server_get_mount_points(server_);
    if (!mounts) {
        EL_ERROR("Failed to get mount points");
        StopServerLocked();
        return false;
    }

    int32_t channelCount = 1;
    if (!Common::ProductDefinition::GetInstance().GetValue("video_channel_count", channelCount)) {
        EL_ERROR("get video channel count failed");
    }
    int32_t streamsCount = 1;
    if (!Common::ProductDefinition::GetInstance().GetValue("video_stream_count", streamsCount)) {
        EL_ERROR("get video stream count failed");
    }

    // Playback mounts: /live/<channel>/<streamType>.
    for (int32_t channel = 1; channel <= channelCount; ++channel) {
        for (int32_t streamType = 0; streamType < streamsCount; ++streamType) {
            GstRTSPMediaFactory *factory = gst_rtsp_media_factory_new();
            if (!factory) {
                EL_ERROR("Failed to create media factory for channel {} stream {}", channel, streamType);
                continue;
            }

            std::string pipeline = fmt::format(
                "( appsrc name=video_src ! queue leaky=2 ! h264parse ! "
                "rtph264pay name=pay0 pt={} {} )",
                DEFAULT_VIDEO_PT, audioPipe);
            gst_rtsp_media_factory_set_launch(factory, pipeline.c_str());

            gst_rtsp_media_factory_set_shared(factory, FALSE);
            g_signal_connect(factory, "media-configure", G_CALLBACK(OnNewMedia), this);

            // The mount-points object takes ownership of the factory.
            std::string path = fmt::format("/live/{}/{}", channel, streamType);
            gst_rtsp_mount_points_add_factory(mounts, path.c_str(), factory);

            std::string ipAddress = Network::Network::GetInstance().GetInterfaceIpAddress(DEFAULT_RTSP_INTERFACE);
            std::string rtspUrl =
                fmt::format("rtsp://{}:{}{}", ipAddress.empty() ? "0.0.0.0" : ipAddress, servicePort_, path);
            EL_INFO("RTSP URL for channel {} stream {}: {}", channel, streamType, rtspUrl);
        }
    }

    // Talkback (RECORD) mounts: /record/<channel>.
    for (int32_t channel = 1; channel <= channelCount; ++channel) {
        GstRTSPMediaFactory *recordFactory = gst_rtsp_media_factory_new();
        if (!recordFactory) {
            EL_ERROR("Failed to create media factory for recording on channel {}", channel);
            continue;
        }

        gst_rtsp_media_factory_set_transport_mode(recordFactory, GST_RTSP_TRANSPORT_MODE_RECORD);

        std::string recordLaunchStr = "( rtpopusdepay name=depay0 ! opusparse ! appsink name=sink async=false )";

        gst_rtsp_media_factory_set_launch(recordFactory, recordLaunchStr.c_str());
        gst_rtsp_media_factory_set_enable_rtcp(recordFactory, TRUE);
        gst_rtsp_media_factory_set_protocols(recordFactory,
                                             (GstRTSPLowerTrans)(GST_RTSP_LOWER_TRANS_UDP | GST_RTSP_LOWER_TRANS_TCP));
        gst_rtsp_media_factory_set_shared(recordFactory, FALSE);
        g_signal_connect(recordFactory, "media-configure", G_CALLBACK(OnRecordMediaConfigure), this);

        std::string recordPath = fmt::format("/record/{}", channel);
        gst_rtsp_mount_points_add_factory(mounts, recordPath.c_str(), recordFactory);

        std::string ipAddress = Network::Network::GetInstance().GetInterfaceIpAddress(DEFAULT_RTSP_INTERFACE);
        std::string rtspUrl =
            fmt::format("rtsp://{}:{}{}", ipAddress.empty() ? "0.0.0.0" : ipAddress, servicePort_, recordPath);
        EL_INFO("RTSP Record URL for channel {}: {}", channel, rtspUrl);
    }

    g_object_unref(mounts);

    // Private context so the server does not depend on a global default loop.
    GMainContext *mainContext = g_main_context_new();
    if (!mainContext) {
        EL_ERROR("Failed to create GMainContext");
        StopServerLocked();
        return false;
    }

    guint serverId = gst_rtsp_server_attach(server_, mainContext);
    if (serverId == 0) {
        EL_ERROR("Failed to attach RTSP server");
        g_main_context_unref(mainContext);
        StopServerLocked();
        return false;
    }

    g_signal_connect(server_, "client-connected", G_CALLBACK(OnClientConnected), this);

    // BUGFIX: create the loop here instead of inside the worker thread. When
    // the loop was created by the thread, a StopServerLocked() racing thread
    // start-up could observe loop_ == nullptr, skip g_main_loop_quit() and
    // then block forever in join(). Creating it before the thread starts
    // guarantees Stop always has a loop to quit.
    // NOTE(review): a narrower window remains if quit lands before
    // g_main_loop_run() begins -- confirm against the GLib version in use.
    loop_ = g_main_loop_new(mainContext, FALSE);
    if (!loop_) {
        EL_ERROR("Failed to create GMainLoop");
        g_main_context_unref(mainContext);
        StopServerLocked();
        return false;
    }

    // The worker owns the context reference and releases it on exit.
    mainLoopThread_ = std::thread([this, mainContext]() {
        g_main_context_push_thread_default(mainContext);
        EL_INFO("Starting GLib main loop");
        g_main_loop_run(loop_);
        g_main_context_pop_thread_default(mainContext);
        g_main_context_unref(mainContext);
    });

    std::string localIp = Network::Network::GetInstance().GetInterfaceIpAddress(DEFAULT_RTSP_INTERFACE);
    EL_INFO("RTSP server started on {}:{}, session timeout {}s", localIp.empty() ? "0.0.0.0" : localIp, servicePort_,
            DEFAULT_SESSION_TIMEOUT);

    return true;
}

/**
 * @brief Tear down the RTSP server, the GLib loop thread and all per-client
 *        state. Caller must hold lifecycleMutex_; safe to call when nothing
 *        is running.
 */
void StreamGstRtsp::StopServerLocked()
{
    // Release every live-stream media instance first.
    {
        std::lock_guard<std::mutex> lock(mediaDataMutex_);
        for (auto &pair : mediaData_) {
            pair.second->Cleanup();
        }
        mediaData_.clear();
    }

    // Stop any talkback playback and forget the active record client.
    {
        std::lock_guard<std::mutex> lock(recordStateMutex_);
        if (recordAudioStreamActive_) {
            El::Media::Speak::GetInstance().Stop();
            recordAudioStreamActive_ = false;
        }
        activeRecordClient_ = nullptr;
    }

    // Ask the GLib loop to exit, then wait for the worker thread.
    // NOTE(review): if this runs while the worker has not yet reached
    // g_main_loop_run(), the quit may be lost and join() could block --
    // confirm loop_ is always created before the thread starts.
    if (loop_) {
        g_main_loop_quit(loop_);
    }

    if (mainLoopThread_.joinable()) {
        mainLoopThread_.join();
    }
    mainLoopThread_ = std::thread();

    if (loop_) {
        g_main_loop_unref(loop_);
        loop_ = nullptr;
    }

    if (server_) {
        g_object_unref(server_);
        server_ = nullptr;
    }
}

/**
 * @brief Release the media state associated with a stream handle.
 * @param handle Handle returned by StreamSource::Register for this media.
 */
void StreamGstRtsp::CleanupMedia(int handle)
{
    std::lock_guard<std::mutex> guard(mediaDataMutex_);
    auto found = mediaData_.find(handle);
    if (found == mediaData_.end()) {
        return; // Already cleaned up or never registered.
    }
    found->second->Cleanup();
    mediaData_.erase(found);
    EL_INFO("Media stopped and cleaned up, handle: {}", handle);
}

/**
 * @brief Wire up a newly created live-stream media pipeline.
 *
 * Parses channel/stream indices from the request URI, configures the video
 * (and optional audio) appsrc elements with caps derived from the encoder
 * configuration, and registers a StreamSource callback that feeds frames
 * into the pipeline. Cleanup is bound to the media object's destruction.
 *
 * @param factory Media factory that produced the media (unused).
 * @param media   Media object whose pipeline elements are configured.
 */
void StreamGstRtsp::HandleNewMedia(GstRTSPMediaFactory *factory, GstRTSPMedia *media)
{
    (void)factory;
    GstElement *element = gst_rtsp_media_get_element(media);

    // Parse channel and stream type from the request URI (/live/<ch>/<st>).
    uint32_t channel = 0;
    uint32_t streamType = 0;
    const gchar *abspath = nullptr;
    GstRTSPContext *ctx = gst_rtsp_context_get_current();
    if (ctx && ctx->uri) {
        abspath = ctx->uri->abspath;
        if (abspath) {
            sscanf(abspath, "/live/%u/%u", &channel, &streamType);
        }
    }

    // Validate the parsed indices.
    if (channel < 1 || channel > CHANNEL_MAX || streamType > CHANNEL_STREAM_TYPE_MAX) {
        // BUGFIX: log the cached path instead of dereferencing ctx->uri,
        // which may be null here (a null context is one way channel stays 0).
        EL_ERROR("Invalid channel or stream type {}", abspath ? abspath : "(unknown uri)");
        // BUGFIX: release the pipeline reference on this early-return path.
        gst_object_unref(element);
        return;
    }
    EL_INFO("OnNewMedia channel {} streamType {}", channel, streamType);

    // Create the per-media state object (channel stored 0-based).
    auto stream = std::make_shared<MediaData>();
    stream->server = this;
    stream->channel = channel - 1;
    stream->streamType = streamType;
    stream->appsrcVideo = gst_bin_get_by_name(GST_BIN(element), "video_src");

    if (!stream->appsrcVideo) {
        EL_ERROR("Failed to get video appsrc element");
        gst_object_unref(element);
        return;
    }

    // Flow control limits queued frames per client.
    std::string flowControlName = fmt::format("RTSP-CH{}-ST{}", channel, streamType);
    stream->flowControl = std::make_shared<StreamFlowControl>(flowControlName, 20, 50);

    // Video encoder settings for this channel/stream pair.
    auto video = Common::ConfigManager::GetInstance().GetConfig("/video_encode/" + std::to_string(stream->channel) +
                                                                "/" + std::to_string(stream->streamType));
    if (video.is_null()) {
        EL_ERROR("Failed to get video config");
        gst_object_unref(element);
        return;
    }

    uint32_t width = video.value("width", 0u);
    uint32_t height = video.value("height", 0u);
    uint32_t fps = video.value("fps", 0u);

    // All three parameters are required to build valid caps.
    if (width == 0 || height == 0 || fps == 0) {
        EL_ERROR("Invalid video parameters (width={}, height={}, fps={}) in config for channel {} stream {}", width,
                 height, fps, stream->channel, stream->streamType);
        gst_object_unref(element);
        return;
    }

    // Configure the video appsrc as a live, self-timestamping stream.
    g_object_set(G_OBJECT(stream->appsrcVideo), "format", GST_FORMAT_TIME, "is-live", TRUE, "do-timestamp", TRUE,
                 "stream-type", GST_APP_STREAM_TYPE_STREAM, "emit-signals", FALSE, NULL);

    GstCaps *videoCaps =
        gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "byte-stream", "alignment", G_TYPE_STRING,
                            "au", "width", G_TYPE_INT, (gint)width, "height", G_TYPE_INT, (gint)height, "framerate",
                            GST_TYPE_FRACTION, (guint)fps, 1, NULL);

    if (!videoCaps) {
        EL_ERROR("Failed to create video caps");
        gst_object_unref(element);
        return;
    }

    gst_app_src_set_caps(GST_APP_SRC(stream->appsrcVideo), videoCaps);
    gst_caps_unref(videoCaps);

    // Audio branch (only present when enabled in the launch pipeline).
    if (audioEnable_) {
        stream->appsrcAudio = gst_bin_get_by_name(GST_BIN(element), "audio_src");
        if (!stream->appsrcAudio) {
            EL_ERROR("Failed to find required audio element 'audio_src' when audio is enabled.");
            gst_object_unref(element);
            return;
        }

        g_object_set(G_OBJECT(stream->appsrcAudio), "format", GST_FORMAT_TIME, "is-live", TRUE, "do-timestamp", TRUE,
                     "stream-type", GST_APP_STREAM_TYPE_STREAM, "emit-signals", FALSE, NULL);

        GstCaps *audioCaps = nullptr;
        if (audioCodec_ == "opus") {
            audioCaps = gst_caps_new_simple("audio/x-opus", "rate", G_TYPE_INT, audioSampleRate_, "channels",
                                            G_TYPE_INT, DEFAULT_AUDIO_CHANNELS, NULL);
        } else if (audioCodec_ == "aac") {
            // Build the AudioSpecificConfig codec_data required for raw AAC:
            // 5 bits object type + 4 bits sample-rate index + 4 bits channel
            // config + 3 bits padding.
            uint8_t object_type = 2; // AAC LC
            uint8_t sample_rate_index = GetAacSampleRateIndex(audioSampleRate_);
            uint8_t channel_config = DEFAULT_AUDIO_CHANNELS;

            uint8_t codec_data[2];
            codec_data[0] = (object_type << 3) | (sample_rate_index >> 1);
            codec_data[1] = ((sample_rate_index & 0x1) << 7) | (channel_config << 3);

            GstBuffer *codec_buffer = gst_buffer_new_and_alloc(2);
            gst_buffer_fill(codec_buffer, 0, codec_data, 2);

            audioCaps = gst_caps_new_simple("audio/mpeg", "mpegversion", G_TYPE_INT, 4, "stream-format", G_TYPE_STRING,
                                            "raw", "rate", G_TYPE_INT, audioSampleRate_, "channels", G_TYPE_INT,
                                            DEFAULT_AUDIO_CHANNELS, "codec_data", GST_TYPE_BUFFER, codec_buffer, NULL);

            gst_buffer_unref(codec_buffer);
        }

        if (!audioCaps) {
            EL_ERROR("Failed to create audio caps");
            gst_object_unref(stream->appsrcAudio);
            stream->appsrcAudio = nullptr;
            gst_object_unref(element);
            return;
        }

        gst_app_src_set_caps(GST_APP_SRC(stream->appsrcAudio), audioCaps);
        gst_caps_unref(audioCaps);
    }

    // Create and start the frame source; frames are delivered via PushFrame.
    stream->streamSource = Media::StreamSource::Create(stream->channel, stream->streamType);
    stream->streamSource->SetInfo(DEFAULT_SESSION_TIMEOUT, "rtsp frame data");
    stream->streamHandle = stream->streamSource->Register(
        [this, stream](const Media::MediaFramePtr &frame) { this->PushFrame(frame, stream); });
    stream->streamSource->Start();

    // Register the media state under its handle.
    {
        std::lock_guard<std::mutex> lock(mediaDataMutex_);
        mediaData_[stream->streamHandle] = stream;
    }

    // Bind cleanup to the media object's destruction.
    g_object_set_data_full(G_OBJECT(media), "media-data", new int(stream->streamHandle), [](gpointer data) {
        int *handle = static_cast<int *>(data);
        StreamGstRtsp::GetInstance().CleanupMedia(*handle);
        delete handle;
    });

    gst_object_unref(element);
}

/**
 * @brief Forward one media frame into the appropriate GStreamer appsrc.
 *
 * The frame buffer is wrapped zero-copy; the release context keeps the frame
 * alive until GStreamer is done with the buffer.
 *
 * @param frame  Frame to push (video or audio).
 * @param stream Per-client media state holding the appsrc elements.
 */
void StreamGstRtsp::PushFrame(const Media::MediaFramePtr &frame, MediaDataPtr stream)
{
    const bool videoFrame = frame->IsVideoFrame();
    GstAppSrc *target = nullptr;

    if (videoFrame) {
        target = GST_APP_SRC(stream->appsrcVideo);
    } else if (frame->IsAudioFrame() && audioEnable_) {
        target = GST_APP_SRC(stream->appsrcAudio);
    } else {
        EL_DEBUG("Ignoring unknown frame type: {}", frame->GetFrameType());
        return;
    }

    if (!target) {
        return;
    }

    // Flow control: drop the frame when the controller says so.
    if (stream->flowControl &&
        stream->flowControl->ShouldAcceptFrame(frame) == StreamFlowControl::Decision::DROP) {
        return;
    }

    // The context owns a reference to the frame (and the flow controller)
    // until GStreamer releases the wrapped buffer.
    auto *releaseCtx = new BufferReleaseContext{frame, stream->flowControl, videoFrame};

    GstBuffer *wrapped = gst_buffer_new_wrapped_full(
        GST_MEMORY_FLAG_READONLY, (gpointer)frame->GetBuffer(), frame->GetLength(), 0, frame->GetLength(), releaseCtx,
        [](gpointer data) {
            // Runs when GStreamer drops its last reference to the buffer.
            auto *ctx = static_cast<BufferReleaseContext *>(data);
            if (ctx->flowControl) {
                ctx->flowControl->OnFrameReleased(ctx->isVideo);
            }
            delete ctx;
        });

    if (!wrapped) {
        EL_ERROR("Failed to create wrapped GstBuffer for {}", frame->ToString().c_str());
        delete releaseCtx;
        return;
    }

    // Key-frame marking: I-frames and audio frames are not delta units.
    if (videoFrame) {
        if (frame->GetFrameType() == MEDIA_FRAME_I) {
            GST_BUFFER_FLAG_UNSET(wrapped, GST_BUFFER_FLAG_DELTA_UNIT);
        } else {
            GST_BUFFER_FLAG_SET(wrapped, GST_BUFFER_FLAG_DELTA_UNIT);
        }
    } else if (frame->IsAudioFrame()) {
        GST_BUFFER_FLAG_UNSET(wrapped, GST_BUFFER_FLAG_DELTA_UNIT);
    }

    // push_buffer takes ownership of the buffer regardless of the result.
    GstFlowReturn pushResult = gst_app_src_push_buffer(target, wrapped);
    if (pushResult != GST_FLOW_OK) {
        EL_ERROR("Failed to push buffer: {}", gst_flow_get_name(pushResult));
        return;
    }

    if (stream->flowControl) {
        stream->flowControl->OnFramePushed(videoFrame);
    }
}

/**
 * @brief Log a new RTSP client connection and watch for its disconnect.
 * @param server RTSP server instance (unused).
 * @param client Newly connected client.
 */
void StreamGstRtsp::HandleClientConnected(GstRTSPServer *server, GstRTSPClient *client)
{
    (void)server;

    // Resolve the peer address for logging; it may be unavailable.
    const gchar *peerIp = nullptr;
    if (GstRTSPConnection *conn = gst_rtsp_client_get_connection(client)) {
        peerIp = gst_rtsp_connection_get_ip(conn);
    }

    if (peerIp) {
        EL_INFO("RTSP client {} connected", peerIp);
    } else {
        EL_INFO("RTSP client connected");
    }

    // Track the client's disconnect so it can be logged as well.
    g_signal_connect(client, "closed", G_CALLBACK(OnClientDisconnected), this);
}

/**
 * @brief Log that an RTSP client closed its connection.
 * @param client The client that disconnected.
 */
void StreamGstRtsp::HandleClientDisconnected(GstRTSPClient *client)
{
    // Resolve the peer address for logging; it may be unavailable.
    const gchar *peerIp = nullptr;
    if (GstRTSPConnection *conn = gst_rtsp_client_get_connection(client)) {
        peerIp = gst_rtsp_connection_get_ip(conn);
    }

    if (peerIp) {
        EL_INFO("RTSP client {} disconnected", peerIp);
    } else {
        EL_INFO("RTSP client disconnected");
    }
}

/**
 * @brief Configure a media pipeline created for a RECORD (talkback) request.
 *
 * Enforces a single active record client (preempting any previous one),
 * hooks the Opus appsink so received audio can be played back locally, and
 * registers teardown via the media's "unprepared" signal.
 *
 * @param media   Media object to configure.
 * @param channel Channel number parsed from the /record/<channel> mount.
 */
void StreamGstRtsp::HandleRecordMediaConfigure(GstRTSPMedia *media, int channel)
{
    EL_INFO("Configuring media for recording on channel: {}", channel);

    // Identify the RTSP client issuing this request.
    GstRTSPContext *ctx = gst_rtsp_context_get_current();
    GstRTSPClient *currentClient = ctx ? ctx->client : nullptr;
    GstRTSPClient *oldClient = nullptr;

    {
        std::lock_guard<std::mutex> lock(recordStateMutex_);
        if (activeRecordClient_ && activeRecordClient_ != currentClient) {
            oldClient = activeRecordClient_;
            // BUGFIX: this logger uses fmt-style "{}" placeholders; the
            // previous printf-style "%p" was printed literally and the
            // pointer arguments were never formatted.
            EL_WARN("Record client {} will be preempted by new client {}", (void *)activeRecordClient_,
                    (void *)currentClient);
        }
        activeRecordClient_ = currentClient;
        EL_INFO("Set active record client: {}", (void *)currentClient);
    }

    // Close the preempted client's connection outside the lock.
    if (oldClient) {
        gst_rtsp_client_close(oldClient);
    }

    GstElement *element = gst_rtsp_media_get_element(media);
    if (!element) {
        EL_ERROR("Failed to get pipeline element");
        return;
    }

    // Hook the Opus appsink so samples arrive via new-sample signals.
    GstElement *opusSink = gst_bin_get_by_name(GST_BIN(element), "sink");
    if (opusSink && GST_IS_APP_SINK(opusSink)) {
        g_object_set(G_OBJECT(opusSink), "emit-signals", TRUE, "sync", FALSE, NULL);
        g_signal_connect(opusSink, "new-sample", G_CALLBACK(OnClientOpusAudioSampleCallback),
                         GUINT_TO_POINTER(channel));
        EL_INFO("Connected appsink signal for Opus audio recording");
        gst_object_unref(opusSink);
    } else {
        EL_WARN("Failed to find Opus audio appsink element");
        if (opusSink) {
            // BUGFIX: the gst_bin_get_by_name reference was leaked when the
            // element exists but is not an appsink.
            gst_object_unref(opusSink);
        }
    }

    // Media properties: single-use, never suspended.
    gst_rtsp_media_set_reusable(media, FALSE);
    gst_rtsp_media_set_suspend_mode(media, GST_RTSP_SUSPEND_MODE_NONE);

    // Tear down talkback state when the media is unprepared.
    g_signal_connect(media, "unprepared", G_CALLBACK(OnRecordMediaUnprepared), this);

    // The lock previously taken around this log guarded nothing; dropped.
    EL_INFO("Record media configured for channel: {}", channel);

    gst_object_unref(element);
    EL_INFO("Media configured for Opus audio recording, channel: {}", channel);
}

/**
 * @brief Handle teardown of a RECORD media session: stop talkback playback
 *        and clear the active record client.
 * @param media Media object being unprepared (unused).
 */
void StreamGstRtsp::HandleRecordMediaUnprepared(GstRTSPMedia *media)
{
    (void)media;
    EL_INFO("Handling RecordMediaUnprepared.");

    std::lock_guard<std::mutex> guard(recordStateMutex_);
    // Stop any playback started by this record session.
    if (recordAudioStreamActive_) {
        El::Media::Speak::GetInstance().Stop();
        recordAudioStreamActive_ = false;
    }
    activeRecordClient_ = nullptr;
    EL_INFO("Active record client cleared");
}

/**
 * @brief Consume one Opus sample from the RECORD pipeline's appsink and feed
 *        it to local playback (talkback).
 *
 * On the first sample a streaming playback session is started via
 * Media::Speak; subsequent samples are pushed directly.
 *
 * @param appsink  App sink delivering the Opus packets.
 * @param userData Channel number (unused; available for logging).
 * @return GstFlowReturn GST_FLOW_OK on success, GST_FLOW_ERROR otherwise.
 */
GstFlowReturn StreamGstRtsp::HandleOpusAudioSample(GstAppSink *appsink, gpointer userData)
{
    (void)userData; // userData (channel) is unused directly; kept for logging
    GstSample *sample = gst_app_sink_pull_sample(appsink);
    if (!sample) {
        return GST_FLOW_ERROR;
    }

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    if (buffer) {
        GstMapInfo map;
        if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
            bool shouldPushData = false;
            {
                std::lock_guard<std::mutex> lock(recordStateMutex_);
                // Initialize playback only when talkback is not yet active.
                if (!recordAudioStreamActive_) {
                    // Start a new talkback decode/playback session.
                    HAL_ADEC_COM_CONFIG_S adecConfig;
                    memset(&adecConfig, 0, sizeof(HAL_ADEC_COM_CONFIG_S));

                    adecConfig.eType = HAL_AUDIO_TYPE_OPUS;
                    adecConfig.u8ChanNum = DEFAULT_AUDIO_CHANNELS;
                    // Map the configured sample rate to the HAL enum; any
                    // unrecognized rate falls back to 8 kHz.
                    if (audioSampleRate_ == 16000) {
                        adecConfig.u32SampleRate = HAL_AUDIO_SAMPLE_RATE_16000;
                    } else if (audioSampleRate_ == 48000) {
                        adecConfig.u32SampleRate = HAL_AUDIO_SAMPLE_RATE_48000;
                    } else {
                        adecConfig.u32SampleRate = HAL_AUDIO_SAMPLE_RATE_8000;
                    }
                    adecConfig.u8Vol = DEFAULT_AUDIO_VOLUME;

                    if (El::Media::Speak::GetInstance().StartStreamPlay(adecConfig)) {
                        EL_INFO("Successfully started Opus audio streaming playback");
                        recordAudioStreamActive_ = true;
                    } else {
                        EL_ERROR("Failed to start Opus audio streaming playback");
                        gst_buffer_unmap(buffer, &map);
                        gst_sample_unref(sample);
                        return GST_FLOW_ERROR;
                    }
                }
                // Talkback is active; this sample can be pushed.
                shouldPushData = recordAudioStreamActive_;
            } // release recordStateMutex_ before the potentially slow push

            if (shouldPushData) {
                if (!El::Media::Speak::GetInstance().PushAudioData(reinterpret_cast<const char *>(map.data),
                                                                   map.size)) {
                    EL_WARN("Failed to push audio data for playback, data may be dropped");
                }
            }
            gst_buffer_unmap(buffer, &map);
        }
    }

    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

// Static callback wrapper implementations

/**
 * @brief Wrapper: extract the channel from the request URI and forward to
 *        HandleRecordMediaConfigure.
 */
static void OnRecordMediaConfigure(GstRTSPMediaFactory *factory, GstRTSPMedia *media, gpointer userData)
{
    (void)factory;
    int channel = 0;

    // The mount path has the form /record/<channel>.
    GstRTSPContext *ctx = gst_rtsp_context_get_current();
    if (ctx && ctx->uri && ctx->uri->abspath) {
        sscanf(ctx->uri->abspath, "/record/%d", &channel);
    }

    static_cast<StreamGstRtsp *>(userData)->HandleRecordMediaConfigure(media, channel);
}

/**
 * @brief Wrapper forwarding factory media-configure events to HandleNewMedia.
 */
static void OnNewMedia(GstRTSPMediaFactory *factory, GstRTSPMedia *media, gpointer userData)
{
    auto *self = static_cast<StreamGstRtsp *>(userData);
    self->HandleNewMedia(factory, media);
}

/**
 * @brief Wrapper forwarding server client-connected signals to the instance.
 */
static void OnClientConnected(GstRTSPServer *server, GstRTSPClient *client, gpointer userData)
{
    auto *self = static_cast<StreamGstRtsp *>(userData);
    self->HandleClientConnected(server, client);
}

/**
 * @brief Wrapper forwarding client "closed" signals to the instance.
 */
static void OnClientDisconnected(GstRTSPClient *client, gpointer userData)
{
    auto *self = static_cast<StreamGstRtsp *>(userData);
    self->HandleClientDisconnected(client);
}

/**
 * @brief Wrapper forwarding media "unprepared" signals to the instance.
 */
static void OnRecordMediaUnprepared(GstRTSPMedia *media, gpointer userData)
{
    auto *self = static_cast<StreamGstRtsp *>(userData);
    self->HandleRecordMediaUnprepared(media);
}

/**
 * @brief Wrapper forwarding appsink "new-sample" signals to the singleton.
 */
static GstFlowReturn OnClientOpusAudioSampleCallback(GstAppSink *appsink, gpointer userData)
{
    auto &self = StreamGstRtsp::GetInstance();
    return self.HandleOpusAudioSample(appsink, userData);
}

/**
 * @brief Map a PCM sample rate to its AAC AudioSpecificConfig index.
 * @param sampleRate Sample rate in Hz.
 * @return uint8_t Sampling-frequency index (position in the standard AAC
 *         rate table); 11 (the 8000 Hz entry) for unrecognized rates.
 */
uint8_t StreamGstRtsp::GetAacSampleRateIndex(int sampleRate)
{
    // The table position is exactly the AAC sampling-frequency index.
    static const int kAacRates[] = {96000, 88200, 64000, 48000, 44100, 32000, 24000,
                                    22050, 16000, 12000, 11025, 8000,  7350};
    const int count = static_cast<int>(sizeof(kAacRates) / sizeof(kAacRates[0]));

    for (int index = 0; index < count; ++index) {
        if (kAacRates[index] == sampleRate) {
            return static_cast<uint8_t>(index);
        }
    }
    return 11; // Default to the 8000 Hz index.
}

} // namespace StreamService
} // namespace El
