#include "stream_gst_webrtc.h"
#include <gst/app/gstappsrc.h>
#include "utils_log.h"
#include <iostream>
#include <nlohmann/json.hpp>
#include "common_media_define.h"
#include "common_config_manager.h"
#include "algo_manager.h"
#include <chrono>

namespace El {
namespace StreamService {

// Tunable defaults. `constexpr` (rather than `const`): guaranteed compile-time
// constants, usable in constant expressions, no runtime initialization.
static constexpr int DEFAULT_VIDEO_PT = 96;            // RTP payload type for H.264 video
static constexpr int DEFAULT_AUDIO_PT = 97;            // RTP payload type for Opus audio
static constexpr int DEFAULT_AUDIO_SAMPLE_RATE = 8000; // Fallback audio sample rate (Hz)
static constexpr int DEFAULT_AUDIO_CHANNELS = 1;       // Mono audio
static constexpr int DEFAULT_SESSION_TIMEOUT = 60;     // Stream-source session timeout (seconds)
static constexpr int DEFAULT_EVENT_QUEUE_SIZE = 100;   // Max pending WebSocket events
static constexpr int DEFAULT_EVENT_TIMEOUT_MS = 100;   // Queue pop timeout so the worker can observe shutdown

/**
 * @brief Access the process-wide StreamGstWebRTC singleton.
 * @return Reference to the lazily constructed instance (function-local static,
 *         thread-safe initialization since C++11).
 */
StreamGstWebRTC &StreamGstWebRTC::GetInstance()
{
    static StreamGstWebRTC singleton;
    return singleton;
}

/**
 * @brief Destructor: delegates to Stop() so the worker thread, event queue and
 *        all client pipelines are torn down before the object is destroyed.
 *        Stop() is a no-op when the service was never started.
 */
StreamGstWebRTC::~StreamGstWebRTC()
{
    Stop();
}

/**
 * @brief Start the WebRTC service: load the audio-encode config, create the
 *        event queue, spawn the event-processing thread, attach the algorithm
 *        result callback and register the WebSocket endpoint.
 * @return bool true on success; false if already running or endpoint
 *         registration fails (in which case Stop() rolls everything back).
 * @note running_ is checked and set without a lock, so concurrent
 *       Start()/Stop() calls are not safe — callers must serialize them.
 */
bool StreamGstWebRTC::Start()
{
    if (running_) {
        EL_ERROR("StreamGstWebRTC already started");
        return false;
    }

    // Read audio encoder settings; if absent, run video-only.
    auto audioConfig = Common::ConfigManager::GetInstance().GetConfig("/audio_encode/0");
    if (audioConfig.is_null()) {
        EL_ERROR("get audio_encode failed");
        audioEnable_ = false; // missing config disables the audio branch
    } else {
        audioCodec_ = audioConfig.value("codec", "aac");
        audioSampleRate_ = audioConfig.value("sampleRate", DEFAULT_AUDIO_SAMPLE_RATE);
        // Audio only enabled when the config says so; AAC input is transcoded
        // to Opus in the per-client pipeline.
        audioEnable_ = audioConfig.value("enable", false);
    }

    // The queue must exist before the worker thread starts consuming it.
    eventQueue_ = std::make_unique<El::Utils::BlockingQueue<std::shared_ptr<El::WebServer::WebSocketEvent>>>(
        "WebRTCEventQueue", DEFAULT_EVENT_QUEUE_SIZE);

    // running_ is set before the thread launches so its loop condition holds.
    running_ = true;
    processThread_ = std::make_unique<std::thread>(&StreamGstWebRTC::ProcessWebSocketEvents, this);

    // Forward algorithm detection results to connected clients.
    auto &algoManager = El::Algo::IAlgoManager::GetInstance();
    algoManager.SetResultCallback([this](const nlohmann::json &result) {
        this->HandleAlgoResult(result);
    });

    // Register the signalling endpoint last; on failure, Stop() undoes the
    // thread/queue/callback setup above.
    auto &httpServer = El::WebServer::IHttpServer::GetInstance();
    if (!httpServer.RegisterWebSocketHandler(
            "^/v1/webrtc/live$",
            [this](std::shared_ptr<El::WebServer::WebSocketEvent> &event) { HandleWebSocketEvent(event); })) {
        EL_ERROR("Failed to register WebSocket handler");
        Stop();
        return false;
    }

    EL_INFO("WebRTC server started");
    return true;
}

/**
 * @brief Stop the WebRTC service and release all resources.
 *
 * Shutdown order matters: signal the worker loop, detach the algorithm
 * callback, wake any blocked queue Pop(), join the thread, then tear down the
 * per-client pipelines, and finally destroy the queue. No-op if not running.
 */
void StreamGstWebRTC::Stop()
{
    if (!running_) {
        return;
    }

    // Tell the worker loop to exit on its next iteration.
    running_ = false;

    // Detach from the algorithm manager so no further results arrive.
    auto &algoManager = El::Algo::IAlgoManager::GetInstance();
    algoManager.SetResultCallback(nullptr);

    // Deactivate the queue so a Pop() blocked in the worker returns promptly.
    if (eventQueue_) {
        eventQueue_->SetActive(false);
    }

    // Join the worker thread before touching shared state it may use.
    if (processThread_ && processThread_->joinable()) {
        processThread_->join();
        processThread_ = nullptr;
    }

    // Snapshot and clear the connection map under the lock...
    std::vector<std::shared_ptr<ClientConnection>> connectionsToCleanup;
    {
        std::lock_guard<std::mutex> lock(connectionsMutex_);
        // Reserve up front to avoid reallocation while copying
        connectionsToCleanup.reserve(connections_.size());
        for (auto &pair : connections_) {
            connectionsToCleanup.push_back(pair.second);
        }
        // Swap with a temporary to empty the map in one step
        decltype(connections_) emptyConnections;
        connections_.swap(emptyConnections);
    }

    // ...then clean up outside the lock (CleanupClientConnection re-locks it).
    for (auto &conn : connectionsToCleanup) {
        CleanupClientConnection(conn);
    }

    // Release the queue last; nothing can push to it anymore.
    eventQueue_ = nullptr;
}

/**
 * @brief 为客户端设置管道
 * @param clientConn 客户端连接对象
 */
void StreamGstWebRTC::SetupPipelineForClient(std::shared_ptr<ClientConnection> clientConn)
{
    clientConn->timeCost.Mark("SetupPipelineForClient");

    // queue leaky=2 存在是当队列满的时候丢弃数据，如果没有这个队列内存会无限上涨
    std::string pipelineStr = fmt::format(
        "appsrc name=video_src ! queue leaky=2 ! h264parse ! "
        "rtph264pay name=pay0 pt={} ! "
        "webrtcbin name=webrtc ",
        DEFAULT_VIDEO_PT);

    if (audioEnable_) {
        // 音频管道：AAC ADTS输入 -> 解析 -> 解码 -> Opus编码 -> RTP传输
        pipelineStr += fmt::format(
            "appsrc name=audio_src ! queue leaky=2 ! aacparse ! avdec_aac ! "
            "audioconvert ! audioresample ! audio/x-raw,rate={},channels={} ! "
            "opusenc bitrate=32000 ! rtpopuspay name=pay1 pt={} ! webrtc.",
            audioSampleRate_, DEFAULT_AUDIO_CHANNELS, DEFAULT_AUDIO_PT);
    }

    clientConn->pipeline = gst_parse_launch(pipelineStr.c_str(), nullptr);
    if (!clientConn->pipeline) {
        EL_ERROR("Failed to create pipeline");
        CleanupClientConnection(clientConn);
        return;
    }
    clientConn->timeCost.Mark("gst_parse_launch");

    // 获取并配置appsrc元素
    GstElement *videoSrc = gst_bin_get_by_name(GST_BIN(clientConn->pipeline), "video_src");
    GstElement *audioSrc = nullptr;
    if (audioEnable_) {
        audioSrc = gst_bin_get_by_name(GST_BIN(clientConn->pipeline), "audio_src");
    }

    if (!videoSrc || (audioEnable_ && !audioSrc)) {
        EL_ERROR("Failed to get appsrc elements");
        CleanupClientConnection(clientConn);
        return;
    }

    // 配置视频和音频源
    auto stream = std::make_shared<MediaData>();
    stream->appsrcVideo = videoSrc;
    stream->appsrcAudio = audioSrc;
    stream->channel = 0;
    stream->streamType = 0;
    clientConn->mediaData = stream;

    // 创建流控实例
    std::string flowControlName = fmt::format("WebRTC-{}", (void *)clientConn->wsConn);
    stream->flowControl = std::make_shared<StreamFlowControl>(flowControlName, 20, 50);

    // 获取视频编码配置
    auto video = Common::ConfigManager::GetInstance().GetConfig("/video_encode/" + std::to_string(stream->channel) +
                                                                "/" + std::to_string(stream->streamType));
    if (video.is_null()) {
        EL_ERROR("Failed to get video config");
        CleanupClientConnection(clientConn);
        return;
    }

    // 提取视频参数
    uint32_t width = video.value("width", 0u);
    uint32_t height = video.value("height", 0u);
    uint32_t fps = video.value("fps", 0u);

    // 检查提取的值是否有效
    if (width == 0 || height == 0 || fps == 0) {
        EL_ERROR("Invalid video parameters (width={}, height={}, fps={}) in config for channel {} stream {}", width,
                  height, fps, stream->channel, stream->streamType);
        CleanupClientConnection(clientConn);
        return;
    }

    // 优化appsrc参数配置
    g_object_set(G_OBJECT(videoSrc), "format", GST_FORMAT_TIME, "is-live", TRUE, "do-timestamp", TRUE, "stream-type",
                 GST_APP_STREAM_TYPE_STREAM, "emit-signals", FALSE, NULL);

    // 设置视频caps，使用实际的编码参数
    GstCaps *videoCaps =
        gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "byte-stream", "alignment", G_TYPE_STRING,
                            "au", "width", G_TYPE_INT, (gint)width, "height", G_TYPE_INT, (gint)height, "framerate",
                            GST_TYPE_FRACTION, (guint)fps, 1, NULL);
    if (!videoCaps) {
        EL_ERROR("Failed to create video caps");
        CleanupClientConnection(clientConn);
        return;
    }
    gst_app_src_set_caps(GST_APP_SRC(videoSrc), videoCaps);
    gst_caps_unref(videoCaps);

    // 配置音频源（仅在音频启用时）
    if (audioEnable_ && audioSrc) {
        g_object_set(G_OBJECT(audioSrc), "format", GST_FORMAT_TIME, "is-live", TRUE, "do-timestamp", TRUE,
                     "stream-type", GST_APP_STREAM_TYPE_STREAM, "emit-signals", FALSE, NULL);

        // 设置AAC原始音频源的caps（匹配muxer输出格式）
        // Create AudioSpecificConfig for raw AAC
        uint8_t object_type = 2; // AAC LC
        uint8_t sample_rate_index = GetAacSampleRateIndex(audioSampleRate_);
        uint8_t channel_config = DEFAULT_AUDIO_CHANNELS;
        
        uint8_t codec_data[2];
        codec_data[0] = (object_type << 3) | (sample_rate_index >> 1);
        codec_data[1] = ((sample_rate_index & 0x1) << 7) | (channel_config << 3);
        
        GstBuffer *codec_buffer = gst_buffer_new_and_alloc(2);
        gst_buffer_fill(codec_buffer, 0, codec_data, 2);
        
        GstCaps *audioCaps = gst_caps_new_simple("audio/mpeg", 
            "mpegversion", G_TYPE_INT, 4,
            "framed", G_TYPE_BOOLEAN, TRUE,
            "stream-format", G_TYPE_STRING, "raw",
            "rate", G_TYPE_INT, audioSampleRate_, 
            "channels", G_TYPE_INT, DEFAULT_AUDIO_CHANNELS,
            "codec_data", GST_TYPE_BUFFER, codec_buffer,
            NULL);
        
        gst_buffer_unref(codec_buffer);
        
        if (audioCaps) {
            gst_app_src_set_caps(GST_APP_SRC(audioSrc), audioCaps);
            gst_caps_unref(audioCaps);
        } else {
            EL_ERROR("Failed to create AAC ADTS audio caps");
        }
    }
    clientConn->timeCost.Mark("gst_app_src_set_caps");

    // 配置WebRTC
    GstElement *webrtc = gst_bin_get_by_name(GST_BIN(clientConn->pipeline), "webrtc");
    if (!webrtc) {
        EL_ERROR("Failed to get webrtcbin element");
        CleanupClientConnection(clientConn);
        return;
    }

    // 优化WebRTC配置以减少延迟和内存占用
    g_object_set(G_OBJECT(webrtc), "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, // max-bundle
                 "latency", 0,                                                           // 最小延迟
                 "stun-server", NULL,                                                    // 禁用STUN服务器
                 "turn-server", NULL,                                                    // 禁用TURN服务器
                 NULL);

    // 保存webrtc元素引用
    clientConn->webrtcElement = webrtc;

    // 连接信号，传递clientConn作为用户数据
    g_signal_connect(webrtc, "on-negotiation-needed", G_CALLBACK(+[](GstElement *element, gpointer userData) {
                         auto conn = static_cast<ClientConnection *>(userData);
                         static_cast<StreamGstWebRTC *>(g_object_get_data(G_OBJECT(element), "self"))
                             ->OnNegotiationNeeded(element, conn);
                     }),
                     clientConn.get());

    g_signal_connect(webrtc, "on-ice-candidate",
                     G_CALLBACK(+[](GstElement *element, guint mlineindex, gchar *candidate, gpointer userData) {
                         auto conn = static_cast<ClientConnection *>(userData);
                         static_cast<StreamGstWebRTC *>(g_object_get_data(G_OBJECT(element), "self"))
                             ->OnIceCandidate(element, mlineindex, candidate, conn);
                     }),
                     clientConn.get());

    // 存储this指针以便在回调中使用
    g_object_set_data(G_OBJECT(webrtc), "self", this);

    // 启动pipeline
    GstStateChangeReturn ret = gst_element_set_state(clientConn->pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        EL_ERROR("Failed to start pipeline");
        CleanupClientConnection(clientConn);
        return;
    }
    clientConn->timeCost.Mark("GST_STATE_PLAYING");

    // 启动流
    stream->streamSource = Media::StreamSource::Create(stream->channel, stream->streamType);
    stream->streamSource->SetInfo(DEFAULT_SESSION_TIMEOUT, "webrtc frame data");
    stream->streamHandle = stream->streamSource->Register(
        [this, stream](const Media::MediaFramePtr &frame) { this->PushFrame(frame, stream); });
    stream->streamSource->Start();
}

/**
 * @brief 清理客户端连接
 * @param client 客户端连接对象
 */
void StreamGstWebRTC::CleanupClientConnection(std::shared_ptr<ClientConnection> client)
{
    if (!client || !client->wsConn) {
        EL_ERROR("Invalid client connection");
        return;
    }

    // 先停止媒体流
    if (client->mediaData) {
        // 停止streamSource之前先解除注册回调
        if (client->mediaData->streamSource) {
            client->mediaData->streamSource->Unregister(client->mediaData->streamHandle);
            client->mediaData->streamSource->Stop();
        }

        // 设置pipeline状态为NULL，这会导致appsrc停止接收数据
        if (client->pipeline) {
            gst_element_set_state(client->pipeline, GST_STATE_NULL);
            // 等待pipeline状态变化完成
            gst_element_get_state(client->pipeline, nullptr, nullptr, GST_CLOCK_TIME_NONE);
        }

        // 清理appsrc
        if (client->mediaData->appsrcVideo) {
            gst_object_unref(client->mediaData->appsrcVideo);
            client->mediaData->appsrcVideo = nullptr;
        }
        if (client->mediaData->appsrcAudio) {
            gst_object_unref(client->mediaData->appsrcAudio);
            client->mediaData->appsrcAudio = nullptr;
        }
    }

    // 清理WebRTC相关资源
    if (client->webrtcElement) {
        gst_element_set_state(GST_ELEMENT(client->webrtcElement), GST_STATE_NULL);
        gst_element_get_state(GST_ELEMENT(client->webrtcElement), nullptr, nullptr, GST_CLOCK_TIME_NONE);
        g_object_unref(client->webrtcElement);
        client->webrtcElement = nullptr;
    }

    // 清理pipeline
    if (client->pipeline) {
        gst_object_unref(client->pipeline);
        client->pipeline = nullptr;
    }

    {
        std::lock_guard<std::mutex> lock(connectionsMutex_);
        connections_.erase(client->wsConn);
    }

    EL_INFO("Client connection cleaned up");
}

/**
 * @brief WebSocket callback entry point: hand the event off to the worker
 *        thread via the blocking queue (non-blocking for the server thread).
 * @param event Incoming WebSocket event.
 */
void StreamGstWebRTC::HandleWebSocketEvent(std::shared_ptr<El::WebServer::WebSocketEvent> &event)
{
    EL_DEBUG("Received WebSocketEvent, pushing to queue");

    if (eventQueue_ != nullptr) {
        const bool pushed = eventQueue_->Push(event);
        if (!pushed) {
            EL_ERROR("Failed to push WebSocketEvent to queue: queue is full or inactive");
        }
    }
}

/**
 * @brief Worker-thread main loop: pops WebSocket events from the queue and
 *        dispatches Connect / Message / Disconnect handling.
 *
 * Runs until running_ is cleared by Stop(). A malformed or stale event (e.g.
 * one referencing an unknown connection) is skipped, never fatal.
 */
void StreamGstWebRTC::ProcessWebSocketEvents()
{
    EL_INFO("WebRTC event processing thread started");

    while (running_) {
        // Pop with a timeout so running_ is re-checked periodically and the
        // thread can exit promptly on shutdown.
        auto event = eventQueue_->Pop(DEFAULT_EVENT_TIMEOUT_MS);
        if (event == nullptr) {
            continue;
        }

        switch (event->type) {
            case El::WebServer::WebSocketEventType::Connect: {
                EL_INFO("New WebSocket connection established");
                auto client = std::make_shared<ClientConnection>();
                client->wsConn = event->connection;

                {
                    std::lock_guard<std::mutex> lock(connectionsMutex_);
                    connections_[event->connection] = client;
                }

                SetupPipelineForClient(client);
                break;
            }
            case El::WebServer::WebSocketEventType::Message: {
                std::shared_ptr<ClientConnection> client;
                {
                    std::lock_guard<std::mutex> lock(connectionsMutex_);
                    auto it = connections_.find(event->connection);
                    if (it != connections_.end()) {
                        client = it->second;
                    }
                }
                if (!client) {
                    // BUG FIX: this used to `return`, which killed the whole
                    // event-processing thread on a single stale event.
                    EL_ERROR("Client connection not found");
                    break;
                }
                HandleWebSocketMessage(client, event->message);
                break;
            }
            case El::WebServer::WebSocketEventType::Disconnect: {
                EL_INFO("WebSocket connection closed");
                std::shared_ptr<ClientConnection> client;
                {
                    std::lock_guard<std::mutex> lock(connectionsMutex_);
                    auto it = connections_.find(event->connection);
                    if (it != connections_.end()) {
                        client = it->second;
                    }
                }
                if (!client) {
                    // BUG FIX: same as above — skip the event, keep the loop alive.
                    EL_ERROR("Client connection not found");
                    break;
                }
                CleanupClientConnection(client);
                break;
            }
            default:
                EL_ERROR("Unknown WebSocket event type: {}", static_cast<int>(event->type));
                break;
        }
    }

    EL_INFO("WebRTC event processing thread stopped");
}

/**
 * @brief Send a text frame over a WebSocket connection via the HTTP server.
 * @param connection Opaque WebSocket connection handle.
 * @param message Payload to send (also traced at debug level).
 */
void StreamGstWebRTC::SendWebSocketTextMessage(void *connection, const std::string &message)
{
    EL_DEBUG("{}", message);
    auto &server = El::WebServer::IHttpServer::GetInstance();
    server.SendWebSocketTextMessage(connection, message);
}

/**
 * @brief 处理WebSocket消息
 * @param client 客户端连接对象
 * @param message 消息内容
 */
void StreamGstWebRTC::HandleWebSocketMessage(std::shared_ptr<ClientConnection> client, const std::string &message)
{
    nlohmann::json msg = nlohmann::json::parse(message, nullptr, false);
    if (msg.is_discarded()) {
        EL_ERROR("Failed to parse JSON message: {}", message);
        return;
    }

    EL_DEBUG("Received RPC message: {}", msg.dump(4));

    // 检查是否是JSON-RPC 2.0格式
    if (!msg.contains("jsonrpc") || msg["jsonrpc"] != "2.0") {
        EL_ERROR("Invalid JSON-RPC format: missing or incorrect jsonrpc field");
        return;
    }

    // 处理RPC请求（包含id字段）
    if (msg.contains("method") && msg.contains("id")) {
        std::string method = msg["method"];
        auto id = msg["id"];
        auto params = msg.value("params", nlohmann::json::object());

        if (method == "webrtc.answer") {
            HandleWebRTCAnswer(client, params, id);
        } else if (method == "webrtc.iceCandidate") {
            HandleWebRTCIceCandidate(client, params, id);
        } else if (method == "webrtc.ready") {
            HandleWebRTCReady(client, params, id);
        } else {
            // 返回方法不存在错误
            SendRpcError(client, id, -32601, "Method not found", nlohmann::json::object());
        }
    }
    // 处理RPC通知（不包含id字段）
    else if (msg.contains("method") && !msg.contains("id")) {
        std::string method = msg["method"];
        auto params = msg.value("params", nlohmann::json::object());

        if (method == "webrtc.iceCandidate") {
            HandleWebRTCIceCandidateNotification(client, params);
        } else if (method == "webrtc.ready") {
            EL_DEBUG("Received webrtc.ready notification from client");
            // webrtc.ready is just a notification, no response needed
        } else {
            EL_WARN("Unknown notification method: {}", method);
        }
    }
    // 处理RPC响应（包含result或error字段）
    else if (msg.contains("id") && (msg.contains("result") || msg.contains("error"))) {
        // 这里可以处理对之前发送的请求的响应
        // 当前实现中服务端主要发送通知和响应，不发送请求，所以这里暂时只记录日志
        EL_INFO("Received RPC response for id: {}", msg["id"].dump());
    } else {
        EL_ERROR("Invalid JSON-RPC message format");
    }
}

/**
 * @brief Handle a "webrtc.answer" RPC request: parse the client's SDP answer
 *        and apply it as the remote description of this client's webrtcbin.
 * @param client Client connection the answer belongs to.
 * @param params RPC params; must contain "sdp" and "type" (type must be "answer").
 * @param id RPC request id, echoed in the success or error response.
 */
void StreamGstWebRTC::HandleWebRTCAnswer(std::shared_ptr<ClientConnection> client,
                                         const nlohmann::json &params,
                                         const nlohmann::json &id)
{
    if (!params.contains("sdp") || !params.contains("type")) {
        SendRpcError(client, id, -32602, "Invalid parameters: missing sdp or type", nlohmann::json::object());
        return;
    }

    std::string sdpStr = params["sdp"];
    std::string typeStr = params["type"];

    // Only answer-type SDP is accepted — this server is always the offerer.
    if (typeStr == "answer") {
        GstSDPMessage *sdp;
        gst_sdp_message_new(&sdp);

        if (gst_sdp_message_parse_buffer((guint8 *)sdpStr.c_str(), sdpStr.length(), sdp) != GST_SDP_OK) {
            SendRpcError(client, id, -32603, "Failed to parse SDP", nlohmann::json::object());
            gst_sdp_message_free(sdp);
            return;
        }

        // desc takes ownership of sdp (transfer-full per the GStreamer API),
        // so freeing desc below also frees the parsed SDP message.
        GstWebRTCSessionDescription *desc = gst_webrtc_session_description_new(GST_WEBRTC_SDP_TYPE_ANSWER, sdp);

        GstElement *webrtc = gst_bin_get_by_name(GST_BIN(client->pipeline), "webrtc");
        if (webrtc) {
            g_signal_emit_by_name(webrtc, "set-remote-description", desc, nullptr);
            gst_webrtc_session_description_free(desc);
            g_object_unref(webrtc);

            // Acknowledge the request.
            SendRpcResponse(client, id, nlohmann::json::object({{"status", "success"}}));
        } else {
            SendRpcError(client, id, -32603, "WebRTC element not found", nlohmann::json::object());
            gst_webrtc_session_description_free(desc);
        }
    } else {
        SendRpcError(client, id, -32602, "Invalid SDP type: expected 'answer'",
                     nlohmann::json::object({{"received_type", typeStr}}));
    }

    // Timing is recorded and printed even on error paths (diagnostic output).
    client->timeCost.Mark("Received Answer");
    client->timeCost.Print();
}

/**
 * @brief Handle a "webrtc.iceCandidate" RPC request (request form, with id):
 *        feed the remote ICE candidate to this client's webrtcbin and reply.
 * @param client Client connection the candidate belongs to.
 * @param params RPC params; must contain "candidate" and "sdpMLineIndex".
 * @param id RPC request id, echoed in the success or error response.
 */
void StreamGstWebRTC::HandleWebRTCIceCandidate(std::shared_ptr<ClientConnection> client,
                                               const nlohmann::json &params,
                                               const nlohmann::json &id)
{
    if (!params.contains("candidate") || !params.contains("sdpMLineIndex")) {
        SendRpcError(client, id, -32602, "Invalid parameters: missing candidate or sdpMLineIndex",
                     nlohmann::json::object());
        return;
    }

    const std::string candidateStr = params["candidate"];
    const int mlineIndex = params["sdpMLineIndex"].get<int>();

    GstElement *webrtcElem = gst_bin_get_by_name(GST_BIN(client->pipeline), "webrtc");
    if (webrtcElem == nullptr) {
        SendRpcError(client, id, -32603, "WebRTC element not found", nlohmann::json::object());
        return;
    }

    g_signal_emit_by_name(webrtcElem, "add-ice-candidate", mlineIndex, candidateStr.c_str());
    g_object_unref(webrtcElem);

    // Acknowledge the request.
    SendRpcResponse(client, id, nlohmann::json::object({{"status", "success"}}));
}

/**
 * @brief Handle a "webrtc.iceCandidate" notification (no id, no response):
 *        feed the remote ICE candidate to this client's webrtcbin.
 * @param client Client connection the candidate belongs to.
 * @param params RPC params; must contain "candidate" and "sdpMLineIndex".
 */
void StreamGstWebRTC::HandleWebRTCIceCandidateNotification(std::shared_ptr<ClientConnection> client,
                                                           const nlohmann::json &params)
{
    if (!params.contains("candidate") || !params.contains("sdpMLineIndex")) {
        EL_ERROR("Invalid ICE candidate notification: missing candidate or sdpMLineIndex");
        return;
    }

    const std::string candidateStr = params["candidate"];
    const int mlineIndex = params["sdpMLineIndex"].get<int>();

    GstElement *webrtcElem = gst_bin_get_by_name(GST_BIN(client->pipeline), "webrtc");
    if (webrtcElem == nullptr) {
        EL_ERROR("WebRTC element not found for ICE candidate notification");
        return;
    }

    g_signal_emit_by_name(webrtcElem, "add-ice-candidate", mlineIndex, candidateStr.c_str());
    g_object_unref(webrtcElem);
}

/**
 * @brief Handle a "webrtc.ready" RPC request by acknowledging it.
 * @param client Client connection that signalled readiness.
 * @param params RPC params (unused).
 * @param id RPC request id, echoed in the response.
 */
void StreamGstWebRTC::HandleWebRTCReady(std::shared_ptr<ClientConnection> client,
                                        [[maybe_unused]] const nlohmann::json &params,
                                        const nlohmann::json &id)
{
    // Acknowledge with a "ready" status payload.
    auto result = nlohmann::json::object({{"status", "ready"}});
    SendRpcResponse(client, id, result);

    EL_INFO("Client ready signal received");
}

/**
 * @brief Send a JSON-RPC 2.0 success response to a client.
 * @param client Client connection to send to.
 * @param id Request id being answered.
 * @param result Result payload.
 */
void StreamGstWebRTC::SendRpcResponse(std::shared_ptr<ClientConnection> client,
                                      const nlohmann::json &id,
                                      const nlohmann::json &result)
{
    const nlohmann::json response{
        {"jsonrpc", "2.0"},
        {"id", id},
        {"result", result},
    };

    SendWebSocketTextMessage(client->wsConn, response.dump());
}

/**
 * @brief Send a JSON-RPC 2.0 error response to a client.
 * @param client Client connection to send to.
 * @param id Request id being answered.
 * @param code JSON-RPC error code (e.g. -32601 method not found).
 * @param message Human-readable error message.
 * @param data Optional extra error data; omitted from the payload when null.
 */
void StreamGstWebRTC::SendRpcError(std::shared_ptr<ClientConnection> client,
                                   const nlohmann::json &id,
                                   int code,
                                   const std::string &message,
                                   const nlohmann::json &data)
{
    nlohmann::json errorObj{
        {"code", code},
        {"message", message},
    };
    if (!data.is_null()) {
        errorObj["data"] = data;
    }

    const nlohmann::json response{
        {"jsonrpc", "2.0"},
        {"id", id},
        {"error", errorObj},
    };

    SendWebSocketTextMessage(client->wsConn, response.dump());
}

/**
 * @brief Send a JSON-RPC 2.0 notification (no id, so no response is expected).
 * @param client Client connection to send to.
 * @param method Notification method name.
 * @param params Notification parameters.
 */
void StreamGstWebRTC::SendRpcNotification(std::shared_ptr<ClientConnection> client,
                                          const std::string &method,
                                          const nlohmann::json &params)
{
    const nlohmann::json notification{
        {"jsonrpc", "2.0"},
        {"method", method},
        {"params", params},
    };

    SendWebSocketTextMessage(client->wsConn, notification.dump());
}

/**
 * @brief Push one media frame into the appropriate appsrc (video or audio),
 *        applying flow control and zero-copy buffer wrapping.
 * @param frame Media frame to push; the frame shared_ptr is kept alive via the
 *        buffer-release context until GStreamer frees the wrapped buffer.
 * @param stream Per-client media data holding the appsrc elements.
 */
void StreamGstWebRTC::PushFrame(const Media::MediaFramePtr &frame, MediaDataPtr stream)
{
    if (!stream || !frame) {
        return;
    }

    Utils::TimeCost timeCost;
    timeCost.Mark("frame_start");

    GstAppSrc *appsrc = nullptr;
    bool isVideo = false;

    if (frame->IsVideoFrame()) {
        appsrc = GST_APP_SRC(stream->appsrcVideo);
        isVideo = true;
    } else if (frame->IsAudioFrame() && audioEnable_) {
        appsrc = GST_APP_SRC(stream->appsrcAudio);
        isVideo = false;
    } else {
        // Audio frame with audio disabled, or an unsupported frame type: drop.
        // (A debug branch previously here was unreachable — an audio frame
        // with audioEnable_ set always takes the branch above.)
        return;
    }

    if (!appsrc) {
        return;
    }

    // Flow control: ask whether this frame should be accepted at all.
    if (stream->flowControl) {
        auto decision = stream->flowControl->ShouldAcceptFrame(frame);
        if (decision == StreamFlowControl::Decision::DROP) {
            return; // drop the frame
        }
    }

    // Fast path: skip the full state query when the cached state is PLAYING.
    if (G_UNLIKELY(GST_STATE(appsrc) != GST_STATE_PLAYING)) {
        GstState state;
        GstStateChangeReturn stateRet = gst_element_get_state(GST_ELEMENT(appsrc), &state, nullptr, 0);
        if (stateRet == GST_STATE_CHANGE_FAILURE || state != GST_STATE_PLAYING) {
            return;
        }
    }

    timeCost.Mark("state_check");

    // Release context keeps the frame alive and notifies flow control when
    // GStreamer is done with the wrapped memory.
    auto *context = new BufferReleaseContext{frame, stream->flowControl, isVideo};

    // Zero-copy wrap of the frame's memory; the destroy-notify fires when the
    // buffer is released downstream.
    GstBuffer *buffer = gst_buffer_new_wrapped_full(
        GST_MEMORY_FLAG_READONLY,
        (gpointer)frame->GetBuffer(),
        frame->GetLength(),
        0,
        frame->GetLength(),
        context,
        [](gpointer data) {
            auto *ctx = static_cast<BufferReleaseContext *>(data);
            // Tell flow control the frame's memory has been released.
            if (ctx->flowControl) {
                ctx->flowControl->OnFrameReleased(ctx->isVideo);
            }
            delete ctx;
        });

    if (!buffer) {
        EL_ERROR("Failed to create wrapped GstBuffer for {}", frame->ToString().c_str());
        delete context;
        return;
    }

    timeCost.Mark("buffer_create");

    // Mark key frames: only I-frames clear the delta-unit flag.
    if (frame->GetFrameType() == MEDIA_FRAME_I) {
        GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
    } else {
        GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
    }

    // push_buffer takes ownership of the buffer even on failure, so no unref
    // is needed on the error path.
    GstFlowReturn flowRet = gst_app_src_push_buffer(GST_APP_SRC(appsrc), buffer);
    timeCost.Mark("push_buffer");

    if (G_UNLIKELY(flowRet != GST_FLOW_OK)) {
        EL_ERROR("Failed to push buffer: {}", gst_flow_get_name(flowRet));
        return;
    }

    // Tell flow control a frame made it into the pipeline.
    if (stream->flowControl) {
        stream->flowControl->OnFramePushed(isVideo);
    }

    timeCost.Print(true);
}

/**
 * @brief Serialize a session description and send it to the peer as a
 *        JSON-RPC notification.
 * @param desc WebRTC session description to send (not consumed).
 * @param client Client connection to notify.
 */
void StreamGstWebRTC::SendSdpToPeer(GstWebRTCSessionDescription *desc, std::shared_ptr<ClientConnection> client)
{
    if (!client || !desc || !desc->sdp) {
        EL_ERROR("Invalid client connection or SDP");
        return;
    }

    gchar *sdpText = gst_sdp_message_as_text(desc->sdp);
    if (sdpText == nullptr) {
        EL_ERROR("Failed to convert SDP to string");
        return;
    }

    nlohmann::json params;
    params["type"] = (desc->type == GST_WEBRTC_SDP_TYPE_OFFER) ? "offer" : "answer";
    params["sdp"] = std::string(sdpText);
    g_free(sdpText);

    // NOTE(review): the notification method is always "webrtc.offer" even when
    // params["type"] is "answer" — presumably fine because this server only
    // ever sends offers; confirm if answer-sending is ever added.
    SendRpcNotification(client, "webrtc.offer", params);
}

/**
 * @brief Forward a locally gathered ICE candidate to the peer as a
 *        "webrtc.iceCandidate" JSON-RPC notification.
 * @param mlineindex SDP media line index the candidate belongs to.
 * @param candidate ICE candidate string (not owned, not freed here).
 * @param client Client connection to notify.
 */
void StreamGstWebRTC::SendIceCandidateToPeer(guint mlineindex,
                                             gchar *candidate,
                                             std::shared_ptr<ClientConnection> client)
{
    if (!client || !candidate) {
        EL_ERROR("Invalid client connection or ICE candidate");
        return;
    }

    nlohmann::json params{
        {"candidate", candidate},
        {"sdpMLineIndex", mlineindex},
    };

    SendRpcNotification(client, "webrtc.iceCandidate", params);
}

/**
 * @brief Promise callback fired when webrtcbin has created an offer: set it as
 *        the local description and forward it to the peer.
 * @param promise Completed promise; consumed (unreffed) here.
 * @param webrtc The webrtcbin element that produced the offer.
 * @param client Raw client pointer supplied when the promise was created; it is
 *        re-resolved to a shared_ptr under the connections lock because the
 *        client may have disconnected in the meantime.
 */
void StreamGstWebRTC::OnOfferCreated(GstPromise *promise, GstElement *webrtc, ClientConnection *client)
{
    client->timeCost.Mark("OnOfferCreated");

    GstWebRTCSessionDescription *offer = nullptr;
    // The reply structure is owned by the promise; gst_structure_get() hands
    // back our own copy of the boxed offer, which is freed at the end of this
    // function after the promise has been unreffed.
    const GstStructure *reply = gst_promise_get_reply(promise);
    gst_structure_get(reply, "offer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &offer, nullptr);
    gst_promise_unref(promise);

    // Fire-and-forget set-local-description: the result is not needed, so the
    // promise is interrupted immediately instead of being waited on.
    GstPromise *localDescPromise = gst_promise_new();
    g_signal_emit_by_name(webrtc, "set-local-description", offer, localDescPromise);
    gst_promise_interrupt(localDescPromise);
    gst_promise_unref(localDescPromise);

    // Look up the shared_ptr that still owns this raw client pointer, if any.
    std::shared_ptr<ClientConnection> clientPtr;
    {
        std::lock_guard<std::mutex> lock(connectionsMutex_);
        for (auto &pair : connections_) {
            if (pair.second.get() == client) {
                clientPtr = pair.second;
                break;
            }
        }
    }

    if (clientPtr) {
        SendSdpToPeer(offer, clientPtr);
    } else {
        EL_ERROR("Failed to find ClientConnection for offer callback");
    }

    gst_webrtc_session_description_free(offer);
}

/**
 * @brief "on-negotiation-needed" handler: ask webrtcbin to create an offer.
 *        The result is delivered asynchronously to OnOfferCreated via the
 *        promise change function.
 * @param webrtc The webrtcbin element requesting negotiation.
 * @param client Raw client pointer passed through to the promise callback.
 */
void StreamGstWebRTC::OnNegotiationNeeded(GstElement *webrtc, ClientConnection *client)
{
    client->timeCost.Mark("OnNegotiationNeeded");

    GstPromise *promise = gst_promise_new_with_change_func(
        +[](GstPromise *p, gpointer userData) {
            auto *conn = static_cast<ClientConnection *>(userData);
            static_cast<StreamGstWebRTC *>(g_object_get_data(G_OBJECT(conn->webrtcElement), "self"))
                ->OnOfferCreated(p, conn->webrtcElement, conn);
        },
        client, nullptr);

    // This server only sends media, so don't ask the peer to send any back.
    GstStructure *offer_options = gst_structure_new("offer_options",
                                                    "offer-to-receive-video", G_TYPE_BOOLEAN, FALSE,
                                                    "offer-to-receive-audio", G_TYPE_BOOLEAN, FALSE,
                                                    NULL);

    g_signal_emit_by_name(webrtc, "create-offer", offer_options, promise);

    // BUG FIX: g_signal_emit_by_name() does NOT take ownership of boxed
    // arguments — the marshaller passes the handler a copy — so the caller
    // must free its own structure or it leaks on every negotiation.
    gst_structure_free(offer_options);
}

/**
 * @brief "on-ice-candidate" handler: forward a locally gathered candidate to
 *        the peer, if the client connection is still registered.
 * @param webrtc The webrtcbin element (unused).
 * @param mlineindex SDP media line index of the candidate.
 * @param candidate ICE candidate string.
 * @param client Raw client pointer supplied at signal-connect time.
 */
void StreamGstWebRTC::OnIceCandidate([[maybe_unused]] GstElement *webrtc,
                                     guint mlineindex,
                                     gchar *candidate,
                                     ClientConnection *client)
{
    // Re-resolve the raw pointer to its owning shared_ptr under the lock; the
    // connection may already have been removed if the client disconnected.
    std::shared_ptr<ClientConnection> target;
    {
        std::lock_guard<std::mutex> lock(connectionsMutex_);
        for (const auto &entry : connections_) {
            if (entry.second.get() == client) {
                target = entry.second;
                break;
            }
        }
    }

    if (!target) {
        EL_ERROR("Failed to find ClientConnection for ICE candidate callback");
        return;
    }

    SendIceCandidateToPeer(mlineindex, candidate, target);
}

/**
 * @brief Parse an algorithm detection result and broadcast the bounding boxes
 *        to every connected client.
 * @param result Detection result JSON: optional "timestamp", "frame_id" and an
 *        "objects" array of boxes with normalized (0-1.0) coordinates.
 */
void StreamGstWebRTC::HandleAlgoResult(const nlohmann::json &result)
{
    if (result.is_null() || !result.is_object()) {
        EL_WARN("Received invalid algorithm result");
        return;
    }

    EL_DEBUG("Received algorithm result: {}", result.dump(4));

    // Convert the JSON payload into the internal IntelligentResult form.
    IntelligentResult parsed;
    parsed.timestamp = result.value("timestamp", 0ULL);
    parsed.frameId = result.value("frame_id", 0U);

    if (result.contains("objects") && result["objects"].is_array()) {
        for (const auto &item : result["objects"]) {
            if (!item.is_object()) {
                continue;
            }

            BoundingBox box;
            box.confidence = item.value("confidence", 0.0f);
            // Coordinates arrive normalized to the 0-1.0 float range.
            box.x = item.value("x", 0.0f);
            box.y = item.value("y", 0.0f);
            box.width = item.value("width", 0.0f);
            box.height = item.value("height", 0.0f);
            box.label = item.value("label", "");
            box.classId = 0; // not provided by the algorithm; default

            // Keep only boxes with a positive confidence and non-empty area.
            const bool usable = box.confidence > 0.0f && box.width > 0.0f && box.height > 0.0f;
            if (usable) {
                parsed.boxes.push_back(box);
            }
        }
    }

    // Snapshot the recipients under the lock, then send outside it.
    std::vector<std::shared_ptr<ClientConnection>> recipients;
    {
        std::lock_guard<std::mutex> lock(connectionsMutex_);
        recipients.reserve(connections_.size());
        for (const auto &entry : connections_) {
            recipients.push_back(entry.second);
        }
    }

    for (const auto &recipient : recipients) {
        if (recipient && recipient->wsConn) {
            SendIntelligentBoxes(recipient, parsed);
        }
    }
}

/**
 * @brief Send detection bounding boxes to one client as an "ai.boundingBoxes"
 *        JSON-RPC notification.
 * @param client Client connection to notify.
 * @param result Parsed detection result to serialize.
 */
void StreamGstWebRTC::SendIntelligentBoxes(std::shared_ptr<ClientConnection> client, const IntelligentResult &result)
{
    auto boxes = nlohmann::json::array();
    for (const auto &box : result.boxes) {
        boxes.push_back({
            {"x", box.x},
            {"y", box.y},
            {"width", box.width},
            {"height", box.height},
            {"confidence", box.confidence},
            {"label", box.label},
            {"classId", box.classId},
        });
    }

    nlohmann::json params{
        {"timestamp", result.timestamp},
        {"frameId", result.frameId},
        {"boxes", std::move(boxes)},
    };

    SendRpcNotification(client, "ai.boundingBoxes", params);
}

/**
 * @brief Map a sample rate to its MPEG-4 AudioSpecificConfig
 *        sampling_frequency_index (used when building AAC codec_data).
 * @param sampleRate Sample rate in Hz.
 * @return uint8_t Index into the standard AAC sample-rate table; unknown rates
 *         fall back to index 11 (8000 Hz).
 */
uint8_t StreamGstWebRTC::GetAacSampleRateIndex(int sampleRate)
{
    // Standard AAC sample-rate table: the array position IS the index.
    static const int kAacRates[] = {96000, 88200, 64000, 48000, 44100, 32000, 24000,
                                    22050, 16000, 12000, 11025, 8000,  7350};
    const uint8_t count = static_cast<uint8_t>(sizeof(kAacRates) / sizeof(kAacRates[0]));

    for (uint8_t idx = 0; idx < count; ++idx) {
        if (kAacRates[idx] == sampleRate) {
            return idx;
        }
    }
    return 11; // unknown rate: default to the 8000 Hz index
}



} // namespace StreamService
} // namespace El
