#include "stream_gst_webrtc.h"
#include <gst/app/gstappsrc.h>
#include "base_log.h"
#include <iostream>
#include <nlohmann/json.hpp>
#include "common_config_manager.h"
#include "camera_video_enc.h"

namespace El {
namespace StreamServer {

StreamGstWebRTC &StreamGstWebRTC::GetInstance()
{
    // Meyers singleton: function-local static is initialized exactly once
    // and is thread-safe since C++11.
    static StreamGstWebRTC singleton;
    return singleton;
}

// Default-construct in the stopped state; audio support is decided later in Start().
StreamGstWebRTC::StreamGstWebRTC() : pipeline_(nullptr), running_(false), audio_enable_(false) {}

// Tear down all client pipelines and GStreamer state on destruction.
StreamGstWebRTC::~StreamGstWebRTC()
{
    Stop();
}

/// Starts the WebRTC signalling service.
///
/// Reads the "audio" config section (audio stays disabled when the section or
/// its "enable" key is missing) and registers the WebSocket endpoint that
/// carries SDP/ICE signalling.
///
/// @return true on success; false if the WebSocket handler could not be
///         registered (the service is left in the stopped state).
bool StreamGstWebRTC::Start()
{
    // Read the audio configuration. Guard the key lookup: a non-empty config
    // without "enable" would otherwise make get<bool>() throw.
    auto config = Common::ConfigManager::GetInstance().GetConfig("audio");
    if (!config.empty() && config.contains("enable")) {
        audio_enable_ = config["enable"].get<bool>();
    }

    // Register the WebSocket signalling handler BEFORE flagging the service
    // as running, so a failed Start() does not leave running_ == true.
    auto &http_server = El::WebServer::IHttpServer::GetInstance();
    if (!http_server.RegisterWebSocketHandler(
            "^/v1/webrtc$", [this](std::shared_ptr<El::WebServer::WebSocketEvent> &event) { HandleWebSocketEvent(event); })) {
        LOG_ERROR("Failed to register WebSocket handler");
        running_ = false;
        return false;
    }

    running_ = true;
    LOG_INFO("WebRTC server started");
    return true;
}

void StreamGstWebRTC::Stop()
{
    running_ = false;

    // 清理所有客户端连接
    {
        std::lock_guard<std::mutex> lock(connections_mutex_);
        for (auto &pair : connections_) {
            CleanupClientConnection(pair.second);
        }
        connections_.clear();
    }

    gst_deinit();
}

void StreamGstWebRTC::SetupPipelineForClient(std::shared_ptr<ClientConnection> client_conn)
{
    client_conn->time_cost.Mark("SetupPipelineForClient");

    std::string pipeline_str =
        "appsrc name=video_src ! h264parse ! "
        "rtph264pay name=pay0 pt=96 ! "
        "webrtcbin name=webrtc ";

    if (audio_enable_) {
        pipeline_str +=
            "appsrc name=audio_src ! "
            "aacparse ! avdec_aac ! audioconvert ! audioresample ! opusenc ! "
            "rtpopuspay name=pay1 pt=97 ! "
            "webrtc.";
    }

    client_conn->pipeline = gst_parse_launch(pipeline_str.c_str(), nullptr);
    if (!client_conn->pipeline) {
        LOG_ERROR("Failed to create pipeline");
        CleanupClientConnection(client_conn);
        return;
    }
    client_conn->time_cost.Mark("gst_parse_launch");

    // 获取并配置 appsrc 元素
    GstElement *video_src = gst_bin_get_by_name(GST_BIN(client_conn->pipeline), "video_src");
    GstElement *audio_src = nullptr;
    if (audio_enable_) {
        audio_src = gst_bin_get_by_name(GST_BIN(client_conn->pipeline), "audio_src");
    }

    if (!video_src || (audio_enable_ && !audio_src)) {
        LOG_ERROR("Failed to get appsrc elements");
        CleanupClientConnection(client_conn);
        return;
    }

    // 配置视频和音频源
    auto stream = std::make_shared<MediaData>();
    stream->appsrc_video = video_src;
    stream->appsrc_audio = audio_src;
    stream->channel = 0;
    stream->stream_type = 0;

    // 配置 appsrc 参数
    g_object_set(G_OBJECT(video_src), "format", GST_FORMAT_TIME, "is-live", TRUE, "do-timestamp", TRUE, "stream-type",
                 GST_APP_STREAM_TYPE_STREAM, nullptr);

    if (audio_enable_ && audio_src) {
        g_object_set(G_OBJECT(audio_src), "format", GST_FORMAT_TIME, "is-live", TRUE, "do-timestamp", TRUE,
                     "stream-type", GST_APP_STREAM_TYPE_STREAM, nullptr);
    }

    // 获取编码格式
    auto &video_enc = Hal::ICameraVideoEnc::GetInstance(stream->channel, stream->stream_type);
    VideoEncFormat enc_format;
    if (!video_enc.GetEncFormat(enc_format)) {
        LOG_ERROR("Failed to get video encode format for channel {} stream {}", stream->channel, stream->stream_type);
        CleanupClientConnection(client_conn);
        return;
    }

    // 设置视频caps，使用实际的编码参数
    GstCaps *video_caps =
        gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "byte-stream", "alignment", G_TYPE_STRING,
                            "au", "width", G_TYPE_INT, (gint)enc_format.width, "height", G_TYPE_INT,
                            (gint)enc_format.height, "framerate", GST_TYPE_FRACTION, (gint)enc_format.fps, 1, NULL);
    if (!video_caps) {
        LOG_ERROR("Failed to create video caps");
        CleanupClientConnection(client_conn);
        return;
    }
    gst_app_src_set_caps(GST_APP_SRC(video_src), video_caps);
    gst_caps_unref(video_caps);

    if (audio_enable_ && audio_src) {
        GstCaps *audio_caps =
            gst_caps_new_simple("audio/mpeg", "mpegversion", G_TYPE_INT, 4, "stream-format", G_TYPE_STRING, "adts",
                                "rate", G_TYPE_INT, 8000, "channels", G_TYPE_INT, 1, nullptr);
        gst_app_src_set_caps(GST_APP_SRC(audio_src), audio_caps);
        gst_caps_unref(audio_caps);
    }
    client_conn->time_cost.Mark("gst_app_src_set_caps");

    // 启动流并注册回调
    stream->streamSource = Media::StreamSource::Create(stream->channel, stream->stream_type);
    stream->streamSource->SetInfo(60, "webrtc frame data");
    stream->stream_handle = stream->streamSource->Register(
        [this, stream](const Media::MediaFramePtr &frame) { this->PushFrame(frame, stream); });
    stream->streamSource->Start();

    // 保存media_data到client_conn中
    client_conn->media_data = stream;

    // 配置 WebRTC
    GstElement *webrtc = gst_bin_get_by_name(GST_BIN(client_conn->pipeline), "webrtc");
    if (!webrtc) {
        LOG_ERROR("Failed to get webrtcbin element");
        CleanupClientConnection(client_conn);
        return;
    }

    // 优化WebRTC配置以减少延迟和内存占用
    g_object_set(G_OBJECT(webrtc), "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, // max-bundle
                 "latency", 0,                                                           // 最小延迟
                 NULL);

    // 保存webrtc元素引用
    client_conn->webrtc_element = webrtc;

    // 连接信号，传递client_conn作为用户数据
    g_signal_connect(webrtc, "on-negotiation-needed", G_CALLBACK(+[](GstElement *element, gpointer user_data) {
                         auto conn = static_cast<ClientConnection *>(user_data);
                         static_cast<StreamGstWebRTC *>(g_object_get_data(G_OBJECT(element), "self"))
                             ->OnNegotiationNeeded(element, conn);
                     }),
                     client_conn.get());

    g_signal_connect(webrtc, "on-ice-candidate",
                     G_CALLBACK(+[](GstElement *element, guint mlineindex, gchar *candidate, gpointer user_data) {
                         auto conn = static_cast<ClientConnection *>(user_data);
                         static_cast<StreamGstWebRTC *>(g_object_get_data(G_OBJECT(element), "self"))
                             ->OnIceCandidate(element, mlineindex, candidate, conn);
                     }),
                     client_conn.get());

    // 存储this指针以便在回调中使用
    g_object_set_data(G_OBJECT(webrtc), "self", this);

    // 启动pipeline
    GstStateChangeReturn ret = gst_element_set_state(client_conn->pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        LOG_ERROR("Failed to start pipeline");
        CleanupClientConnection(client_conn);
    }
}

void StreamGstWebRTC::CleanupClientConnection(std::shared_ptr<ClientConnection> client)
{
    if (!client || !client->ws_conn) {
        LOG_ERROR("Invalid client connection");
        return;
    }

    // 先停止媒体流
    if (client->media_data) {
        // 停止streamSource之前先解除注册回调
        if (client->media_data->streamSource) {
            client->media_data->streamSource->Unregister(client->media_data->stream_handle);
            client->media_data->streamSource->Stop();
        }

        // 设置pipeline状态为NULL，这会导致appsrc停止接收数据
        if (client->pipeline) {
            gst_element_set_state(client->pipeline, GST_STATE_NULL);
            // 等待pipeline状态变化完成
            gst_element_get_state(client->pipeline, nullptr, nullptr, GST_CLOCK_TIME_NONE);
        }

        // 清理appsrc
        if (client->media_data->appsrc_video) {
            gst_object_unref(client->media_data->appsrc_video);
            client->media_data->appsrc_video = nullptr;
        }
        if (client->media_data->appsrc_audio) {
            gst_object_unref(client->media_data->appsrc_audio);
            client->media_data->appsrc_audio = nullptr;
        }
    }

    // 清理WebRTC相关资源
    if (client->webrtc_element) {
        gst_element_set_state(GST_ELEMENT(client->webrtc_element), GST_STATE_NULL);
        gst_element_get_state(GST_ELEMENT(client->webrtc_element), nullptr, nullptr, GST_CLOCK_TIME_NONE);
        g_object_unref(client->webrtc_element);
        client->webrtc_element = nullptr;
    }

    // 清理pipeline
    if (client->pipeline) {
        gst_object_unref(client->pipeline);
        client->pipeline = nullptr;
    }

    {
        std::lock_guard<std::mutex> lock(connections_mutex_);
        connections_.erase(client->ws_conn);
    }

    LOG_INFO("Client connection cleaned up");
}

/// Dispatches WebSocket lifecycle events: Connect creates a client and its
/// pipeline, Message feeds the signalling handler, Disconnect tears down.
void StreamGstWebRTC::HandleWebSocketEvent(std::shared_ptr<El::WebServer::WebSocketEvent> &event)
{
    // Locked lookup of the client registered for a raw connection handle;
    // returns nullptr when the connection is unknown.
    auto find_client = [this](const auto &conn) -> std::shared_ptr<ClientConnection> {
        std::lock_guard<std::mutex> lock(connections_mutex_);
        auto it = connections_.find(conn);
        if (it == connections_.end()) {
            return nullptr;
        }
        return it->second;
    };

    switch (event->type) {
        case El::WebServer::WebSocketEventType::Connect: {
            LOG_INFO("New WebSocket connection established");
            auto client = std::make_shared<ClientConnection>();
            client->ws_conn = event->connection;
            {
                std::lock_guard<std::mutex> lock(connections_mutex_);
                connections_[event->connection] = client;
            }
            SetupPipelineForClient(client);
            break;
        }
        case El::WebServer::WebSocketEventType::Message: {
            if (auto client = find_client(event->connection)) {
                HandleWebSocketMessage(client, event->message);
            } else {
                LOG_ERROR("Client connection not found");
            }
            break;
        }
        case El::WebServer::WebSocketEventType::Disconnect: {
            LOG_INFO("WebSocket connection closed");
            if (auto client = find_client(event->connection)) {
                CleanupClientConnection(client);
            } else {
                LOG_ERROR("Client connection not found");
            }
            break;
        }
        default:
            LOG_ERROR("Unknown WebSocket event type: {}", static_cast<int>(event->type));
            break;
    }
}

/// Logs an outgoing signalling message and forwards it over the WebSocket.
void StreamGstWebRTC::SendWebSocketTextMessage(void *connection, const std::string &message)
{
    LOG_INFO("{}", message);
    auto &server = El::WebServer::IHttpServer::GetInstance();
    server.SendWebSocketTextMessage(connection, message);
}

void StreamGstWebRTC::HandleWebSocketMessage(std::shared_ptr<ClientConnection> client, const std::string &message)
{
    nlohmann::json msg = nlohmann::json::parse(message, nullptr, false);
    if (msg.is_discarded()) {
        LOG_ERROR("Failed to parse JSON message {}", message.c_str());
        return;
    }
    LOG_INFO("Received message: {}", msg.dump(4).c_str());
    if (msg.contains("sdp")) {
        std::string sdp_str = msg["sdp"];
        std::string type_str = msg["type"];

        // 只处理 answer 类型的 SDP
        if (type_str == "answer") {
            GstSDPMessage *sdp;
            gst_sdp_message_new(&sdp);
            gst_sdp_message_parse_buffer((guint8 *)sdp_str.c_str(), sdp_str.length(), sdp);

            GstWebRTCSessionDescription *desc = gst_webrtc_session_description_new(GST_WEBRTC_SDP_TYPE_ANSWER, sdp);

            GstElement *webrtc = gst_bin_get_by_name(GST_BIN(client->pipeline), "webrtc");
            if (webrtc) {
                g_signal_emit_by_name(webrtc, "set-remote-description", desc, nullptr);
                gst_webrtc_session_description_free(desc);
                g_object_unref(webrtc);
            }
        } else {
            LOG_INFO("Ignoring SDP of type: {}", type_str);
        }
        client->time_cost.Mark("Received Answer");
        client->time_cost.Print();
    } else if (msg.contains("ice")) {
        // 添加 ICE 候选者处理，以支持更复杂的网络环境
        auto ice = msg["ice"];
        if (!ice.is_null()) {
            GstElement *webrtc = gst_bin_get_by_name(GST_BIN(client->pipeline), "webrtc");
            if (webrtc) {
                const std::string candidate = ice["candidate"];
                const int sdpMLineIndex = ice["sdpMLineIndex"].get<int>();
                g_signal_emit_by_name(webrtc, "add-ice-candidate", sdpMLineIndex, candidate.c_str());
                g_object_unref(webrtc);
            }
        }
    }
}

/// Pushes one media frame into the appropriate appsrc of a client pipeline.
/// Video I/P frames go to the video source; AAC frames to the audio source
/// (dropped when audio is disabled). Frames are wrapped zero-copy.
void StreamGstWebRTC::PushFrame(const Media::MediaFramePtr &frame, MediaDataPtr stream)
{
    if (!frame || !stream) {
        return;
    }

    // Route by frame type to the matching source element.
    const auto frame_type = frame->GetFrameType();
    GstAppSrc *appsrc = nullptr;
    if (frame_type == MEDIA_FRAME_I || frame_type == MEDIA_FRAME_P) {
        appsrc = GST_APP_SRC(stream->appsrc_video);
    } else if (frame_type == MEDIA_FRAME_AAC) {
        if (!audio_enable_) {
            return;
        }
        appsrc = GST_APP_SRC(stream->appsrc_audio);
    } else {
        LOG_WARN("Ignoring unknown frame type: {}", frame_type);
        return;
    }

    if (appsrc == nullptr) {
        return;
    }

    // Cheap cached-state probe first; only if it is not PLAYING do a
    // non-blocking (timeout 0) state query to confirm.
    if (G_UNLIKELY(GST_STATE(appsrc) != GST_STATE_PLAYING)) {
        GstState current;
        const GstStateChangeReturn rc = gst_element_get_state(GST_ELEMENT(appsrc), &current, nullptr, 0);
        if (rc == GST_STATE_CHANGE_FAILURE || current != GST_STATE_PLAYING) {
            return;
        }
    }

    // Zero-copy: wrap the frame's buffer; a heap-held MediaFramePtr keeps
    // the underlying data alive until GStreamer releases the GstBuffer.
    GstBuffer *buffer = gst_buffer_new_wrapped_full(
        GST_MEMORY_FLAG_READONLY, (gpointer)frame->GetBuffer(), frame->GetLength(), 0, frame->GetLength(),
        new Media::MediaFramePtr(frame), [](gpointer data) { delete static_cast<Media::MediaFramePtr *>(data); });
    if (buffer == nullptr) {
        LOG_ERROR("Failed to create wrapped GstBuffer for {}", frame->ToString().c_str());
        return;
    }

    // Only I-frames are sync points; everything else is a delta unit.
    if (frame_type == MEDIA_FRAME_I) {
        GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
    } else {
        GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
    }

    // push_buffer takes ownership of buffer regardless of the result.
    const GstFlowReturn flow = gst_app_src_push_buffer(appsrc, buffer);
    if (G_UNLIKELY(flow != GST_FLOW_OK)) {
        LOG_ERROR("Failed to push buffer: {}", gst_flow_get_name(flow));
    }
}

/// Serializes a session description to JSON ({"type","sdp"}) and sends it to
/// the remote peer over the signalling WebSocket.
void StreamGstWebRTC::SendSdpToPeer(GstWebRTCSessionDescription *desc, void *connection)
{
    if (!connection || !desc || !desc->sdp) {
        LOG_ERROR("Invalid WebSocket connection or SDP");
        return;
    }

    gchar *sdp_text = gst_sdp_message_as_text(desc->sdp);
    if (sdp_text == nullptr) {
        LOG_ERROR("Failed to convert SDP to string");
        return;
    }

    nlohmann::json payload;
    payload["type"] = (desc->type == GST_WEBRTC_SDP_TYPE_OFFER) ? "offer" : "answer";
    payload["sdp"] = std::string(sdp_text);
    g_free(sdp_text);

    SendWebSocketTextMessage(connection, payload.dump());
}

/// Serializes a local ICE candidate as {"candidate","sdpMLineIndex"} JSON and
/// sends it to the remote peer over the signalling WebSocket.
void StreamGstWebRTC::SendIceCandidateToPeer(guint mlineindex, gchar *candidate, void *connection)
{
    if (connection == nullptr || candidate == nullptr) {
        LOG_ERROR("Invalid WebSocket connection or ICE candidate");
        return;
    }

    nlohmann::json payload;
    payload["candidate"] = candidate;
    payload["sdpMLineIndex"] = mlineindex;
    SendWebSocketTextMessage(connection, payload.dump());
}

/// Promise callback for webrtcbin's "create-offer": applies the offer as the
/// local description and forwards it to the peer over signalling.
///
/// Fix vs. original: the promise reply and the extracted offer are now
/// null-checked — a rejected/interrupted promise (or a reply without an
/// "offer" field) previously dereferenced a null description.
void StreamGstWebRTC::OnOfferCreated(GstPromise *promise, GstElement *webrtc, ClientConnection *client)
{
    client->time_cost.Mark("OnOfferCreated");

    GstWebRTCSessionDescription *offer = nullptr;
    const GstStructure *reply = gst_promise_get_reply(promise);
    if (reply) {
        gst_structure_get(reply, "offer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &offer, nullptr);
    }
    gst_promise_unref(promise);

    if (!offer) {
        LOG_ERROR("create-offer produced no offer");
        return;
    }

    // Apply locally; we don't need the result, so interrupt the promise.
    GstPromise *local_desc_promise = gst_promise_new();
    g_signal_emit_by_name(webrtc, "set-local-description", offer, local_desc_promise);
    gst_promise_interrupt(local_desc_promise);
    gst_promise_unref(local_desc_promise);

    SendSdpToPeer(offer, client->ws_conn);
    gst_webrtc_session_description_free(offer);
}

/// "on-negotiation-needed" handler: asks webrtcbin to create an offer; the
/// result is delivered asynchronously to OnOfferCreated via the promise.
void StreamGstWebRTC::OnNegotiationNeeded(GstElement *webrtc, ClientConnection *client)
{
    client->time_cost.Mark("OnNegotiationNeeded");

    // Recover the StreamGstWebRTC instance from the "self" key stashed on
    // the webrtc element when the pipeline was set up.
    auto on_offer = +[](GstPromise *p, gpointer user_data) {
        auto *conn = static_cast<ClientConnection *>(user_data);
        auto *self = static_cast<StreamGstWebRTC *>(g_object_get_data(G_OBJECT(conn->webrtc_element), "self"));
        self->OnOfferCreated(p, conn->webrtc_element, conn);
    };

    GstPromise *promise = gst_promise_new_with_change_func(on_offer, client, nullptr);
    g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
}

// "on-ice-candidate" handler: relays a locally gathered ICE candidate to the
// remote peer over the client's signalling WebSocket.
void StreamGstWebRTC::OnIceCandidate([[maybe_unused]] GstElement *webrtc,
                                     guint mlineindex,
                                     gchar *candidate,
                                     ClientConnection *client)
{
    SendIceCandidateToPeer(mlineindex, candidate, client->ws_conn);
}

} // namespace StreamServer
} // namespace El