#include "stream_gst_rtsp.h"
#include <gst/app/gstappsrc.h>
#include "base_log.h"
#include "common_media_define.h"
#include "hal_network.h"
#include <memory>
#include "common_config_manager.h"
#include "camera_video_enc.h"

namespace El {
namespace StreamServer {

// Meyers-singleton accessor: the instance is lazily constructed on first
// call and destroyed at program exit (thread-safe since C++11).
StreamGstRtsp &StreamGstRtsp::GetInstance()
{
    static StreamGstRtsp singleton;
    return singleton;
}

StreamGstRtsp::StreamGstRtsp() : loop_(nullptr), server_(nullptr) {}

// Delegates to Stop() so the main-loop thread is joined and GLib
// resources (loop, server) are released before destruction completes.
StreamGstRtsp::~StreamGstRtsp()
{
    Stop();
}

bool StreamGstRtsp::Start()
{
    auto config = Common::ConfigManager::GetInstance().GetConfig("audio");
    if (!config.empty()) {
        audio_enable_ = config["enable"].get<bool>();
    }

    server_ = gst_rtsp_server_new();
    g_object_set(server_, "service", "554", nullptr);

    GstRTSPMountPoints *mounts = gst_rtsp_server_get_mount_points(server_);
    if (!mounts) {
        LOG_ERROR("Failed to get mount points");
        return false;
    }

    // 添加多个RTSP路径
    for (uint32_t channel = 1; channel <= CHANNEL_MAX; ++channel) {
        for (uint32_t stream_type = 0; stream_type < CHANNEL_STREAM_TYPE_MAX; ++stream_type) {
            GstRTSPMediaFactory *factory = gst_rtsp_media_factory_new();
            if (!factory) {
                LOG_ERROR("Failed to create media factory for channel {} stream {}", channel, stream_type);
                continue;
            }

            if (audio_enable_) {
                gst_rtsp_media_factory_set_launch(factory,
                                                  "( appsrc name=video_src ! h264parse ! rtph264pay name=pay0 pt=96 "
                                                  "appsrc name=audio_src ! aacparse ! rtpmp4apay name=pay1 pt=97 )");
            } else {
                gst_rtsp_media_factory_set_launch(factory,
                                                  "( appsrc name=video_src ! h264parse ! rtph264pay name=pay0 pt=96 )");
            }

            gst_rtsp_media_factory_set_shared(factory, FALSE);
            g_signal_connect(factory, "media-configure", G_CALLBACK(OnNewMedia), this);

            std::string path = fmt::format("/live/{}/{}", channel, stream_type);
            gst_rtsp_mount_points_add_factory(mounts, path.c_str(), factory);

            El::Hal::NetIfInfo ifInfo = Hal::NetworkManager::GetInstance().GetNetworkInterface("eth0");
            std::string ip_address = ifInfo.ipv4Address;
            if (ip_address.empty()) {
                LOG_ERROR("Failed to get IP address for eth0");
            }
            std::string rtsp_url = fmt::format("rtsp://{}:554{}", ip_address, path);
            LOG_INFO("RTSP URL for channel {} stream {}: {}", channel, stream_type, rtsp_url);
        }
    }

    g_object_unref(mounts);

    // 先创建 GMainContext
    GMainContext *main_context = g_main_context_new();
    if (!main_context) {
        LOG_ERROR("Failed to create GMainContext for main loop thread");
        return false;
    }

    // 在创建主循环之前先附加 RTSP 服务器到我们的 context
    guint server_id = gst_rtsp_server_attach(server_, main_context);
    if (server_id == 0) {
        LOG_ERROR("Failed to attach RTSP server");
        g_main_context_unref(main_context);
        return false;
    }

    g_signal_connect(server_, "client-connected", G_CALLBACK(OnClientConnected), this);

    // 启动主循环线程
    main_loop_thread_ = std::thread([this, main_context]() {
        // 在线程中设置GMainContext
        g_main_context_push_thread_default(main_context);

        loop_ = g_main_loop_new(main_context, FALSE);
        if (!loop_) {
            LOG_ERROR("Failed to create GMainLoop");
            g_main_context_pop_thread_default(main_context);
            g_main_context_unref(main_context);
            return;
        }

        LOG_INFO("Starting GLib main loop");
        g_main_loop_run(loop_);

        // 清理线程本地GMainContext
        g_main_context_pop_thread_default(main_context);
        g_main_context_unref(main_context);
        LOG_INFO("GLib main loop stopped");
    });

    return true;
}

/// @brief Stop the GLib main loop, join the loop thread, and release the
///        loop and server objects. Safe to call multiple times.
void StreamGstRtsp::Stop()
{
    // Ask the loop to exit; g_main_loop_quit is safe from another thread.
    if (loop_) {
        g_main_loop_quit(loop_);
    }

    // Always join, independent of loop_: if g_main_loop_new failed inside
    // the thread, loop_ is null but the thread is still joinable, and
    // destroying a joinable std::thread calls std::terminate.
    if (main_loop_thread_.joinable()) {
        main_loop_thread_.join();
    }

    if (loop_) {
        g_main_loop_unref(loop_);
        loop_ = nullptr;
    }

    if (server_) {
        g_object_unref(server_);
        server_ = nullptr;
    }
}

/// @brief "media-configure" callback: wires the pipeline's appsrc elements
///        for the requested /live/<channel>/<stream_type> path, applies caps
///        from the actual encoder configuration, and registers a frame source
///        that feeds PushFrame(). Cleanup is hooked via the media's destroy
///        notify on the "media-data" key.
void StreamGstRtsp::OnNewMedia(GstRTSPMediaFactory *factory, GstRTSPMedia *media, gpointer user_data)
{
    (void)factory;
    StreamGstRtsp *server = static_cast<StreamGstRtsp *>(user_data);
    GstElement *element = gst_rtsp_media_get_element(media);

    // Recover channel/stream_type from the request URI path.
    uint32_t channel = 0;
    uint32_t stream_type = 0;
    const gchar *abspath = nullptr;
    GstRTSPContext *ctx = gst_rtsp_context_get_current();
    if (ctx && ctx->uri) {
        abspath = ctx->uri->abspath;
        if (abspath) {
            sscanf(abspath, "/live/%u/%u", &channel, &stream_type);
        }
    }

    // Valid ranges must match the mount points registered in Start():
    // channel in [1, CHANNEL_MAX], stream_type in [0, CHANNEL_STREAM_TYPE_MAX).
    // Note: >= (not >) — Start() never registers stream_type == MAX.
    if (channel < 1 || channel > CHANNEL_MAX || stream_type >= CHANNEL_STREAM_TYPE_MAX) {
        // abspath (and even ctx) may be null here; don't dereference blindly.
        LOG_ERROR("Invalid channel or stream type {}", abspath ? abspath : "(no path)");
        gst_object_unref(element);
        return;
    }
    LOG_INFO("OnNewMedia channel {} stream_type {}", channel, stream_type);

    auto stream = std::make_shared<MediaData>();
    stream->server = server;
    stream->channel = channel - 1; // internal channel index is 0-based
    stream->stream_type = stream_type;
    stream->appsrc_video = gst_bin_get_by_name(GST_BIN(element), "video_src");

    if (server->audio_enable_) {
        stream->appsrc_audio = gst_bin_get_by_name(GST_BIN(element), "audio_src");
    }

    if (!stream->appsrc_video) {
        LOG_ERROR("Failed to get video appsrc element");
        gst_object_unref(element);
        return;
    }

    // Live appsrc: timestamps are generated on push, stream-type is plain
    // streaming (no seeking).
    g_object_set(G_OBJECT(stream->appsrc_video), "format", GST_FORMAT_TIME, "is-live", TRUE, "do-timestamp", TRUE,
                 "stream-type", GST_APP_STREAM_TYPE_STREAM, NULL);

    if (server->audio_enable_ && stream->appsrc_audio) {
        g_object_set(G_OBJECT(stream->appsrc_audio), "format", GST_FORMAT_TIME, "is-live", TRUE, "do-timestamp", TRUE,
                     "stream-type", GST_APP_STREAM_TYPE_STREAM, NULL);
    }

    // Query the real encoder parameters so the caps match the stream.
    auto &video_enc = Hal::ICameraVideoEnc::GetInstance(stream->channel, stream->stream_type);
    VideoEncFormat enc_format;
    if (!video_enc.GetEncFormat(enc_format)) {
        LOG_ERROR("Failed to get video encode format for channel {} stream {}", stream->channel, stream->stream_type);
        gst_object_unref(element);
        return;
    }

    // Byte-stream H.264 with access-unit alignment, sized/paced per encoder.
    GstCaps *video_caps =
        gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "byte-stream", "alignment", G_TYPE_STRING,
                            "au", "width", G_TYPE_INT, (gint)enc_format.width, "height", G_TYPE_INT,
                            (gint)enc_format.height, "framerate", GST_TYPE_FRACTION, (gint)enc_format.fps, 1, NULL);

    if (!video_caps) {
        LOG_ERROR("Failed to create video caps");
        gst_object_unref(element);
        return;
    }

    gst_app_src_set_caps(GST_APP_SRC(stream->appsrc_video), video_caps);
    gst_caps_unref(video_caps);

    if (server->audio_enable_ && stream->appsrc_audio) {
        // AAC-LC in ADTS framing, 8 kHz mono.
        GstCaps *audio_caps =
            gst_caps_new_simple("audio/mpeg", "mpegversion", G_TYPE_INT, 4, "stream-format", G_TYPE_STRING, "adts",
                                "rate", G_TYPE_INT, 8000, "channels", G_TYPE_INT, 1, NULL);

        if (!audio_caps) {
            LOG_ERROR("Failed to create audio caps");
            gst_object_unref(element);
            return;
        }

        gst_app_src_set_caps(GST_APP_SRC(stream->appsrc_audio), audio_caps);
        gst_caps_unref(audio_caps);
    }

    // Subscribe to the media frame source; each frame is forwarded into the
    // appsrc elements by PushFrame().
    stream->streamSource = Media::StreamSource::Create(stream->channel, stream->stream_type);
    stream->streamSource->SetInfo(60, "rtsp frame data");
    stream->stream_handle = stream->streamSource->Register(
        [server, stream](const Media::MediaFramePtr &frame) { server->PushFrame(frame, stream); });
    stream->streamSource->Start();

    {
        std::lock_guard<std::mutex> lock(server->media_data_mutex_);
        server->media_data_[stream->stream_handle] = stream;
    }

    // Destroy notify fires when the media object is torn down: stop the
    // frame source and drop our bookkeeping entry.
    g_object_set_data_full(G_OBJECT(media), "media-data", new int(stream->stream_handle), [](gpointer data) {
        int *handle = static_cast<int *>(data);
        StreamGstRtsp *gstServer = &StreamGstRtsp::GetInstance();
        {
            std::lock_guard<std::mutex> lock(gstServer->media_data_mutex_);
            auto it = gstServer->media_data_.find(*handle);
            if (it != gstServer->media_data_.end()) {
                it->second->streamSource->Stop();
                gstServer->media_data_.erase(it);
            }
        }
        // Log before freeing: reading *handle after delete is use-after-free.
        LOG_INFO("Media stopped and cleaned up, handle: {}", *handle);
        delete handle;
    });

    gst_object_unref(element);
}

/// @brief Forward one media frame into the matching appsrc of a client's
///        pipeline: H.264 I/P frames go to video_src, AAC frames to
///        audio_src (only when audio streaming is enabled). The frame buffer
///        is wrapped zero-copy; a heap-held MediaFramePtr keeps it alive
///        until GStreamer releases the GstBuffer.
void StreamGstRtsp::PushFrame(const Media::MediaFramePtr &frame, MediaDataPtr stream)
{
    const auto frame_type = frame->GetFrameType();
    const bool is_video = (frame_type == MEDIA_FRAME_I) || (frame_type == MEDIA_FRAME_P);
    const bool is_audio = (frame_type == MEDIA_FRAME_AAC) && audio_enable_;

    if (!is_video && !is_audio) {
        LOG_WARN("Ignoring unknown frame type: {}", frame_type);
        return;
    }

    GstAppSrc *appsrc = is_video ? GST_APP_SRC(stream->appsrc_video) : GST_APP_SRC(stream->appsrc_audio);
    if (!appsrc) {
        return;
    }

    // Wrap the frame's memory directly (no copy); the release callback
    // deletes the heap MediaFramePtr, dropping the extra reference.
    auto *keepalive = new Media::MediaFramePtr(frame);
    GstBuffer *buffer = gst_buffer_new_wrapped_full(
        GST_MEMORY_FLAG_READONLY,     // memory is read-only for GStreamer
        (gpointer)frame->GetBuffer(), // underlying frame bytes
        frame->GetLength(),           // allocated size
        0,                            // offset
        frame->GetLength(),           // used size
        keepalive,                    // user data for the release callback
        [](gpointer data) { delete static_cast<Media::MediaFramePtr *>(data); });

    if (!buffer) {
        LOG_ERROR("Failed to create wrapped GstBuffer for {}", frame->ToString().c_str());
        return;
    }

    // I-frames are sync points; everything else is a delta unit.
    if (frame_type == MEDIA_FRAME_I) {
        GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
    } else {
        GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
    }

    // push_buffer takes ownership of `buffer` regardless of the result.
    const GstFlowReturn flow_ret = gst_app_src_push_buffer(appsrc, buffer);
    if (G_UNLIKELY(flow_ret != GST_FLOW_OK)) {
        LOG_ERROR("Failed to push buffer: {}", gst_flow_get_name(flow_ret));
    }
}

/// @brief "client-connected" callback: track the client and subscribe to its
///        "closed" signal so it can be removed again on disconnect.
void StreamGstRtsp::OnClientConnected(GstRTSPServer *server, GstRTSPClient *client, gpointer user_data)
{
    (void)server;
    StreamGstRtsp *self = static_cast<StreamGstRtsp *>(user_data);
    // Capture the size while still holding the lock; reading clients_.size()
    // unlocked races with concurrent connect/disconnect callbacks.
    decltype(self->clients_.size()) count = 0;
    {
        std::lock_guard<std::mutex> lock(self->clients_mutex_);
        self->clients_.insert(client);
        count = self->clients_.size();
    }
    LOG_INFO("RTSP client connected. Total clients: {}", count);

    // Observe the client's teardown to keep the set accurate.
    g_signal_connect(client, "closed", G_CALLBACK(OnClientDisconnected), self);
}

/// @brief "closed" callback: drop the client from the tracked set.
void StreamGstRtsp::OnClientDisconnected(GstRTSPClient *client, gpointer user_data)
{
    StreamGstRtsp *self = static_cast<StreamGstRtsp *>(user_data);
    // Capture the size under the lock; an unlocked size() read races with
    // concurrent connect/disconnect callbacks.
    decltype(self->clients_.size()) count = 0;
    {
        std::lock_guard<std::mutex> lock(self->clients_mutex_);
        self->clients_.erase(client);
        count = self->clients_.size();
    }
    LOG_INFO("RTSP client disconnected. Total clients: {}", count);
}

} // namespace StreamServer
} // namespace El