#define STB_IMAGE_WRITE_IMPLEMENTATION

#include <chrono>
#include <fstream>
#include <iostream>
#include <vector>

#include <fcntl.h>
#include <sys/mman.h>
#include <unistd.h>

#include <gbm.h>
#include <xf86drm.h>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
#include <libswscale/swscale.h>
}

#include <wayland-client.hpp>

#include "linux-dmabuf.h"
#include "wlr-screencopy.h"

// Global Wayland proxies, bound from the registry in main().
wayland::display_t display;
wayland::shm_t shm;
wayland::output_t output;
wayland::zwlr_screencopy_manager_v1_t screencopyManager;
wayland::zwp_linux_dmabuf_v1_t linuxDmabuf;
wayland::zwp_linux_dmabuf_feedback_v1_t feedback;
wayland::shm_pool_t pool;
std::vector<wayland::output_t> data; // NOTE(review): appears unused in this file -- possibly vestigial
std::string drmDeviceName;           // DRM node path resolved from dmabuf feedback (render node preferred)
gbm_device *gbmDevice;               // GBM device created on the fd opened from drmDeviceName

// Shared state for the single capture buffer, filled in by the screencopy
// callbacks and consumed by the encode loop in main().
static struct
{
    uint8_t *data;                       // CPU pointer, valid only while the bo is mapped
    struct wayland::buffer_t wl_buffer;  // wl_buffer created from the dmabuf params
    enum wayland::shm_format format;     // current format (shm encoding of the DRM fourcc)
    int drm_format;                      // DRM fourcc reported via on_linux_dmabuf
    struct gbm_bo *bo = nullptr;         // backing GBM buffer object
    wayland::zwp_linux_buffer_params_v1_t params;
    int width, height, stride;
    bool y_invert;                       // compositor delivered the frame upside down
} buffer;

// Set by the screencopy "ready" event; polled by the capture loop.
bool buffer_copy_done = false;

// Returns true when every Wayland global this program depends on was bound.
// On the first missing interface, prints a diagnostic and returns false
// without checking the remaining ones (same short-circuit as before).
bool isCompositorReady()
{
    const char *complaint = nullptr;

    if (!shm) {
        complaint = "Compositor does not support the wl_shm interface";
    } else if (!output) {
        complaint = "Compositor does not support the wl_output interface";
    } else if (!screencopyManager) {
        complaint = "Compositor does not support the zwlr_screencopy_manager_v1 interface";
    }

    if (complaint) {
        std::cerr << complaint << std::endl;
        return false;
    }

    return true;
}

// Maps a DRM fourcc to the wl_shm format enum. The two differ only for the
// classic ARGB/XRGB codes (wl_shm reserves the values 0 and 1 for those);
// every other fourcc has the same numeric value in both enumerations.
wayland::shm_format drm_to_shm_format(uint32_t format)
{
    switch (format) {
    case GBM_FORMAT_ARGB8888:
        return wayland::shm_format::argb8888;
    case GBM_FORMAT_XRGB8888:
        return wayland::shm_format::xrgb8888;
    default:
        return static_cast<wayland::shm_format>(format);
    }
}

// Captures the first Wayland output via wlr-screencopy into a GBM dma-buf,
// converts each frame BGRA -> YUV420P, encodes it as H.264 and streams the
// result over RTP (MPEG-TS) to rtp://239.0.0.1:52013.
// Returns 0 on orderly shutdown, 1 on setup failure.
int main()
{
    int screen_width = 0;
    int screen_height = 0;

    // --- Wayland setup: bind the globals we need and learn the output mode. ---
    auto registry = display.get_registry();
    registry.on_global() = [&](uint32_t name, const std::string &interface, uint32_t version) {
        if (interface == wayland::shm_t::interface_name) {
            shm = static_cast<wayland::shm_t>(registry.bind(name, shm, std::min<int>(wayland::detail::shm_interface.version, version)));
        } else if (interface == wayland::output_t::interface_name) {
            output = static_cast<wayland::output_t>(registry.bind(name, output, std::min<int>(wayland::detail::output_interface.version, version)));
            output.on_mode() = [&](uint32_t flags, int32_t width, int32_t height, int32_t refresh) {
                // Remember the current mode of the first output only.
                if (flags & wayland::output_mode::current && screen_width == 0 && screen_height == 0) {
                    screen_width = width;
                    screen_height = height;
                }
            };

        } else if (interface == wayland::zwlr_screencopy_manager_v1_t::interface_name) {
            screencopyManager = static_cast<wayland::zwlr_screencopy_manager_v1_t>(registry.bind(name, screencopyManager, std::min<int>(wayland::detail::zwlr_screencopy_manager_v1_interface.version, version)));
        } else if (interface == wayland::zwp_linux_dmabuf_v1_t::interface_name) {
            linuxDmabuf = static_cast<wayland::zwp_linux_dmabuf_v1_t>(registry.bind(name, linuxDmabuf, std::min<int>(wayland::detail::zwp_linux_dmabuf_v1_interface.version, version)));
            if (linuxDmabuf) {
                feedback = linuxDmabuf.get_default_feedback();
                feedback.on_format_table() = [&](uint32_t fd, uint32_t size) {
                    // We do not parse the format table; just release the fd.
                    close(fd);
                };
                feedback.on_main_device() = [&](std::vector<dev_t> devices) {
                    // Resolve the compositor's main DRM device id to a node
                    // path, preferring the render node over the primary node.
                    drmDevice *device = nullptr;
                    if (drmGetDeviceFromDevId(devices[0], 0, &device) != 0) {
                        std::cerr << "Failed to get DRM device" << std::endl;
                        return;
                    }

                    if (device->available_nodes & (1 << DRM_NODE_RENDER)) {
                        drmDeviceName = device->nodes[DRM_NODE_RENDER];
                    } else if (device->available_nodes & (1 << DRM_NODE_PRIMARY)) {
                        drmDeviceName = device->nodes[DRM_NODE_PRIMARY];
                    } else {
                        std::cerr << "No primary or render node found" << std::endl;
                        drmFreeDevice(&device); // was leaked on this path
                        return;
                    }

                    drmFreeDevice(&device);
                };
            }
        }
    };

    display.dispatch();
    display.roundtrip();

    if (!isCompositorReady()) {
        return 1;
    }

    // x264 cannot encode a 0x0 stream; fail early if no mode event arrived.
    if (screen_width <= 0 || screen_height <= 0) {
        std::cerr << "Failed to determine output mode" << std::endl;
        return 1;
    }

    int drm_fd = open(drmDeviceName.c_str(), O_RDWR);
    if (drm_fd < 0) {
        std::cerr << "Failed to open DRM device" << std::endl;
        return 1;
    }

    gbmDevice = gbm_create_device(drm_fd);
    if (!gbmDevice) {
        std::cerr << "Failed to create GBM device" << std::endl;
        close(drm_fd);
        return 1;
    }

    // --- Encoder setup: H.264 high profile, 25 fps, low-latency preset. ---
    const AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec) {
        std::cerr << "Failed to find encoder" << std::endl;
        return 1;
    }

    AVCodecContext *c = avcodec_alloc_context3(codec);
    if (!c) {
        std::cerr << "Failed to allocate codec context" << std::endl;
        return 1;
    }

    c->profile = FF_PROFILE_H264_HIGH;
    c->bit_rate = 4000000;
    c->width = screen_width;
    c->height = screen_height;
    c->time_base = {1, 25};
    c->framerate = {25, 1};
    c->gop_size = 10;
    c->max_b_frames = 1;
    c->pix_fmt = AV_PIX_FMT_YUV420P;

    av_opt_set(c->priv_data, "preset", "ultrafast", 0); // low-latency encoding

    if (avcodec_open2(c, codec, NULL) < 0) {
        std::cerr << "Failed to open codec" << std::endl;
        return 1;
    }

    AVFormatContext *out_ctx = nullptr;
    const AVOutputFormat *out_fmt = av_guess_format("rtp_mpegts", NULL, NULL);
    if (!out_fmt) {
        std::cerr << "Failed to guess format" << std::endl;
        return 1;
    }

    avformat_alloc_output_context2(&out_ctx, out_fmt, "rtp_mpegts", "rtp://239.0.0.1:52013?pkt_size=1024");
    if (!out_ctx) {
        std::cerr << "Failed to allocate output context" << std::endl;
        return 1;
    }

    AVStream *stream = avformat_new_stream(out_ctx, codec);
    if (!stream) {
        std::cerr << "Failed to allocate stream" << std::endl;
        return 1;
    }
    stream->time_base = c->time_base;
    avcodec_parameters_from_context(stream->codecpar, c);

    // Open the muxer on the URL the context was created with so the
    // ?pkt_size=1024 option is honoured (the old code re-spelled the URL
    // here without it).
    if (avio_open(&out_ctx->pb, out_ctx->url, AVIO_FLAG_WRITE) < 0) {
        std::cerr << "Failed to open output" << std::endl;
        return 1;
    }

    if (avformat_write_header(out_ctx, NULL) < 0) {
        std::cerr << "Failed to write header" << std::endl;
        return 1;
    }

    // The BGRA->YUV converter is cached and reused; sws_getCachedContext()
    // only rebuilds it when the capture geometry changes.
    SwsContext *sws_ctx = nullptr;

    int64_t start_time = av_gettime();
    int64_t frame_index = 0;
    bool running = true;
    while (running) {
        buffer_copy_done = false;
        wayland::zwlr_screencopy_frame_v1_t screencopyFrame = screencopyManager.capture_output(0, output);
        screencopyFrame.on_ready() = [&](uint32_t, uint32_t, uint32_t) {
            buffer_copy_done = true;
        };
        screencopyFrame.on_flags() = [&](wayland::zwlr_screencopy_frame_v1_flags flags) {
            buffer.y_invert = flags & wayland::zwlr_screencopy_frame_v1_flags::y_invert;
        };
        screencopyFrame.on_failed() = []() {
            std::cerr << "Failed to copy frame" << std::endl;
            exit(1);
        };
        screencopyFrame.on_linux_dmabuf() = [&](uint32_t format, uint32_t width, uint32_t height) {
            auto old_format = buffer.format;
            buffer.format = drm_to_shm_format(format);
            buffer.width = width;
            buffer.height = height;
            buffer.drm_format = format;

            // (Re)create the GBM bo and its wl_buffer only on first use or
            // when the compositor switches formats.
            if (!buffer.wl_buffer || old_format != buffer.format) {
                if (buffer.bo) {
                    gbm_bo_destroy(buffer.bo);
                    buffer.bo = nullptr;
                }
                uint64_t modifier = 0; // DRM_FORMAT_MOD_LINEAR
                buffer.bo = gbm_bo_create_with_modifiers(gbmDevice, buffer.width, buffer.height, buffer.drm_format, &modifier, 1);
                if (!buffer.bo) {
                    buffer.bo = gbm_bo_create(gbmDevice, buffer.width, buffer.height, buffer.drm_format, GBM_BO_USE_LINEAR | GBM_BO_USE_RENDERING);
                }
                if (!buffer.bo) {
                    std::cerr << "Failed to create GBM buffer" << std::endl;
                    exit(1);
                }

                buffer.stride = gbm_bo_get_stride(buffer.bo);
                buffer.params = linuxDmabuf.create_params();

                uint64_t mod = gbm_bo_get_modifier(buffer.bo);
                // NOTE(review): gbm_bo_get_fd() returns a dup'ed fd; confirm
                // whether waylandpp closes it after sending, otherwise it
                // leaks once per format change.
                buffer.params.add(gbm_bo_get_fd(buffer.bo), 0, gbm_bo_get_offset(buffer.bo, 0), gbm_bo_get_stride(buffer.bo), mod >> 32, mod & 0xffffffff);

                buffer.params.on_created() = [&](wayland::buffer_t buf) {
                    buffer.wl_buffer = buf;
                    screencopyFrame.copy(buffer.wl_buffer);
                };

                buffer.params.create(buffer.width, buffer.height, buffer.drm_format, 0);
            } else {
                screencopyFrame.copy(buffer.wl_buffer);
            }
        };

        // Pump the event loop until the compositor signals the copy is done.
        while (!buffer_copy_done && display.dispatch() != -1) {
            // No-op
        }

        // Map the dma-buf for CPU reads. The mapping must stay alive until
        // sws_scale() has consumed the pixels -- the old code unmapped
        // immediately and then read freed memory.
        uint32_t stride = 0;
        void *map_data = nullptr;
        void *data = gbm_bo_map(buffer.bo, 0, 0, buffer.width, buffer.height, GBM_BO_TRANSFER_READ, &stride, &map_data);
        if (!data) {
            std::cerr << "Failed to map bo" << std::endl;
            break;
        }
        buffer.data = (uint8_t *)data;

        AVFrame *yuv_frame = av_frame_alloc();
        if (!yuv_frame) {
            std::cerr << "Failed to allocate frame" << std::endl;
            gbm_bo_unmap(buffer.bo, map_data);
            break;
        }
        yuv_frame->format = AV_PIX_FMT_YUV420P;
        yuv_frame->width = c->width;
        yuv_frame->height = c->height;
        if (av_frame_get_buffer(yuv_frame, 32) < 0) {
            std::cerr << "Failed to allocate frame" << std::endl;
            av_frame_free(&yuv_frame);
            gbm_bo_unmap(buffer.bo, map_data);
            break;
        }

        // Convert (and scale, if the capture size differs from the encoder
        // size) BGRA -> YUV420P.
        sws_ctx = sws_getCachedContext(sws_ctx, buffer.width, buffer.height, AV_PIX_FMT_BGRA, c->width, c->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
        if (!sws_ctx) {
            std::cerr << "Failed to create sws context" << std::endl;
            av_frame_free(&yuv_frame);
            gbm_bo_unmap(buffer.bo, map_data);
            break;
        }

        // Feed the mapped pixels straight into swscale, honouring the row
        // stride reported by gbm_bo_map() (the old code assumed tightly
        // packed rows). A y-inverted frame is flipped with the standard
        // negative-stride trick: start at the last row and walk backwards.
        const uint8_t *src_data[4] = {buffer.data, nullptr, nullptr, nullptr};
        int src_linesize[4] = {(int)stride, 0, 0, 0};
        if (buffer.y_invert) {
            src_data[0] = buffer.data + (size_t)stride * (buffer.height - 1);
            src_linesize[0] = -(int)stride;
        }
        sws_scale(sws_ctx, src_data, src_linesize, 0, buffer.height, yuv_frame->data, yuv_frame->linesize);

        gbm_bo_unmap(buffer.bo, map_data);

        yuv_frame->pts = frame_index++;

        // Pace the capture loop to real time (25 fps) so the multicast
        // receiver is not flooded faster than the stream's timestamps.
        int64_t pts_time = av_rescale_q(yuv_frame->pts, c->time_base, AVRational{1, AV_TIME_BASE});
        int64_t elapsed = av_gettime() - start_time;
        if (pts_time > elapsed) {
            av_usleep((unsigned)(pts_time - elapsed));
        }

        AVPacket *pkt = av_packet_alloc();
        if (!pkt) {
            std::cerr << "Failed to allocate packet" << std::endl;
            av_frame_free(&yuv_frame);
            break;
        }

        int ret = avcodec_send_frame(c, yuv_frame);
        if (ret < 0) {
            std::cerr << "Failed to send frame" << std::endl;
            av_packet_free(&pkt);
            av_frame_free(&yuv_frame);
            break;
        }

        // Drain every packet the encoder has ready and mux it.
        while (ret >= 0) {
            ret = avcodec_receive_packet(c, pkt);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                break;
            } else if (ret < 0) {
                std::cerr << "Failed to receive packet" << std::endl;
                running = false;
                break;
            }

            pkt->stream_index = stream->index;
            // av_packet_rescale_ts also handles AV_NOPTS_VALUE, which the
            // old manual av_rescale_q calls corrupted.
            av_packet_rescale_ts(pkt, c->time_base, stream->time_base);

            if (av_interleaved_write_frame(out_ctx, pkt) < 0) {
                std::cerr << "Failed to write frame" << std::endl;
                running = false;
                break;
            }

            av_packet_unref(pkt);
        }

        av_packet_free(&pkt);
        av_frame_free(&yuv_frame);
    }

    // --- Teardown (reached only when capture/encode failed): flush the
    // encoder, finalize the muxer and release every resource. ---
    if (avcodec_send_frame(c, nullptr) == 0) {
        AVPacket *pkt = av_packet_alloc();
        if (pkt) {
            while (avcodec_receive_packet(c, pkt) == 0) {
                pkt->stream_index = stream->index;
                av_packet_rescale_ts(pkt, c->time_base, stream->time_base);
                av_interleaved_write_frame(out_ctx, pkt);
                av_packet_unref(pkt);
            }
            av_packet_free(&pkt);
        }
    }
    av_write_trailer(out_ctx);
    avio_closep(&out_ctx->pb);
    avformat_free_context(out_ctx);
    avcodec_free_context(&c);
    sws_freeContext(sws_ctx);
    if (buffer.bo) {
        gbm_bo_destroy(buffer.bo);
    }
    gbm_device_destroy(gbmDevice);
    close(drm_fd);

    return 0;
}