/* This file is part of the KDE project
   Copyright (C) 2018 Oleg Chernovskiy <kanedias@xaker.ru>
   Copyright (C) 2018 Jan Grulich <jgrulich@redhat.com>

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public
   License as published by the Free Software Foundation; either
   version 3 of the License, or (at your option) any later version.
*/

// system
#include <sys/mman.h>
#include <cstring>

// Qt
#include <QCoreApplication>
#include <QGuiApplication>
#include <QScreen>
#include <QSocketNotifier>
#include <QDebug>
#if (QT_VERSION >= QT_VERSION_CHECK(5, 10, 0))
#include <QRandomGenerator>
#endif

#include <spa/utils/result.h>
#ifdef HAVE_LINUX_DMABUF_H
#include <linux/dma-buf.h>
#endif
#include <sys/ioctl.h>

#include <spa/param/format-utils.h>
#include <spa/param/video/format-utils.h>
#include <spa/param/props.h>

#include <pipewire/pipewire.h>

#include <climits>

#include "pw_framebuffer.h"
#include "debug.h"

#include <glib.h>
#include <gio/gio.h>
#include <gio/gunixfdlist.h>
#include "xdp/waylandintegration.h"
#ifdef __cplusplus
extern "C" {
#endif
#include "xdp/pipewire.h"
#ifdef __cplusplus
}
#endif

#include <QApplication>
#include <QDesktopWidget>

#include <assert.h>

// Bitmask of input-device classes a remote-desktop session may control.
// Combined into the DeviceTypes flags type by Q_DECLARE_FLAGS below.
// NOTE(review): appears unused within this file — confirm it is referenced elsewhere before removing.
enum DeviceType {
    None = 0x0,
    Keyboard = 0x1,
    Pointer = 0x2,
    TouchScreen = 0x4,
    All = (Keyboard | Pointer | TouchScreen),
};
Q_DECLARE_FLAGS(DeviceTypes, DeviceType)

// Minimum version of the KDE screencast portal interface this code can talk to.
// NOTE(review): appears unused in this translation unit — confirm before removing.
static const uint MIN_SUPPORTED_XDP_KDE_SC_VERSION = 1;
// Every negotiated video format is 32-bit, i.e. 4 bytes per pixel.
static const int BYTES_PER_PIXEL = 4;

// Convenience initializer for one pw_permission entry (compound literal with
// designated initializers, used when building the permission list below).
#define PERMISSION_ITEM(item_id, item_permissions) \
    ((struct pw_permission) { \
        .id = item_id, \
        .permissions = item_permissions})

/**
 * @brief open_pipewire_screen_cast_remote - connect a dedicated PipeWire remote and
 *        restrict its permissions to the given screencast stream nodes.
 * @param app_id application id (currently unused; kept for API symmetry with the portal code)
 * @param streams streams whose node ids the remote may access read/write
 * @param error set when the remote could not be created
 * @return the connected remote, or nullptr on failure
 */
static PipeWireRemote *open_pipewire_screen_cast_remote(const char *app_id,
                                                        const QVector<WaylandIntegration::WaylandIntegration::Stream> &streams,
                                                        GError **error)
{
    Q_UNUSED(app_id);

    struct pw_properties *pipewire_properties;
    PipeWireRemote *remote;
    QVector<struct pw_permission> permission_items;

    pipewire_properties = pw_properties_new(
        "pipewire.access.portal.media_roles", "",
        NULL);
    remote = pipewire_remote_new_sync(pipewire_properties,
                                      NULL, NULL, NULL, NULL,
                                      error);
    if (!remote) {
        // Was "return FALSE;" — this function returns a pointer, so be explicit.
        return nullptr;
    }

    permission_items.append(PERMISSION_ITEM(PW_ID_CORE, PW_PERM_RWX));

    /*
     * PipeWire:Interface:NodeFactory
     * Needs r-- so it can be passed to create-object when creating the sink node.
     */
    permission_items.append(PERMISSION_ITEM(remote->node_factory_id, PW_PERM_R));

    // Grant full access to every advertised screencast stream node.
    for (const auto &stream : streams) {
        quint32 stream_id = stream.stream->nodeid();
        permission_items.append(PERMISSION_ITEM(stream_id, PW_PERM_RWX));
    }

    /*
     * Hide all existing and future nodes (except the ones we explicitly list above).
     */
    permission_items.append(PERMISSION_ITEM(PW_ID_ANY, 0));

    pw_client_update_permissions(pw_core_get_client(remote->core),
                                 permission_items.length(),
                                 permission_items.constData());

    pipewire_remote_roundtrip(remote);

    return remote;
}

/**
 * @brief OpenPipeWireRemote - open a permission-restricted PipeWire remote for the
 *        current screencast streams and steal its connection file descriptor.
 * @return the PipeWire connection fd, or 0 when the remote could not be created
 */
static uint OpenPipeWireRemote()
{
    QVector<WaylandIntegration::WaylandIntegration::Stream> streams = WaylandIntegration::WaylandIntegration::getStreams();
    const char *app_id = "appid";
    g_autoptr(GError) error = NULL;

    PipeWireRemote *remote = open_pipewire_screen_cast_remote(app_id, streams, &error);
    if (!remote) {
        // Previously this dereferenced a null remote when the open failed.
        qCWarning(KRFB_FB_PIPEWIRE) << "Failed to open PipeWire remote:"
                                    << (error ? error->message : "unknown error");
        return 0;
    }

    int fd = pw_core_steal_fd(remote->core);
    pipewire_remote_destroy(remote);
    return fd;
}

/**
 * @brief The PWFrameBuffer::Private class - private counterpart of PWFramebuffer class. This is the entity where
 *        whole logic resides, for more info search for "d-pointer pattern" information.
 */
class PWFrameBuffer::Private
{
public:
    Private(PWFrameBuffer *q);
    ~Private();

private:
    friend class PWFrameBuffer;

    // Static trampolines registered with PipeWire; `data` is always `this`.
    static void onCoreError(void *data, uint32_t id, int seq, int res, const char *message);
    static void onStreamParamChanged(void *data, uint32_t id, const struct spa_pod *format);
    static void onStreamStateChanged(void *data, pw_stream_state old, pw_stream_state state, const char *error_message);
    static void onStreamProcess(void *data);

    // Starts the Wayland screencast and picks the stream node, then calls initPw().
    void init();
    // Brings up the PipeWire thread loop, context, core and receiving stream.
    void initPw();

    // pw handling
    pw_stream *createReceivingStream();
    void handleFrame(pw_buffer *pwBuffer);

    // link to public interface
    PWFrameBuffer *q;

    // pipewire stuff
    struct pw_context *pwContext = nullptr;
    struct pw_core *pwCore = nullptr;
    struct pw_stream *pwStream = nullptr;
    struct pw_thread_loop *pwMainLoop = nullptr;

    // wayland-like listeners
    // ...of events that happen in pipewire server
    spa_hook coreListener = {};
    spa_hook streamListener = {};

    // event handlers
    pw_core_events pwCoreEvents = {};
    pw_stream_events pwStreamEvents = {};

    // node id of the screencast stream we connect to (0 until init() found one)
    uint pwStreamNodeId = 0;

    // negotiated video format, heap-allocated in onStreamParamChanged
    spa_video_info_raw *videoFormat = nullptr;

    // QScopedPointer<RemoteDesktopSession> XdpRemoteDesktopSession;

    // streamSize: full size advertised by the stream; videoSize: possibly cropped
    // size actually copied into q->fb (see handleFrame's VideoCrop handling).
    QSize streamSize;
    QSize videoSize;

    // sanity indicator
    bool isValid = true;

    QImage cursorTexture;
    QPoint cursorPosition;
    QPoint cursorHotspot;
};

// Wire up the PipeWire event tables; actual connectivity is established later in init()/initPw().
PWFrameBuffer::Private::Private(PWFrameBuffer *q)
    : q(q)
{
    pwCoreEvents.version = PW_VERSION_CORE_EVENTS;
    pwCoreEvents.error = &onCoreError;

    pwStreamEvents.version = PW_VERSION_STREAM_EVENTS;
    pwStreamEvents.state_changed = &onStreamStateChanged;
    pwStreamEvents.param_changed = &onStreamParamChanged;
    pwStreamEvents.process = &onStreamProcess;
}

/**
 * @brief PWFrameBuffer::Private::init - start the Wayland screencast, pick the first
 *        advertised stream node and bring up the PipeWire side via initPw().
 *        Every failure path marks the framebuffer invalid so the factory can discard it.
 */
void PWFrameBuffer::Private::init()
{
    // TODO check whether we got some outputs?
    if (WaylandIntegration::screens().isEmpty()) {
        // Was logged under the wrong category (XdgDesktopPortalKdeRemoteDesktop) with a
        // copy-pasted dialog message; use the file-wide category and an accurate message.
        qCWarning(KRFB_FB_PIPEWIRE) << "Failed to start streaming as there is no screen available";
        isValid = false;
        return;
    }

#if 1
    // Stream the first output; the disabled branch below streams a single window instead.
    if (!WaylandIntegration::startStreamingOutput(WaylandIntegration::screens().first().waylandOutputName(), Screencasting::Hidden)) {
        qCWarning(KRFB_FB_PIPEWIRE) << "Failed to start streaming the output";
        isValid = false;
        return;
    }
#else
    QByteArray winId_s = QByteArray::number(QApplication::desktop()->winId());
    if (!WaylandIntegration::startStreamingWindow(winId_s)) {
        isValid = false;
        return;
    }
#endif

    WaylandIntegration::authenticate();

    QVector<WaylandIntegration::WaylandIntegration::Stream> streams = WaylandIntegration::WaylandIntegration::getStreams();

    if (streams.isEmpty()) {
        qCWarning(KRFB_FB_PIPEWIRE) << "Failed to get screencast streams";
        isValid = false;
        return;
    }

    pwStreamNodeId = streams.first().nodeId;

    initPw();
}
/**
 * @brief PWFrameBuffer::Private::initPw - initialize PipeWire connectivity: the thread
 *        loop, context, core connection and the receiving stream.
 *        Every failure marks the framebuffer invalid so callers can bail out.
 */
void PWFrameBuffer::Private::initPw()
{
    qInfo() << "Initializing Pipewire connectivity";

    // init pipewire (required)
    pw_init(nullptr, nullptr); // args are not used anyways

    pwMainLoop = pw_thread_loop_new("pipewire-main-loop", nullptr);
    if (!pwMainLoop) {
        qCWarning(KRFB_FB_PIPEWIRE) << "Failed to create PipeWire thread loop";
        isValid = false;
        return;
    }

    pwContext = pw_context_new(pw_thread_loop_get_loop(pwMainLoop), nullptr, 0);
    if (!pwContext) {
        qCWarning(KRFB_FB_PIPEWIRE) << "Failed to create PipeWire context";
        isValid = false;
        return;
    }

    pwCore = pw_context_connect(pwContext, nullptr, 0);
    if (!pwCore) {
        qCWarning(KRFB_FB_PIPEWIRE) << "Failed to connect PipeWire context";
        isValid = false;
        return;
    }

    pw_core_add_listener(pwCore, &coreListener, &pwCoreEvents, this);

    pwStream = createReceivingStream();
    if (!pwStream) {
        qCWarning(KRFB_FB_PIPEWIRE) << "Failed to create PipeWire stream";
        isValid = false;
        return;
    }

    if (pw_thread_loop_start(pwMainLoop) < 0) {
        qCWarning(KRFB_FB_PIPEWIRE) << "Failed to start main PipeWire loop";
        isValid = false;
    }
}

void PWFrameBuffer::Private::onCoreError(void *data, uint32_t id, int seq, int res, const char *message)
{
    Q_UNUSED(data);
    Q_UNUSED(id);
    Q_UNUSED(seq);
    Q_UNUSED(res);

    qInfo() << "core error: " << message;
}

/**
 * @brief PWFrameBuffer::Private::onStreamStateChanged - called whenever the stream state
 *        changes on the PipeWire server.
 * @param data pointer set as the last argument of pw_stream_add_listener (our Private)
 * @param state new state the stream has changed to
 * @param error_message optional error description, non-null when state is ERROR
 */
void PWFrameBuffer::Private::onStreamStateChanged(void *data, pw_stream_state /*old*/, pw_stream_state state, const char *error_message)
{
    auto *self = static_cast<PWFrameBuffer::Private *>(data);

    qInfo() << "Stream state changed: " << pw_stream_state_as_string(state);

    if (state == PW_STREAM_STATE_ERROR) {
        qCWarning(KRFB_FB_PIPEWIRE) << "pipewire stream error: " << error_message;
    } else if (state == PW_STREAM_STATE_PAUSED) {
        // A freshly connected stream arrives paused; activate it to start receiving frames.
        pw_stream_set_active(self->pwStream, true);
    }
    // STREAMING / UNCONNECTED / CONNECTING need no action.
}

/**
 * @brief PWFrameBuffer::Private::onStreamParamChanged - executed after the stream is active
 *        and the format negotiation produced a result. Parses the negotiated raw video
 *        format and answers with the buffer/meta parameters we require.
 * @param data pointer set as the last argument of pw_stream_add_listener (our Private)
 * @param id parameter id; only SPA_PARAM_Format is handled
 * @param format the negotiated format pod (may be null when the format is cleared)
 */
void PWFrameBuffer::Private::onStreamParamChanged(void *data, uint32_t id, const struct spa_pod *format)
{
    qInfo() << "Stream format changed";
    auto *d = static_cast<PWFrameBuffer::Private *>(data);

    const int bpp = 4;

    if (!format || id != SPA_PARAM_Format) {
        return;
    }

    // The format can be renegotiated; the old code leaked one spa_video_info_raw per
    // renegotiation. Free the previous allocation before parsing the new format.
    delete d->videoFormat;
    d->videoFormat = new spa_video_info_raw();
    spa_format_video_raw_parse(format, d->videoFormat);
    auto width = d->videoFormat->size.width;
    auto height = d->videoFormat->size.height;
    auto stride = SPA_ROUND_UP_N(width * bpp, 4);
    auto size = height * stride;
    d->streamSize = QSize(width, height);

    uint8_t buffer[1024];
    auto builder = SPA_POD_BUILDER_INIT(buffer, sizeof(buffer));

    // setup buffers and meta header for new format
    const struct spa_pod *params[2];

    params[0] = reinterpret_cast<spa_pod *>(spa_pod_builder_add_object(&builder,
                                                                       SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers,
                                                                       SPA_PARAM_BUFFERS_size, SPA_POD_Int(size),
                                                                       SPA_PARAM_BUFFERS_stride, SPA_POD_Int(stride),
                                                                       SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(8, 1, 32),
                                                                       SPA_PARAM_BUFFERS_blocks, SPA_POD_Int(1),
                                                                       SPA_PARAM_BUFFERS_align, SPA_POD_Int(16)));
    params[1] = reinterpret_cast<spa_pod *>(spa_pod_builder_add_object(&builder,
                                                                       SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
                                                                       SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Header),
                                                                       SPA_PARAM_META_size, SPA_POD_Int(sizeof(struct spa_meta_header))));
    pw_stream_update_params(d->pwStream, params, 2);
}

/**
 * @brief PWFrameBuffer::Private::onNewBuffer - called when new buffer is available in pipewire stream
 * @param data pointer that you have set in pw_stream_add_listener call's last argument
 * @param id
 */
void PWFrameBuffer::Private::onStreamProcess(void *data)
{
    auto *d = static_cast<PWFrameBuffer::Private *>(data);

    pw_buffer *buf;
    if (!(buf = pw_stream_dequeue_buffer(d->pwStream))) {
        return;
    }

    d->handleFrame(buf);

    pw_stream_queue_buffer(d->pwStream, buf);
}

/**
 * @brief syncDmaBuf - bracket CPU read access to a dmabuf with DMA_BUF_IOCTL_SYNC.
 *        No-op when dmabuf support was not compiled in.
 * @param fd dmabuf file descriptor
 * @param start_or_end DMA_BUF_SYNC_START or DMA_BUF_SYNC_END
 */
static void syncDmaBuf(int fd, uint64_t start_or_end)
{
#ifdef HAVE_LINUX_DMABUF_H
    struct dma_buf_sync sync = {0};
    sync.flags = start_or_end | DMA_BUF_SYNC_READ;

    // Retry the ioctl as long as it is interrupted by a signal.
    int rc;
    do {
        rc = ioctl(fd, DMA_BUF_IOCTL_SYNC, &sync);
    } while (rc == -1 && errno == EINTR);

    if (rc == -1) {
        qCWarning(KRFB_FB_PIPEWIRE) << "Failed to synchronize DMA buffer: " << strerror(errno);
    }
#else
    (void)fd;
    (void)start_or_end;
#endif
}

#if 0
// NOTE(review): currently compiled out. Maps SPA pixel formats to QImage formats;
// only used by other disabled (#if 0) cursor/conversion code in this file.
static QImage::Format spaToQImageFormat(quint32 format)
{
    return format == SPA_VIDEO_FORMAT_BGR  ? QImage::Format_BGR888
           : format == SPA_VIDEO_FORMAT_RGBx ? QImage::Format_RGBX8888
           : QImage::Format_RGB32;
}
#endif

/**
 * @brief PWFrameBuffer::Private::handleFrame - copy one dequeued PipeWire buffer into q->fb.
 *        Maps the buffer memory (dmabuf or memfd), optionally crops according to the
 *        SPA_META_VideoCrop metadata, swaps BGR(A/x) to RGB order in place and records the
 *        damaged tiles. The mapping is always released before returning.
 * @param pwBuffer buffer dequeued from the receiving stream
 */
void PWFrameBuffer::Private::handleFrame(pw_buffer *pwBuffer)
{
    auto *spaBuffer = pwBuffer->buffer;
    uint8_t *src = nullptr;

#if 0
    // process cursor (disabled: cursor metadata is not negotiated yet)
    {
        struct spa_meta_cursor *cursor = static_cast<struct spa_meta_cursor*>(spa_buffer_find_meta_data (spaBuffer, SPA_META_Cursor, sizeof (*cursor)));

        if (cursor != NULL && spa_meta_cursor_is_valid (cursor)) {
            struct spa_meta_bitmap *bitmap = nullptr;

            if (cursor->bitmap_offset)
                bitmap = SPA_MEMBER (cursor, cursor->bitmap_offset, struct spa_meta_bitmap);

            if (bitmap && bitmap->size.width > 0 && bitmap->size.height > 0) {
                const uint8_t *bitmap_data;

                bitmap_data = SPA_MEMBER (bitmap, bitmap->offset, uint8_t);
                cursorHotspot = { cursor->hotspot.x, cursor->hotspot.y };
                cursorTexture = QImage(bitmap_data, bitmap->size.width, bitmap->size.height, bitmap->stride, spaToQImageFormat(bitmap->format));
            }

            cursorPosition = QPoint{ cursor->position.x, cursor->position.y };
        }
    }
#endif

    if (spaBuffer->datas[0].chunk->size == 0) {
        qCDebug(KRFB_FB_PIPEWIRE) << "Got empty buffer. The buffer possibly carried only "
                                     "information about the mouse cursor.";
        return;
    }

    const quint32 maxSize = spaBuffer->datas[0].maxsize;
    const qint64 srcStride = spaBuffer->datas[0].chunk->stride;

    // cleanup undoes whatever mapping is established below. It stays empty when the
    // buffer type is unhandled, so it must be tested before being invoked (calling an
    // empty std::function throws std::bad_function_call).
    std::function<void()> cleanup;
#ifdef HAVE_LINUX_DMABUF_H
    if (spaBuffer->datas->type == SPA_DATA_DmaBuf) {
        const int fd = spaBuffer->datas[0].fd;
        const uint32_t off = spaBuffer->datas[0].mapoffset;
        auto map = mmap(
            nullptr, maxSize + off,
#ifdef HUAWEI_KLU_PGV
            PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
#else
            PROT_READ, MAP_PRIVATE, fd, 0);
#endif
        if (map == MAP_FAILED) {
            qCWarning(KRFB_FB_PIPEWIRE) << "Failed to mmap the dmabuf: " << strerror(errno);
            return;
        }

        syncDmaBuf(fd, DMA_BUF_SYNC_START);
        src = SPA_MEMBER(map, off, uint8_t);

        cleanup = [map, maxSize, off, fd] {
            syncDmaBuf(fd, DMA_BUF_SYNC_END);
            munmap(map, maxSize + off);
        };
    } else
#endif
    if (spaBuffer->datas->type == SPA_DATA_MemFd) {
        uint8_t *map = static_cast<uint8_t *>(mmap(
            nullptr, spaBuffer->datas->maxsize + spaBuffer->datas->mapoffset,
            PROT_READ, MAP_PRIVATE, spaBuffer->datas->fd, 0));

        if (map == MAP_FAILED) {
            qCWarning(KRFB_FB_PIPEWIRE) << "Failed to mmap the memory: " << strerror(errno);
            return;
        }
        src = SPA_MEMBER(map, spaBuffer->datas[0].mapoffset, uint8_t);

        cleanup = [map, spaBuffer] {
            munmap(map, spaBuffer->datas->maxsize + spaBuffer->datas->mapoffset);
        };
    }

    if (!src) {
        // Neither dmabuf nor memfd (or dmabuf support compiled out): previously this
        // fell through with src == nullptr and crashed on the copy below.
        qCWarning(KRFB_FB_PIPEWIRE) << "Unsupported buffer data type:" << spaBuffer->datas->type;
        return;
    }

    struct spa_meta_region *videoMetadata =
        static_cast<struct spa_meta_region *>(spa_buffer_find_meta_data(
            spaBuffer, SPA_META_VideoCrop, sizeof(*videoMetadata)));

    if (videoMetadata && (videoMetadata->region.size.width > static_cast<uint32_t>(streamSize.width()) || videoMetadata->region.size.height > static_cast<uint32_t>(streamSize.height()))) {
        qCWarning(KRFB_FB_PIPEWIRE) << "Stream metadata sizes are wrong!";
        if (cleanup) {
            cleanup(); // previously leaked the mapping (and skipped the dmabuf end-sync)
        }
        return;
    }

    // Use video metadata when video size from metadata is set and smaller than
    // video stream size, so we need to adjust it.
    bool videoFullWidth = true;
    bool videoFullHeight = true;
    if (videoMetadata && videoMetadata->region.size.width != 0 && videoMetadata->region.size.height != 0) {
        if (videoMetadata->region.size.width < static_cast<uint32_t>(streamSize.width())) {
            videoFullWidth = false;
        } else if (videoMetadata->region.size.height < static_cast<uint32_t>(streamSize.height())) {
            videoFullHeight = false;
        }
    }

    QSize prevVideoSize = videoSize;
    if (!videoFullHeight || !videoFullWidth) {
        videoSize = QSize(videoMetadata->region.size.width, videoMetadata->region.size.height);
    } else {
        videoSize = streamSize;
    }

    // (Re)allocate the client framebuffer when the video size changed.
    if (!q->fb || videoSize != prevVideoSize) {
        if (q->fb) {
            free(q->fb);
        }
        q->fb = static_cast<char *>(malloc(videoSize.width() * videoSize.height() * BYTES_PER_PIXEL));

        if (!q->fb) {
            qCWarning(KRFB_FB_PIPEWIRE) << "Failed to allocate buffer";
            isValid = false;
            if (cleanup) {
                cleanup(); // previously leaked the mapping on this error path
            }
            return;
        }

        Q_EMIT q->frameBufferChanged();
    }

    const qint32 dstStride = videoSize.width() * BYTES_PER_PIXEL;
    Q_ASSERT(dstStride <= srcStride);

    // Adjust the source start for a vertically cropped region when it fits the stream.
    if (!videoFullHeight && (videoMetadata->region.position.y + videoSize.height() <= streamSize.height())) {
        src += srcStride * videoMetadata->region.position.y;
    }

    const int xOffset = !videoFullWidth && (videoMetadata->region.position.x + videoSize.width() <= streamSize.width())
                            ? videoMetadata->region.position.x * BYTES_PER_PIXEL
                            : 0;

    char *dst = q->fb;
    for (int i = 0; i < videoSize.height(); ++i) {
        // Adjust source content based on crop video position if needed
        src += xOffset;
        std::memcpy(dst, src, dstStride);

        // Convert BGR-ordered formats to RGB order in place.
        if (videoFormat->format == SPA_VIDEO_FORMAT_BGRA || videoFormat->format == SPA_VIDEO_FORMAT_BGRx) {
            for (int j = 0; j < dstStride; j += 4) {
                std::swap(dst[j], dst[j + 2]);
            }
        }

        src += srcStride - xOffset;
        dst += dstStride;
    }

    if (cleanup) {
        cleanup();
    }

    if (videoFormat->format != SPA_VIDEO_FORMAT_RGB) {
#if 0
        QImage img((uchar*) q->fb, videoSize.width(), videoSize.height(), dstStride, spaToQImageFormat(videoFormat->format));
        img.convertTo(QImage::Format_RGB888);
#endif
    }

    // Report damaged regions when the stream provides them, otherwise mark everything dirty.
    if (spa_meta *vdMeta = spa_buffer_find_meta(spaBuffer, SPA_META_VideoDamage)) {
        struct spa_meta_region *r;
        spa_meta_for_each(r, vdMeta)
        {
            if (!spa_meta_region_is_valid(r))
                break;

            q->tiles.append(QRect(r->region.position.x, r->region.position.y, r->region.size.width, r->region.size.height));
        }
    } else {
        q->tiles.append(QRect(0, 0, videoSize.width(), videoSize.height()));
    }
}

/**
 * @brief PWFrameBuffer::Private::createReceivingStream - create a stream that will consume
 *        PipeWire buffers and copy the framebuffer into the image we track. Stream state
 *        and format negotiation are handled by the listener registered here.
 * @return the connected stream, or nullptr when creation failed
 */
pw_stream *PWFrameBuffer::Private::createReceivingStream()
{
    // Accept anything from 1x1 up to whatever the compositor offers.
    spa_rectangle pwMinScreenBounds = SPA_RECTANGLE(1, 1);
    spa_rectangle pwMaxScreenBounds = SPA_RECTANGLE(UINT32_MAX, UINT32_MAX);

    spa_fraction pwFramerateMin = SPA_FRACTION(0, 1);
    spa_fraction pwFramerateMax = SPA_FRACTION(60, 1);

    // "pipewire.client.reuse" lets the portal reuse the existing client connection.
    auto reuseProps = pw_properties_new("pipewire.client.reuse", "1", nullptr); // null marks end of varargs
    auto stream = pw_stream_new(pwCore, "krfb-fb-consume-stream", reuseProps);
    if (!stream) {
        // Previously the null stream was passed straight into pw_stream_add_listener.
        qCWarning(KRFB_FB_PIPEWIRE) << "Could not create receiving stream";
        isValid = false;
        return nullptr;
    }

    uint8_t buffer[1024] = {};
    const spa_pod *params[1];
    auto builder = SPA_POD_BUILDER_INIT(buffer, sizeof(buffer));

    // Offer raw video in any of the formats handleFrame() can consume.
    params[0] = reinterpret_cast<spa_pod *>(spa_pod_builder_add_object(&builder,
                                                                       SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat,
                                                                       SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video),
                                                                       SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw),
                                                                       SPA_FORMAT_VIDEO_format, SPA_POD_CHOICE_ENUM_Id(6, SPA_VIDEO_FORMAT_RGBx, SPA_VIDEO_FORMAT_RGBA, SPA_VIDEO_FORMAT_BGRx, SPA_VIDEO_FORMAT_BGRA, SPA_VIDEO_FORMAT_RGB, SPA_VIDEO_FORMAT_BGR),
                                                                       SPA_FORMAT_VIDEO_size, SPA_POD_CHOICE_RANGE_Rectangle(&pwMaxScreenBounds, &pwMinScreenBounds, &pwMaxScreenBounds),
                                                                       SPA_FORMAT_VIDEO_framerate, SPA_POD_Fraction(&pwFramerateMin),
                                                                       SPA_FORMAT_VIDEO_maxFramerate, SPA_POD_CHOICE_RANGE_Fraction(&pwFramerateMax, &pwFramerateMin, &pwFramerateMax)));

    pw_stream_add_listener(stream, &streamListener, &pwStreamEvents, this);
    if (pw_stream_connect(stream, PW_DIRECTION_INPUT, pwStreamNodeId, PW_STREAM_FLAG_AUTOCONNECT, params, 1) != 0) {
        qCWarning(KRFB_FB_PIPEWIRE) << "Could not connect receiving stream";
        isValid = false;
    }

    return stream;
}

/**
 * @brief PWFrameBuffer::Private::~Private - tear down PipeWire objects in reverse order of
 *        creation: stop the loop first so no callback runs during destruction, then the
 *        stream, core, context and finally the loop itself.
 */
PWFrameBuffer::Private::~Private()
{
    if (pwMainLoop) {
        pw_thread_loop_stop(pwMainLoop);
    }

    if (pwStream) {
        pw_stream_destroy(pwStream);
    }

    if (pwCore) {
        pw_core_disconnect(pwCore);
    }

    if (pwContext) {
        pw_context_destroy(pwContext);
    }

    if (pwMainLoop) {
        pw_thread_loop_destroy(pwMainLoop);
    }

    // The negotiated format is heap-allocated in onStreamParamChanged and was never freed.
    delete videoFormat;
    videoFormat = nullptr;
}

// Construct the framebuffer wrapper; fb (inherited from FrameBuffer) stays null until
// handleFrame() allocates it on the first received frame. Private::init() is called
// separately via PWFrameBuffer::init().
PWFrameBuffer::PWFrameBuffer(WId winid, QObject *parent)
    : FrameBuffer(winid, parent)
    , d(new Private(this))
{
    fb = nullptr;
}

// Release the frame storage allocated with malloc() in handleFrame().
PWFrameBuffer::~PWFrameBuffer()
{
    if (fb) {
        free(fb);
        fb = nullptr;
    }
}

// Bit depth of the exported framebuffer; frames are always copied as 32-bit pixels.
int PWFrameBuffer::depth()
{
    static constexpr int kDepthBits = 32;
    return kDepthBits;
}

int PWFrameBuffer::height()
{
    return d->videoSize.height();
}

int PWFrameBuffer::width()
{
    return d->videoSize.width();
}

// Padded row width reported to the VNC layer.
// NOTE(review): width() * 5 looks suspicious given BYTES_PER_PIXEL == 4 — confirm
// whether this should be width() * BYTES_PER_PIXEL (bytes per row) or the factor
// of 5 is deliberate extra padding expected by the caller.
int PWFrameBuffer::paddedWidth()
{
    return width() * 5;
}

// Describe the pixel layout of fb to the RFB/VNC server.
// NOTE(review): the color shifts are only set under HUAWEI_KLU_PGV, and the
// red/green/blueMax fields are never set at all — confirm the caller pre-fills
// sensible defaults, otherwise non-HUAWEI builds hand out an incomplete format.
void PWFrameBuffer::getServerFormat(rfbPixelFormat &format)
{
    format.bitsPerPixel = 32;
    format.depth = 32;
    format.trueColour = true;
    format.bigEndian = false;
#ifdef HUAWEI_KLU_PGV
    // 0xRRGGBB little-endian layout (blue in the lowest byte).
    format.redShift = 16;
    format.greenShift = 8;
    format.blueShift = 0;
#endif
}

// Intentionally a no-op: frames are pushed by the PipeWire stream callbacks,
// so there is no polling to start.
void PWFrameBuffer::startMonitor()
{
}

// Intentionally a no-op: the PipeWire stream is torn down in Private's destructor,
// not per monitor start/stop.
void PWFrameBuffer::stopMonitor()
{
}

// Last known cursor position. NOTE(review): only ever updated by the currently
// disabled (#if 0) cursor-metadata block in handleFrame, so this stays at (0,0).
QPoint PWFrameBuffer::cursorPosition()
{
    return d->cursorPosition;
}

// Expose PipeWire-specific properties; currently only the stream node id, which the
// consumer needs in order to attach to the screencast stream.
QVariant PWFrameBuffer::customProperty(const QString &property) const
{
    const bool wantsNodeId = (property == QLatin1String("stream_node_id"));
    if (wantsNodeId) {
        return QVariant::fromValue<uint>(d->pwStreamNodeId);
    }

    // Everything else is handled by the base class.
    return FrameBuffer::customProperty(property);
}

// True while no dbus/wayland/pipewire setup step has failed; cleared by the
// Private failure paths.
bool PWFrameBuffer::isValid() const
{
    return d->isValid;
}

/**
 * @brief PWFrameBuffer::myFrameBuffer - factory: create and initialize a PipeWire-backed
 *        framebuffer, returning nullptr when initialization failed.
 * @param id window id forwarded to the FrameBuffer base class
 * @return an initialized framebuffer, or nullptr on dbus/wayland/pipewire errors
 */
FrameBuffer *PWFrameBuffer::myFrameBuffer(WId id)
{
    PWFrameBuffer *pwfb = new PWFrameBuffer(id);

    pwfb->init();

    // sanity check for dbus/wayland/pipewire errors — init() is what can clear the
    // validity flag, so it must be tested *after* init() (the old code checked before,
    // which could never fail).
    if (!pwfb->isValid()) {
        delete pwfb;
        return nullptr;
    }

    return pwfb;
}

// Forward to the d-pointer: starts the Wayland screencast and PipeWire connectivity.
void PWFrameBuffer::init()
{
    d->init();
}
