#include "core_wl.h"
#include "global/global.h"

#include <QDBusInterface>
#include <QDBusReply>
#include <QDBusUnixFileDescriptor>
#include <QDBusArgument>
#include <QDBusConnection>
#include <QDBusObjectPath>
#include <QMap>
#include <QVariant>
#include <QVariantMap>
#include <QDebug>
#include <QDateTime>
#include <QThread>
#include <QApplication>
#include <QScreen>
#include "devices.h"
#include "qstandardpaths.h"
#include <pulse/simple.h>
#include <pulse/error.h>
#include <pulse/pulseaudio.h>

Q_DECLARE_METATYPE(Core_wl::Stream)
Q_DECLARE_METATYPE(Core_wl::Streams)

// Meyers-singleton accessor: the single Core_wl instance is created on first
// use and lives until program exit.
Core_wl *Core_wl::getInstance()
{
    static Core_wl instance;
    return &instance;
}

// Deserializes one portal "stream" entry — a D-Bus (ua{sv}) structure made of
// the PipeWire node id plus a string->variant property map — into a
// Core_wl::Stream. Required so qdbus_cast<Streams>() can decode the portal's
// Start response (see slot_handleStartResponse).
const QDBusArgument &operator >> ( const QDBusArgument &arg, Core_wl::Stream &stream )
{
    arg.beginStructure();
    arg >> stream.node_id;

    arg.beginMap();

    // Copy every property of the stream into stream.map.
    while ( !arg.atEnd() ) {
        QString key;
        QVariant value;
        arg.beginMapEntry();
        arg >> key >> value;
        arg.endMapEntry();
        stream.map.insert( key, value );
    }

    arg.endMap();
    arg.endStructure();

    return arg;
}

// Constructor: logs which required GStreamer elements are installed, defaults
// the output directory to the user's Movies location, and wires the portal
// handshake results to their handlers.
Core_wl::Core_wl( QObject* parent )
    : QObject{ parent }
{
    set_check_all_Elements_available();
    m_sVideoPath = QStandardPaths::writableLocation( QStandardPaths::MoviesLocation );
    qDebug() << "m_sVideoPath:" << m_sVideoPath;

    // Once the portal hands over the PipeWire fd + node id, build and start
    // the recording pipeline; on abort, just log it.
    connect( this, SIGNAL( signal_portal_fd_path(QString,QString) ), this, SLOT( slot_start_gst(QString,QString) ) );
    connect( this, SIGNAL( signal_portal_aborted() ),                this, SLOT( slot_portal_dialog_aborted() ) );
}

// Destructor. The portal interface is parented to this object but is released
// explicitly as well; `delete` on a null pointer is a no-op, so no check is
// needed.
Core_wl::~Core_wl()
{
    delete mScreencastPortal;
}

//void Core_wl::setRecordProperty(QString screen, FORMAT fmt, QPoint lt, QPoint br, int fps, double scale)
//{
//    m_screen = screen;
//    m_format = fmt;
//    m_x = lt.x();
//    m_y = lt.y();
//    m_RightWidth = br.x();  // 这里的宽=屏幕宽像素/缩放比-x-width得到的，高计算方式同理
//    m_BottomHeight = br.y();
//    m_fps = fps;
//    m_scale = scale;
//}

// Store the capture-area geometry for a later recording.
// Only 'screen', 'rect' and 'scale' are consumed; the remaining parameters
// are kept for interface compatibility with callers and are explicitly
// marked unused here.
void Core_wl::setPropertyOfSelectWid(uint qId, bool isfullscreen, bool isWindow, QString screen, uint id, QRect rect, double scale)
{
    Q_UNUSED(qId)
    Q_UNUSED(isfullscreen)
    Q_UNUSED(isWindow)
    Q_UNUSED(id)

    m_screen = screen;
    m_x = rect.left();
    m_y = rect.top();

    // videocrop wants distances measured from the right/bottom screen edges,
    // so derive them from the scaled primary-screen size minus the selection.
    QRect screenRect = QGuiApplication::primaryScreen()->geometry();
    m_RightWidth   = screenRect.width()  / scale - rect.left() - rect.width();
    m_BottomHeight = screenRect.height() / scale - rect.top()  - rect.height();
    m_scale = scale;
}

// Set the directory the finished recording will be written to
// (used by slot_start_gst when building the filesink location).
void Core_wl::setSavePath(QString savePath)
{
    m_sVideoPath = savePath;
}

// Map the container name selected in the UI onto the FORMAT enum.
// An unknown name leaves m_format untouched — same behaviour as the original
// if-chain, which only assigned on an exact match.
void Core_wl::setSaveFormat(QString saveformat)
{
    static const QMap<QString, FORMAT> formats = {
        { "mp4",  MP4  },
        { "mkv",  MKV  },
        { "avi",  AVI  },
        { "mov",  MOV  },
        { "webm", WEBM },
        { "gif",  GIF  },
    };

    const auto it = formats.constFind( saveformat );
    if ( it != formats.constEnd() ) {
        m_format = it.value();
    }
}

// Map the requested save-quality level onto the encoder bitrate and quantizer
// range. Lower quantizer (qp) values mean higher visual quality.
void Core_wl::setResolution(int resolution)
{
    if (resolution == static_cast<int>(SaveResolution::SUPER_OR_ORIGINAL_QUALITY))
    {
        m_bitrate = 8000; // unit: kbps (the log below prints kbps; the old "Mbps" note was wrong)
        m_qpmin = 2;
        m_qpmax = 2;
//        m_profile = "high-4:4:4";
    }
    else if(resolution == static_cast<int>(SaveResolution::HIGHT_QUALITY)){
        m_bitrate = 5000;
        m_qpmin = 15;
        m_qpmax = 15;
    }
    else if(resolution == static_cast<int>(SaveResolution::STANDARD_QUALITY)){
        m_bitrate = 2500;
        m_qpmin = 35;
        m_qpmax = 35;
    }
    qDebug()<<"core - 已设置 码率  m_bitrate"<<m_bitrate << "kbps";
}

// Parse the frame-rate text coming from the UI (e.g. "30 fps") and store the
// matching numeric rate. Candidates are checked in ascending order, so if
// several numbers occur in the string the last match wins — identical to the
// original sequence of independent if statements.
void Core_wl::setFrameRate(QString frameRate)
{
    const int candidates[] = { 15, 25, 30, 50, 60 };

    for ( int fps : candidates ) {
        if ( frameRate.contains( QString::number( fps ) ) ) {
            m_fps = fps;
        }
    }
}

// Select the microphone used by the level meter. An empty name or the
// "no microphone" placeholder (m_NoMic) clears the selection; otherwise the
// running level-meter pipeline is re-pointed at the chosen device.
void Core_wl::setMicrophone(QString microphone)
{
    if(microphone =="" || microphone == m_NoMic){
        m_microphoneName = "";
    }else{
        m_microphoneName = microphone;
    }

    // Resolve the human-readable name to its PulseAudio device id.
    QString newDevice = "";
    if (mapNameToDevice.contains(m_microphoneName)) {
        newDevice = mapNameToDevice.value(m_microphoneName);
    }else{
        qDebug()<<"Core_wl::setMicrophone 选择设备不存在";
    }

    // Re-target the level-meter pipeline and re-install its bus callback.
    ModifylevelMeterDevice(newDevice);
}

//void Core_wl::getMicDevices(QStringList &ls, QString soundtype, bool isName)
//{

//}

// Step 1 of the xdg-desktop-portal ScreenCast handshake: create a session.
// 'm_sourceType' selects monitor/window capture and 'm_cursorModes' how the
// cursor is embedded; both are stored for the later SelectSources call.
// The portal replies asynchronously via a Request object whose path is
// derived from our unique bus name plus the handle token, so the Response
// signal must be connected before CreateSession is invoked.
void Core_wl::slot_startScreenCast(uint m_sourceType, uint m_cursorModes )
{
    mCursorModes = m_cursorModes;
    mSourcType = m_sourceType;

    QDBusInterface* portal = screencastPortal();
    if ( !portal ) {
        qDebug().noquote() <<  Global::nameOutput << "Aborted in function: Core_wl::slot_startScreenCast";
        return;
    }

    // Create ScreenCast session
    QString requestToken = createRequestToken();
    QMap<QString, QVariant> options;
    options["session_handle_token"] = createSessionToken();
    options["handle_token"] = requestToken;

    // connect before call so the asynchronous Response cannot be missed
    QDBusConnection::sessionBus().connect( "",
                                           mRequestPath + requestToken,
                                           "org.freedesktop.portal.Request",
                                           "Response",
                                           "ua{sv}",
                                           this,
                                           SLOT(slot_handleCreateSessionResponse(uint,QMap<QString,QVariant>)));

    const QDBusReply<QDBusObjectPath> reply = portal->call("CreateSession", options);
    if ( !reply.isValid() ) {
        qDebug().noquote() <<  Global::nameOutput << "Couldn't get reply to ScreenCast/CreateSession";
        qDebug().noquote() <<  Global::nameOutput << "Error: " << reply.error().message();
        emit signal_portal_aborted();
        return;
    }
}

// Stop recording and close the portal session: stopRec() tears down the
// GStreamer pipeline first, then the portal Session object is asked to Close
// so the compositor stops streaming.
// NOTE(review): mSession is only cleared on the error path — a successful
// Close leaves the stale handle behind; confirm whether that is intended.
void Core_wl::slot_stopScreenCast()
{
    stopRec();
    if ( mSession.isEmpty() ) {
        return;
    }

    QDBusInterface portal( "org.freedesktop.portal.Desktop", mSession, "org.freedesktop.portal.Session" );
    if ( portal.isValid() ) {
        const QDBusReply<void> reply = portal.call( "Close" );
        if ( !reply.isValid() ) {
            qDebug().noquote() <<  Global::nameOutput << "Couldn't get reply to ScreenCast/Close";
            qDebug().noquote() <<  Global::nameOutput << "Error: " << reply.error().message();
            mSession.clear();
            return;
        }
    }
}

// Step 2: the portal answered CreateSession. 'response' == 0 means success
// (per the portal Request API); the session handle is stored and
// SelectSources is invoked so the user can pick what to share.
void Core_wl::slot_handleCreateSessionResponse( uint response, const QVariantMap& results )
{
    if ( response != 0 ) {
        qDebug().noquote() <<  Global::nameOutput << "Failed to create session: " << response << results;
        emit signal_portal_aborted();
        return;
    }

    mSession = results.value( "session_handle" ).toString();

    QDBusInterface* portal = screencastPortal();
    if ( !portal ) {
        qDebug().noquote() <<  Global::nameOutput << "Aborted in function: Core_wl::slot_handleCreateSessionResponse";
        return;
    }

    // Select sources
    QString requestToken = createRequestToken();
    QMap<QString, QVariant> options;
    options["multiple"] = false;
    options["types"] = mSourcType;          // monitor / window bitmask chosen by the caller
    options["cursor_mode"] = mCursorModes;
    options["handle_token"] = requestToken;
    options["persist_mode"] = TRANSIENT;    // remember the user's choice for this session only

    // A saved restore token lets the portal skip the chooser dialog next time.
    if ( !mRestoreToken.isEmpty() ) {
        options["restore_token"] = mRestoreToken;
    }

    // connect before call
    QDBusConnection::sessionBus().connect( "",
                                           mRequestPath + requestToken,
                                           "org.freedesktop.portal.Request",
                                           "Response",
                                           "ua{sv}",
                                           this,
                                           SLOT(slot_handleSelectSourcesResponse(uint,QMap<QString,QVariant>)));

#if QT_VERSION >= QT_VERSION_CHECK(5, 15, 10)
    // NOTE(review): on older Qt this call is compiled out entirely, so the
    // handshake silently stalls here — confirm the version guard is intended.
    const QDBusReply<QDBusObjectPath> reply = portal->call( "SelectSources", QDBusObjectPath(mSession), options );
    if ( !reply.isValid() ) {
        qDebug().noquote() <<  Global::nameOutput << "Couldn't get reply in Core_wl::slot_handleCreateSessionResponse";
        qDebug().noquote() <<  Global::nameOutput << "Error: " << reply.error().message();
        emit signal_portal_aborted();
        return;
    }
#endif
}

// Step 3: the portal answered SelectSources. On success, Start is invoked,
// which shows the compositor's confirmation dialog to the user; the result
// arrives in slot_handleStartResponse.
void Core_wl::slot_handleSelectSourcesResponse( uint response, const QVariantMap& results )
{
    Q_UNUSED(results);

    if ( response != 0 ) {
        qDebug().noquote() <<  Global::nameOutput << "Failed to select sources: " << response;
        emit signal_portal_aborted();
        return;
    }

    QDBusInterface* portal = screencastPortal();
    if ( !portal ) {
        qDebug().noquote() <<  Global::nameOutput << "Aborted in function: Core_wl::slot_handleSelectSourcesResponse";
        return;
    }

    // Start ScreenCast
    QString requestToken = createRequestToken();
    QMap<QString, QVariant> options;
    options["handle_token"] = requestToken;

    // connect before call so the asynchronous Response cannot be missed
    QDBusConnection::sessionBus().connect( "",
                                           mRequestPath + requestToken,
                                           "org.freedesktop.portal.Request",
                                           "Response",
                                           "ua{sv}",
                                           this,
                                           SLOT(slot_handleStartResponse(uint,QMap<QString,QVariant>)));

#if QT_VERSION >= QT_VERSION_CHECK(5, 15, 10)
    // NOTE(review): compiled out on older Qt — same concern as SelectSources.
    const QDBusReply<QDBusObjectPath> reply = portal->call( "Start", QDBusObjectPath(mSession), "", options );
    if ( !reply.isValid() ) {
        qDebug().noquote() <<  Global::nameOutput << "Couldn't get reply in Core_wl::slot_handleSelectSourcesResponse";
        qDebug().noquote() <<  Global::nameOutput << "Error: " << reply.error().message();
        emit signal_portal_aborted();
        return;
    }
#endif
}

// Final portal stage: the user confirmed (or cancelled) the share dialog.
// On success the response carries the PipeWire streams; a PipeWire remote is
// opened and its fd plus the stream node id are handed to the GStreamer side
// via signal_portal_fd_path().
void Core_wl::slot_handleStartResponse( uint response, const QVariantMap& results )
{
    if ( response != 0 ) {
        qDebug().noquote() <<  Global::nameOutput << "Failed to start or cancel dialog: " << response;
        emit signal_portal_aborted();
        return;
    }

    // save restore token (lets the next session skip the chooser dialog)
    mRestoreToken = results.value( "restore_token" ).toString();

    const Streams streams = qdbus_cast<Streams>( results.value( "streams" ) );
    if ( streams.isEmpty() ) {
        // Fix: calling last() on an empty QList is undefined behaviour.
        qDebug().noquote() <<  Global::nameOutput << "No streams returned by the portal";
        emit signal_portal_aborted();
        return;
    }
    const Stream stream = streams.last();

    QDBusInterface* portal = screencastPortal();
    if ( !portal ) {
        qDebug().noquote() <<  Global::nameOutput << "Aborted in function: Core_wl::slot_handleStartResponse";
        return;
    }

    // Open PipeWire Remote
#if QT_VERSION >= QT_VERSION_CHECK(5, 15, 10)
    QMap<QString, QVariant> options;
    const QDBusReply<QDBusUnixFileDescriptor> reply = portal->call( "OpenPipeWireRemote", QDBusObjectPath(mSession), options );
    if ( !reply.isValid() ) {
        qDebug().noquote() <<  Global::nameOutput << "Couldn't get reply in Core_wl::slot_handleStartResponse";
        qDebug().noquote() <<  Global::nameOutput << "Error: " << reply.error().message();
        emit signal_portal_aborted();
        return;
    }

    const QString fd = QString::number( reply.value().fileDescriptor() );
    const QString path = QString::number( stream.node_id );

    emit signal_portal_fd_path( fd, path );
#endif
}

// Lazily create (and cache) the D-Bus interface to the ScreenCast portal.
// Portal replies arrive on Request objects at
// /org/freedesktop/portal/desktop/request/<sender>/<token>, where <sender>
// is our unique bus name with the leading ':' dropped and '.' replaced by
// '_'; that path prefix is computed once here.
// Returns nullptr (and emits signal_portal_aborted) if the portal service is
// unavailable.
QDBusInterface* Core_wl::screencastPortal()
{
    if ( !mScreencastPortal ) {
        mScreencastPortal = new QDBusInterface( "org.freedesktop.portal.Desktop",
                                                "/org/freedesktop/portal/desktop",
                                                "org.freedesktop.portal.ScreenCast");
        mScreencastPortal->setParent(this);

        mRequestPath = "/org/freedesktop/portal/desktop/request/" + mScreencastPortal->connection().baseService().remove(0, 1).replace('.', '_') + "/";
        qDebug().noquote() <<  Global::nameOutput << "request path" << mRequestPath;
    }

    if ( mScreencastPortal->isValid() ) {
        return mScreencastPortal;
    }

    emit signal_portal_aborted();
    return nullptr;
}

// Produce a unique session-handle token ("vosess1", "vosess2", ...).
// The counter is a function-local static, so it keeps counting across calls
// even though the method itself is const.
QString Core_wl::createSessionToken() const
{
    static int counter = 0;
    return QString( "vosess%1" ).arg( ++counter );
}

// Produce a unique request-handle token ("voreq1", "voreq2", ...), counting
// across calls via a function-local static.
QString Core_wl::createRequestToken() const
{
    static int counter = 0;
    return QString( "voreq%1" ).arg( ++counter );
}

// Build the GStreamer videocrop element description for the selected area.
// m_x/m_y are the left/top offsets; m_RightWidth/m_BottomHeight are the
// distances from the right/bottom screen edges (see setPropertyOfSelectWid).
QString Core_wl::get_Area_Videocrop()
{
    return QString( "videocrop top=%1 right=%2 bottom=%3 left=%4" )
            .arg( m_y )
            .arg( m_RightWidth )
            .arg( m_BottomHeight )
            .arg( m_x );
}

// Return the muxer portion of the pipeline. GIF is produced directly by the
// gifenc encoder, so no muxer element is inserted for it.
QString Core_wl::getMuxer()
{
    // NOTE(review): setEncoderAndAudioCodec() stores "gifenc" in m_encoder,
    // not m_sMuxer, so the original test against m_sMuxer alone could never
    // fire; both members are checked here. The dead initialisation
    // `value = m_encoder` in the original suggests this was the intent.
    if ( m_encoder == "gifenc" || m_sMuxer == "gifenc" ) {
        return "";
    }

    if ( m_sMuxer == "matroskamux" ) {
        // matroskamux can record the producing application in the file header.
        return m_sMuxer + " name=mux writing-app=" +  Global::name + "_" + QString(  Global::version ).replace( " ", "_" );
    }

    return m_sMuxer + " name=mux";
}

// Probe whether a PulseAudio server is reachable by attempting a
// non-autospawning connection to the default server. Returns true when the
// connection attempt could at least be started (pa_context_connect >= 0).
bool Core_wl::isAvailable()
{
    pa_mainloop *pa_ml = pa_mainloop_new();
    if ( !pa_ml ) {
        return false;
    }

    pa_mainloop_api *pa_mlapi = pa_mainloop_get_api( pa_ml );
    pa_context *context = pa_context_new( pa_mlapi, NULL );
    if ( !context ) {
        // Fix: pa_context_unref() must not be called on NULL, and the
        // original never checked the allocation.
        pa_mainloop_free( pa_ml );
        return false;
    }

    // A negative return means the connection attempt could not be started.
    const bool value = pa_context_connect( context, NULL, PA_CONTEXT_NOAUTOSPAWN, NULL ) >= 0;

    pa_context_unref( context );
    pa_mainloop_free( pa_ml );

    return value;
}

// Ask the PulseAudio helper (devices.h) for all audio devices. The helper
// returns entries separated by "---"; the last three characters (presumably
// a trailing separator — matches the original `length() - 3`) are stripped
// before splitting.
QStringList Core_wl::getAllDevices()
{
    QStringList list;
    if ( isAvailable() ) {
        // Fix: the original converted the same C string from UTF-8 twice.
        const QString raw = QString::fromUtf8( get_all_audio_devices() );
        list = raw.left( raw.length() - 3 ).split( "---" );
    }

    // An empty entry means the device string was malformed; report "none".
    if ( list.contains( "" ) ) {
        list.clear();
    }

    return list;
}

// Append every audio device whose id contains 'soundtype' (e.g. "monitor"
// selects system-sound loopback devices) to 'ls'. Each raw entry has the
// form "<device id>:::<human readable name>". All discovered devices are
// logged regardless of the filter.
void Core_wl::getDevices(QStringList &ls, QString soundtype)
{
    const QStringList entries = getAllDevices();
    if ( entries.empty() )
    {
        return;
    }

    for ( const QString &entry : entries ) {
        const QString device = entry.section( ":::", 0, 0 );
        const QString name   = entry.section( ":::", 1, 1 );

        if ( device.contains( soundtype ) )
        {
             ls << device;
        }

        qDebug().noquote() << Global::nameOutput << "[Audio] Found:" << name << "Device:" << device;
    }
    qDebug().noquote();
}

// Append every audio device whose id does NOT contain 'soundtype' to 'ls' —
// the inverse filter of getDevices(); with "monitor" this selects the real
// capture devices (microphones). Entries have the form
// "<device id>:::<human readable name>"; all devices found are logged.
void Core_wl::getMicDevices(QStringList &ls, QString soundtype)
{
    const QStringList entries = getAllDevices();
    if ( entries.empty() )
    {
        return;
    }

    for ( const QString &entry : entries ) {
        const QString device = entry.section( ":::", 0, 0 );
        const QString name   = entry.section( ":::", 1, 1 );

        if ( !device.contains( soundtype ) )
        {
             ls << device;
        }

        qDebug().noquote() << Global::nameOutput << "[Audio] Found:" << name << "Device:" << device;
    }
    qDebug().noquote();
}

// Collect the PulseAudio device ids to record from. Monitor devices (ids
// containing "monitor") capture system sound and are added only when that
// option is enabled. Microphone devices are currently added unconditionally —
// the isOpenMic check is commented out; instead, a disabled microphone is
// muted via closeMicrophone() so its (still recorded) track stays silent.
// NOTE(review): confirm that recording a muted mic track is intended.
QStringList Core_wl::getSelectedAudioDevice()
{
    QStringList list;
    if (Global::isOpenSysSound)
    {
        getDevices(list, "monitor");
    }

//    if (Global::isOpenMic)
    {
        getMicDevices(list, "monitor");
    }
    if (!Global::isOpenMic)
    {
        closeMicrophone();
    }

    return list;
}

// Build the video-encoder portion of the pipeline for the currently selected
// encoder (m_encoder), including its tuning options and — where required —
// a parser or caps element linked after it. Returns an empty string for an
// unknown encoder name.
QString Core_wl::get_Videocodec_Encoder()
{
    const QString threads = QString::number( QThread::idealThreadCount() );
    QString value;

    if ( m_encoder == "openh264enc" ) {
        value = QStringList{
            m_encoder,
            "qp-min=" + QString::number( m_qpmin ),
            "qp-max=" + QString::number( m_qpmax ),
            "usage-type=camera", // We need camera not screen. With screen and a fast sequence of images the video jerks.
            "complexity=low",
            "multi-thread=" + threads,
            "slice-mode=auto", // Number of slices equal to number of threads
        }.join( " " ) + " ! h264parse";
    } else if ( m_encoder == "x264enc" ) {
        value = QStringList{
            m_encoder,
            "qp-min=" + QString::number( m_qpmin ),
            "qp-max=" + QString::number( m_qpmax ),
            "speed-preset=superfast",
            "threads=" + threads,
        }.join( " " ) + " ! video/x-h264, profile=baseline";
    } else if ( m_encoder == "vp8enc" ) {
        value = QStringList{
            m_encoder,
            "min_quantizer=" + QString::number( m_qpmin ),
            "max_quantizer=" + QString::number( m_qpmax ),
            "cpu-used=" + threads,
            "deadline=1000000",
            "threads=" + threads,
        }.join( " " );
    }

    return value;
}

// Build and launch the GStreamer recording pipeline. 'fd' is the PipeWire
// remote file descriptor and 'path' the stream node id, both delivered by the
// portal handshake (slot_handleStartResponse).
void Core_wl::slot_start_gst(QString fd, QString path )
{
    setEncoderAndAudioCodec();

    // Query the selected audio devices ONCE. getSelectedAudioDevice() talks
    // to PulseAudio and may mute the microphone as a side effect; the
    // original per-use calls repeated that work (and side effect) many times.
    const QStringList audioDevices = getSelectedAudioDevice();

    QStringList stringList;
    stringList << QString( "pipewiresrc fd=" ).append( fd ).append( " path=" ).append( path ).append( " do-timestamp=true" );
    stringList << "videoconvert";
    stringList << "videorate";
    stringList << "queue max-size-bytes=1073741824 max-size-time=10000000000 max-size-buffers=1000";
    stringList << get_Area_Videocrop();
    stringList << "video/x-raw, framerate=" + QString::number( m_fps ) + "/1";
    stringList << get_Videocodec_Encoder();
    stringList << "queue";
    stringList << "mux.";

    // Pipeline for one selected audiodevice
    if ( ( audioDevices.count() == 1 ) and !m_sAudioCodec.isEmpty() )
    {
        stringList << "pulsesrc device=" + audioDevices.at(0);
        stringList << "audioconvert";
        stringList << "audiorate";
        stringList << "audio/x-raw, channels=2";
        stringList << "queue max-size-bytes=1000000 max-size-time=10000000000 max-size-buffers=1000";
        stringList << m_sAudioCodec;
        stringList << "queue";
        stringList << "mux.";
    }

    // Pipeline for more than one audiodevice: mix all sources via audiomixer
    if ( ( audioDevices.count() > 1 ) and !m_sAudioCodec.isEmpty() )
    {
        for ( int x = 0; x < audioDevices.count(); x++ )
        {
            stringList << "pulsesrc device=" + audioDevices.at(x);
            stringList << "audioconvert";
            stringList << "audioresample";
            stringList << "queue";
            stringList << "mix.";
        }
        stringList << "audiomixer name=mix";
        stringList << "audioconvert";
        stringList << "audiorate";
        stringList << "queue";
        stringList << m_sAudioCodec;
        stringList << "queue";
        stringList << "mux.";
    }

    stringList << getMuxer();

    QString newVideoFilename =  Global::name + "-" + QDateTime::currentDateTime().toString( "yyyy-MM-dd_hh-mm-ss" ) + "." + m_sFormat;
    stringList << "filesink location=\"" + m_sVideoPath + "/" + newVideoFilename + "\"";

    // Elements that feed a named element ("mix."/"mux.") must not be followed
    // by a link operator, so drop the "!" that join() inserted after them.
    QString sPipeline = stringList.join( " ! " );
    sPipeline = sPipeline.replace( "mix. !", "mix." );
    sPipeline = sPipeline.replace( "mux. !", "mux." );

    qDebug();
    qDebug().noquote() <<  Global::nameOutput << "Start record with:" << sPipeline;
    qDebug();
    qDebug().noquote() << pipeline_structured_output( sPipeline );

    QByteArray byteArray = sPipeline.toUtf8();
    const gchar *line = byteArray.constData();
    GError *error = Q_NULLPTR;
    pipeline = gst_parse_launch( line, &error );
    if ( !pipeline ) {
        // Fix: the parse result was never checked before.
        qDebug().noquote() << Global::nameOutput << "Unable to build pipeline:"
                           << ( error ? error->message : "unknown error" );
        g_clear_error( &error );
        return;
    }
    g_clear_error( &error ); // gst_parse_launch may set a non-fatal warning

    // Fix: the bus pointer used to be 'static', so every recording after the
    // first kept listening on the bus of the very first pipeline.
    GstBus *bus = gst_pipeline_get_bus( GST_PIPELINE ( pipeline ) );
    gst_bus_set_sync_handler( bus, (GstBusSyncHandler)call_bus_message, this, NULL );
    gst_object_unref( bus ); // the pipeline holds its own reference on the bus

    // Start playing
    GstStateChangeReturn ret = gst_element_set_state( pipeline, GST_STATE_PLAYING );
    if ( ret == GST_STATE_CHANGE_FAILURE )   { qDebug().noquote() <<  Global::nameOutput << "Start was clicked" << "GST_STATE_CHANGE_FAILURE" << "Returncode =" << ret;   } // 0
    if ( ret == GST_STATE_CHANGE_SUCCESS )   { qDebug().noquote() <<  Global::nameOutput << "Start was clicked" << "GST_STATE_CHANGE_SUCCESS" << "Returncode =" << ret;   } // 1
    if ( ret == GST_STATE_CHANGE_ASYNC )     { qDebug().noquote() <<  Global::nameOutput << "Start was clicked" << "GST_STATE_CHANGE_ASYNC"   << "Returncode =" << ret;   } // 2
    if ( ret == GST_STATE_CHANGE_NO_PREROLL ){ qDebug().noquote() <<  Global::nameOutput << "Start was clicked" << "GST_STATE_CHANGE_NO_PREROLL" << "Returncode =" << ret; }// 3
    if ( ret == GST_STATE_CHANGE_FAILURE )
    {
        qDebug().noquote() <<  Global::name << "Unable to set the pipeline to the playing state.";
        gst_object_unref( pipeline );
        pipeline = Q_NULLPTR; // don't leave a dangling pointer for stopRec()
        return;
    }
}

// Slot invoked when the portal handshake is aborted (e.g. the user dismissed
// the share dialog); currently it only logs the event.
void Core_wl::slot_portal_dialog_aborted()
{
    qDebug() << "slot_portal_dialog_aborted";
}

void Core_wl::stopRec()
{
    // wait for EOS
    bool a = gst_element_send_event( pipeline, gst_event_new_eos() );
    Q_UNUSED(a);

    GstClockTime timeout = 5 * GST_SECOND;
    GstMessage *msg = gst_bus_timed_pop_filtered( GST_ELEMENT_BUS (pipeline), timeout, GST_MESSAGE_EOS );
    Q_UNUSED(msg);

    GstStateChangeReturn ret ;
    Q_UNUSED(ret);
    ret = gst_element_set_state( pipeline, GST_STATE_PAUSED );
    Q_UNUSED(ret);
    ret = gst_element_set_state( pipeline, GST_STATE_READY );
    Q_UNUSED(ret);
    ret = gst_element_set_state( pipeline, GST_STATE_NULL );
    Q_UNUSED(ret);
    gst_object_unref( pipeline );
    qDebug().noquote() <<  Global::nameOutput << "Stop record";
}

// Pretty-print the pipeline as a multi-line gst-launch-1.0 command for the
// log: every "!" link and every named-element reference ("mux.", "mix.")
// starts a new, backslash-continued line.
// Fix: the original repeatedly re-assigned `string` from the mutated by-value
// parameter — the dead stores are removed, the produced text is unchanged.
QString Core_wl::pipeline_structured_output( QString pipeline )
{
    const QString nl = "\\";

    QString string = "gst-launch-1.0 -e " + nl + "\n    " + pipeline;
    string.replace( "mux.", "mux. " + nl + "\n   " );
    string.replace( "mix.", "mix. " + nl + "\n   " );
    string.replace( "!", nl + "\n        !" );
    string.append( "\n" );
    return string;
}

// Sync handler for the recording pipeline's bus: logs each message type and,
// for errors, the source element, its parent, the error text and the debug
// string. Always returns GST_BUS_PASS so messages continue to be posted
// normally. Runs on a GStreamer streaming thread.
GstBusSyncReply Core_wl::call_bus_message( GstBus *bus, GstMessage *message, gpointer user_data )
{
    Q_UNUSED(bus);
    Q_UNUSED(user_data)
    switch (GST_MESSAGE_TYPE (message)) {
        case GST_MESSAGE_ERROR:
            qDebug().noquote() << Global::nameOutput << "GST_MESSAGE_ERROR";
            gchar *debug;
            GError *error;
            // parse_error transfers ownership of 'error' and 'debug' to us.
            gst_message_parse_error(message, &error, &debug);
            qDebug() << message->src->name;
            qDebug() << message->src->parent->name;
            qDebug() << error->message;
            qDebug() << debug;
            g_free(debug);
            g_error_free(error);
            break;
        case GST_MESSAGE_EOS:
            qDebug().noquote() << Global::nameOutput << "GST_MESSAGE_EOS";
            break;
        case GST_MESSAGE_DURATION_CHANGED:
            qDebug().noquote() << Global::nameOutput << "GST_MESSAGE_DURATION_CHANGED";
            break;
        case GST_MESSAGE_STEP_DONE:
            qDebug().noquote() << Global::nameOutput << "GST_MESSAGE_STEP_DONE";
            break;
        case GST_MESSAGE_TAG:
            qDebug().noquote() << Global::nameOutput << "GST_MESSAGE_TAG";
            break;
        case GST_MESSAGE_STATE_CHANGED:
            qDebug().noquote() << Global::nameOutput << "GST_MESSAGE_STATE_CHANGED";
            break;
        case GST_MESSAGE_STREAM_START:
            qDebug().noquote() << Global::nameOutput << "GST_MESSAGE_STREAM_START";
            break;
        case GST_MESSAGE_APPLICATION:
            {
            qDebug().noquote() << Global::nameOutput << "GST_MESSAGE_APPLICATION";
            break;
            }
        default:
            break;
    }

    return GST_BUS_PASS;
}

// Unmute the default PulseAudio source and remember that the microphone is
// open. Counterpart to closeMicrophone().
void Core_wl::openMicrophone()
{
    qDebug()<<"Core_wl - openMicrophone";

    m_closeMicrophone = false;
    toggle_microphone(false);
}

// Mute the default PulseAudio source and remember that the microphone is
// closed. Counterpart to openMicrophone().
void Core_wl::closeMicrophone()
{
    qDebug()<<"Core_wl - closeMicrophone";

    m_closeMicrophone = true;
    toggle_microphone(true);
}

void Core_wl::toggle_microphone(bool mute)
{
    pa_mainloop* mainloop = pa_mainloop_new();
    pa_context* context = pa_context_new(pa_mainloop_get_api(mainloop), "Microphone Control");

    pa_context_connect(context, nullptr, PA_CONTEXT_NOFLAGS, nullptr);

    // Wait for context to be ready
    while (pa_context_get_state(context) != PA_CONTEXT_READY) {
        pa_mainloop_iterate(mainloop, 1, nullptr);
    }

    // Mute or unmute the default source
    pa_operation* op = pa_context_set_source_mute_by_name(context, "@DEFAULT_SOURCE@", mute, nullptr, nullptr);
    while (pa_operation_get_state(op) == PA_OPERATION_RUNNING) {
        pa_mainloop_iterate(mainloop, 1, nullptr);
    }
    pa_operation_unref(op);

    pa_context_disconnect(context);
    pa_context_unref(context);
    pa_mainloop_free(mainloop);
}

// Build and start a small monitoring pipeline
// (pulsesrc -> audioconvert -> level -> fakesink) whose "level" element posts
// RMS messages; message_handler() turns those into updateVolume() signals
// for the UI level meter.
void Core_wl::levelMeterStart(QString name)
{
    m_microphoneName = name;

    // Resolve the human-readable name to the PulseAudio device id.
    QString device;
    if(mapNameToDevice.contains(name)){
        device = mapNameToDevice.value(name);
    }

    qDebug()<<"监测音量的设备: "<< name << device;
    GstElement *audiosrc, *audioconvert, *level, *fakesink;
    GstCaps *caps;
    GstBus *bus;

    caps = gst_caps_from_string( "audio/x-raw,channels=2" );

    pipelineMic = gst_pipeline_new( NULL );
    g_assert (pipelineMic);
    audiosrc = gst_element_factory_make( "pulsesrc", "my_audiosrc" );
    g_assert (audiosrc);
    audioconvert = gst_element_factory_make( "audioconvert", NULL );
    g_assert (audioconvert);
    level = gst_element_factory_make( "level", NULL );
    g_assert (level);
    fakesink = gst_element_factory_make( "fakesink", NULL );
    g_assert (fakesink);

    gst_bin_add_many( GST_BIN( pipelineMic ), audiosrc, audioconvert, level, fakesink, NULL );
    if ( !gst_element_link( audiosrc, audioconvert ) ) {
        g_error( "Failed to link audiosrc and audioconvert" );
    }
    if (!gst_element_link_filtered( audioconvert, level, caps ) ) {
        g_error( "Failed to link audioconvert and level" );
    }
    if ( !gst_element_link( level, fakesink ) ) {
        g_error( "Failed to link level and fakesink" );
    }
    // Fix: gst_element_link_filtered() does not take ownership of the caps —
    // the original leaked this reference.
    gst_caps_unref( caps );

    g_object_set( G_OBJECT( audiosrc ), "device", device.toUtf8().constData(), NULL );

    QString m_name = "[kylin-screencap] " + name;
    g_object_set( G_OBJECT( audiosrc ), "client-name", m_name.toUtf8().constData(), NULL );
    g_object_set( G_OBJECT( level ), "post-messages", TRUE, NULL );
    g_object_set( G_OBJECT( fakesink ), "sync", TRUE, NULL );

    bus = gst_element_get_bus (pipelineMic);
    gst_bus_set_sync_handler( bus, (GstBusSyncHandler)message_handler,NULL, NULL );
    // Fix: gst_element_get_bus() returns a new reference that was leaked.
    gst_object_unref( bus );
    gst_element_set_state( pipelineMic, GST_STATE_PLAYING );
}

// Sync bus handler for the level-meter pipeline. For each message posted by
// the "level" element it reads the per-channel RMS values (in dB), converts
// them to a linear 0.0–1.0 scale and emits updateVolume() with a 0–100
// integer. Runs on a GStreamer streaming thread; the signal crosses into the
// UI thread via Qt's connection mechanism (assumed queued — confirm).
gboolean Core_wl::message_handler(GstBus *bus, GstMessage *message, gpointer data)
{
    Q_UNUSED(bus)

    if ( message->type == GST_MESSAGE_ELEMENT ) {
        const GstStructure *s = gst_message_get_structure( message );
        const gchar *name = gst_structure_get_name( s );
        if ( strcmp( name, "level" ) == 0 ) {
            gint channels;
            gdouble rms_dB;
            gdouble rms;
            const GValue *array_val;
            const GValue *value;
            GValueArray *rms_arr;

            // the values are packed into GValueArrays with the value per channel
            array_val = gst_structure_get_value( s, "rms" );
            rms_arr = (GValueArray *) g_value_get_boxed( array_val );

            // we can get the number of channels as the length of any of the value arrays
            channels = rms_arr->n_values;

            for ( gint i = 0; i < channels; ++i ) {
                value = g_value_array_get_nth( rms_arr, i );
                rms_dB = g_value_get_double( value );

                // converting from dB to normal gives us a value between 0.0 and 1.0
                rms = pow( 10, rms_dB / 20 ) ;
                // publish the volume to the UI as an integer percentage
                int int_rms = static_cast<int>(rms *100);
                emit getInstance()->updateVolume(int_rms);
            }
        }
    }

    return TRUE;
}

// Point the running level-meter pipeline at a different capture device:
// stop it, swap the "device" property on the source, re-install the bus
// callback and restart. No-op when the pipeline does not exist yet.
void Core_wl::ModifylevelMeterDevice(QString newDevice)
{
    if(pipelineMic){
        GstElement *audiosrc = gst_bin_get_by_name(GST_BIN(pipelineMic), "my_audiosrc");
        if (!audiosrc) {
            return;
        }
        gst_element_set_state(pipelineMic, GST_STATE_NULL);
        g_object_set(G_OBJECT(audiosrc), "device", newDevice.toUtf8().constData(), NULL);
        // Fix: gst_bin_get_by_name() returns a new reference — release it.
        gst_object_unref(audiosrc);

        // Re-install the volume callback on the pipeline's bus.
        GstBus *bus = gst_element_get_bus (pipelineMic);
        gst_bus_set_sync_handler( bus, (GstBusSyncHandler)message_handler,NULL, NULL );
        // Fix: gst_element_get_bus() returns a new reference — release it.
        gst_object_unref(bus);
        gst_element_set_state(pipelineMic, GST_STATE_PLAYING);
        qDebug()<<"core - 检测的麦克风设备已变更: "<< newDevice;
    }
}

// Choose the GStreamer video encoder, muxer, audio codec and file extension
// for the selected container format (m_format).
// NOTE(review): the GIF case sets only m_encoder and m_sFormat, so m_sMuxer
// and m_sAudioCodec keep whatever a previous format selected — confirm this
// is intended (getMuxer()/slot_start_gst consume those members).
void Core_wl::setEncoderAndAudioCodec()
{
    switch (m_format) {
    case MP4:
        m_encoder = "openh264enc"; // alternatives: x264enc
        m_sFormat = "mp4";
        m_sMuxer = "mp4mux";
        m_sAudioCodec = "lamemp3enc"; // alternatives: opusenc
        break;
    case MKV:
        m_encoder = "vp8enc"; // alternatives: x264enc, openh264enc
        m_sMuxer = "matroskamux";
        m_sAudioCodec = "lamemp3enc";   // alternatives: vorbisenc, flacenc, opusenc
        m_sFormat = "mkv";
        break;
    case AVI:
        m_encoder = "vp8enc"; // alternatives: openh264enc, x264enc
        m_sMuxer = "avimux";
        m_sAudioCodec = "lamemp3enc";
        m_sFormat = "avi";
        break;
    case MOV:
        m_encoder = "vp8enc"; // alternatives: x264enc, openh264enc
        m_sMuxer = "qtmux";
        m_sAudioCodec = "lamemp3enc";
        m_sFormat = "mov";
        break;
    case WEBM:
        m_sMuxer = "webmmux";
        m_encoder = "vp8enc";
        m_sAudioCodec = "opusenc"; // alternatives: vorbisenc
        m_sFormat = "webm";
        break;
    case GIF:
        m_encoder = "gifenc";
        m_sFormat = "gif";
        break;
    default:
        break;
    }
}

void Core_wl::set_check_all_Elements_available()
{
    QStringList list;
    list << "pipewiresrc";
    list << "pulsesrc";
    list << "queue";
    list << "capsfilter";
    list << "videoconvert";
    list << "videorate";
    list << "audioconvert";
    list << "audiorate";
    list << "filesink";
    list << "videoscale";
    list << "h264parse";
    list << "audiomixer";
    list << "videocrop";

    qDebug().noquote() << Global::nameOutput << "--- GStreamer elements ---";

    for ( int i = 0; i < list.count(); i++ ) {
        GstElementFactory *factory = gst_element_factory_find( QString( list.at(i) ).toLatin1() );
        if ( !factory ) {
            qDebug().noquote() << Global::nameOutput << "-" << list.at(i);
            if ( list.at(i) == "pipewiresrc" ) {
               qDebug().noquote() <<  "gstreamer-plugin-pipewire";
            }
        } else {
            qDebug().noquote() << Global::nameOutput << "+" << list.at(i);
            gst_object_unref( factory );
        }
    }
    qDebug();
}
