
#pragma once

#include <QDBusInterface>
#include <QHash>
#include <QList>
#include <QObject>
#include <QRect>
#include <QString>
#include <QStringList>
#include <QVariantMap>

#include <gst/gst.h>

#include <math.h>

/**
 * @brief Wayland screen-recording core.
 *
 * Talks to the xdg-desktop-portal ScreenCast D-Bus interface to negotiate a
 * screencast session (CreateSession / SelectSources / Start — see the three
 * private response slots), then builds a GStreamer pipeline around the
 * PipeWire fd/path the portal hands back. Also manages microphone capture and
 * a separate GStreamer pipeline used to meter microphone volume for the UI.
 *
 * Access the singleton through getInstance().
 */
class Core_wl : public QObject
{
    Q_OBJECT

public:
    static Core_wl *getInstance();

    /// Supported output container formats; FORMAT kept as a legacy alias.
    enum Format { MP4, MKV, AVI, MOV, WEBM, GIF };
    using FORMAT = Format;

    /// One PipeWire stream as reported in the portal's Start() response.
    struct Stream
    {
        uint node_id;       ///< PipeWire node id of the stream
        QVariantMap map;    ///< stream properties from the portal response
    };
    using Streams = QList<Stream>;

    explicit Core_wl( QObject* parent = nullptr );
    virtual ~Core_wl();

    /// Stores identity/geometry of the screen, window, or region selected in the UI.
    void setPropertyOfSelectWid(uint qId, bool isfullscreen, bool isWindow, QString screen, uint id, QRect rect, double scale);
    void setSavePath(QString savePath);
    void setSaveFormat(QString saveformat);
    void setResolution(int resolution);
    void setFrameRate(QString frameRate);
    void setMicrophone(QString microphone);

    /// Fills `ls` with microphone device names of the given sound type.
    void getMicDevices(QStringList &ls, QString soundtype);
    void openMicrophone();
    void closeMicrophone();
    void levelMeterStart(QString name);                 // start metering microphone volume

public slots:
    void slot_startScreenCast(uint,uint);
    void slot_stopScreenCast();
    /// Builds and starts the recording pipeline from the PipeWire fd and save path.
    void slot_start_gst( QString fd, QString path );
    void slot_portal_dialog_aborted();
signals:
    /// Emitted once the portal has provided the PipeWire fd and the save path.
    void signal_portal_fd_path(QString,QString);
    void signal_portal_aborted();

    void updateVolume(int newVolume);   // lets the UI update the displayed volume

private slots:
    // Response handlers for the three-step portal handshake.
    void slot_handleCreateSessionResponse(uint response, const QVariantMap& results);
    void slot_handleSelectSourcesResponse(uint response, const QVariantMap& results);
    void slot_handleStartResponse(uint response, const QVariantMap& results);

private:
    QDBusInterface* screencastPortal();     // accessor for mScreencastPortal
    QString createSessionToken() const;
    QString createRequestToken() const;

    QString getMuxer();
    QStringList getSelectedAudioDevice();
    QString pipeline_structured_output( QString pipeline );
    QString get_Videocodec_Encoder();
    QString get_Area_Videocrop();
    void stopRec();
    QStringList getAllDevices();
    void getDevices(QStringList &ls, QString soundtype);

    bool isAvailable();
    void set_check_all_Elements_available();
    void setEncoderAndAudioCodec();

    /// GStreamer bus sync handler for the recording pipeline.
    static GstBusSyncReply call_bus_message( GstBus *bus, GstMessage *message, gpointer user_data );

    // Microphone helpers
    void toggle_microphone(bool mute);                  // toggle the microphone hardware mute state
    static gboolean message_handler( GstBus * bus, GstMessage * message, gpointer data );
    void ModifylevelMeterDevice (QString newDevice);    // switch which microphone the level meter watches
private:
    uint mCursorModes = 0;                  // portal cursor-mode bitmask (values below)
    uint mSourcType = 0;                    // portal source-type bitmask (values below)
    QString mSession;                       // portal session object path
    QString mRequestPath;
    QString mRestoreToken;
    QDBusInterface* mScreencastPortal = nullptr;

    // NOTE(review): each line below declares a *member variable* (SourceType,
    // CursorMode, PersistMode) of an anonymous enum type, not just the
    // constants. If only the enumerators are wanted, drop the trailing names —
    // kept as-is here because the .cpp may reference these members.
    enum : uint { MONITOR = 1, WINDOW = 2, VIRTUAL = 4 } SourceType;
    enum : uint { HIDDEN = 1, EMBEDDED = 2, METADATA = 4 } CursorMode;
    enum : uint { TRANSIENT = 0, APPLICATION = 1, PERSISTENT = 2 } PersistMode;

    QString m_screen;
    FORMAT m_format = MKV;
    double m_bitrate = 8000;                // target bitrate (presumably kbit/s — confirm against encoder setup)

    int m_scale = 1;                        // display scale factor (set from a double in setPropertyOfSelectWid)
    // RightWidth = screen width in px / scale - x - width; BottomHeight is computed analogously.
    int m_x=0, m_y=0, m_RightWidth=200, m_BottomHeight=100;
    QString m_sMuxer = "matroskamux";
    QString m_sFormat = "mkv";
    int m_fps = 25;
    QString m_sAudioCodec = "lamemp3enc";
    QString m_sVideoPath;
    GstElement *pipeline = nullptr;         // recording pipeline
    int m_qpmin = 8;                        // encoder minimum quantizer
    int m_qpmax = 8;                        // encoder maximum quantizer (equal to min — fixed quality; confirm intended)
    QString m_encoder = "vp8enc";

    // Microphone state
    QString m_microphoneName = "";          // name of the selected microphone device
    QString m_NoMic = tr("none mic");
    QList<GstElement *> m_micSourceList;    // microphone source elements
    QHash<QString, QString> mapNameToDevice;    // presumably maps mic display name -> device string; verify in .cpp
    GstElement *pipelineMic = nullptr;      // pipeline used for microphone volume metering
    bool m_closeMicrophone = false;         // true when the microphone is closed/muted
};
