#include "videoplayer.h"
#include <gst/pbutils/pbutils.h>
#include <QUrl>
#include <QThread>
#include <QTime>
#include <QDebug>
#include <QMimeDatabase>
#include <QProcess>
#include <QFileInfo>
#include <QtMath>

/*
    Reset this descriptor to a freshly-created state and assign a new unique id.
*/
void MediaInfo::init(){
    id = QUuid::createUuid().toString();

    startPos = 0;
    endPos = 0;
    duration = 0;

    isQueryed = false;
    isValid = false;
    isSeeked = false;

    errorCount = 0;
}

/*
    Bus watch callback: dispatches GStreamer bus messages to the owning player.
    Returning FALSE removes the watch (done once the player is destroyed).
*/
gboolean VideoPlayer::gstBusMessageCallback(GstBus *bus, GstMessage *msg, gpointer data)
{
    Q_UNUSED(bus)

    VideoPlayer *self = static_cast<VideoPlayer*>(data);
    if (self == nullptr || self->m_isDestoryed) {
        return FALSE;
    }

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        self->onEosMessage(msg);
        break;
    case GST_MESSAGE_ERROR:
        self->onErrorMessage(msg);
        break;
    case GST_MESSAGE_BUFFERING:
        self->onBufferingMessage(msg);
        break;
    case GST_MESSAGE_STATE_CHANGED:
        self->onStateChangedMessage(msg);
        break;
    case GST_MESSAGE_DURATION_CHANGED:
        self->onDurationChangedMessage(msg);
        break;
    case GST_MESSAGE_ELEMENT:
        self->onElementMessage(msg);
        break;
    case GST_MESSAGE_ASYNC_DONE:
        self->onAsyncDoneMessage(msg);
        break;
    case GST_MESSAGE_CLOCK_LOST:
        self->onClockLostMessage(msg);
        break;
    case GST_MESSAGE_LATENCY:
        self->onLatencyMessage(msg);
        break;

    // Message types below are received but intentionally ignored.
    case GST_MESSAGE_REQUEST_STATE:
    case GST_MESSAGE_ASYNC_START:
    case GST_MESSAGE_TAG:
    case GST_MESSAGE_APPLICATION:
    case GST_MESSAGE_INFO:
    case GST_MESSAGE_STATE_DIRTY:
    case GST_MESSAGE_STREAM_STATUS:
    case GST_MESSAGE_WARNING:
    default:
        break;
    }
    return TRUE;
}

/*
    decodebin "pad-added" callback: links each newly exposed stream pad to the
    matching request pad on the playsink (audio/video/text).
*/
void VideoPlayer::gstPadAddedCallback(GstElement *decodebin, GstPad *new_pad, gpointer data)
{
    Q_UNUSED(decodebin)

    VideoPlayer *self = static_cast<VideoPlayer*>(data);
    if (self == nullptr || self->m_isDestoryed) {
        return;
    }

    // Inspect the media type carried by the new pad.
    GstCaps *padCaps = gst_pad_query_caps(new_pad, NULL);
    if (padCaps == nullptr) {
        return;
    }

    const gchar *padType = nullptr;
    GstStructure *capsStruct = gst_caps_get_structure(padCaps, 0);
    if (capsStruct != nullptr) {
        padType = gst_structure_get_name(capsStruct);
    }
    if (padType == nullptr) {
        gst_caps_unref(padCaps);
        return;
    }

    // Map the pad's media type onto the playsink request-pad template name.
    GstElement *sink = self->m_playsink;
    GstElementClass *sinkClass = GST_ELEMENT_GET_CLASS(sink);
    const gchar *templName = nullptr;
    if (g_str_has_prefix(padType, "audio/x-raw")) {
        templName = "audio_sink";
    } else if (g_str_has_prefix(padType, "video/x-raw")) {
        templName = "video_sink";
    } else if (g_str_has_prefix(padType, "text")) {
        templName = "text_sink";
    }

    if (templName != nullptr) {
        GstPadTemplate *templ = gst_element_class_get_pad_template(sinkClass, templName);
        if (templ != nullptr) {
            GstPad *reqPad = gst_element_request_pad(sink, templ, nullptr, nullptr);
            if (reqPad != nullptr) {
                if (!gst_pad_is_linked(reqPad)) {
                    gst_pad_link(new_pad, reqPad);
                }
                // Remember the pad so destoryPipeline() can release it later.
                self->m_playsinkPads.push_back(reqPad);
            }
        }
    }

    gst_caps_unref(padCaps);
}

/*
    playbin "element-setup" callback: hooks the decoder-selection callback on
    every plain decodebin instance (uridecodebin itself is excluded; the
    decodebin it wraps will be set up through this same path).
*/
void VideoPlayer::gstElementSetupCallback(GstElement *playbin, GstElement *new_ele, gpointer data){
    Q_UNUSED(playbin)

    GstElementFactory *factory = gst_element_get_factory(new_ele);
    if(factory == nullptr){
        return;
    }

    const QString name(gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory)));
    if(name.contains("decodebin") && !name.contains("uridecodebin")){
        g_signal_connect (new_ele, "autoplug_select", G_CALLBACK (gstDecoderSelectCallback), data);
    }
}

/*
    playbin "source-setup" callback: tunes the freshly created source element.

    BUGFIX: g_object_set() passes values through varargs, so 64-bit properties
    must be handed explicitly 64-bit arguments. udpsrc "timeout" and rtspsrc
    "timeout"/"tcp-timeout" are guint64; the previous code pushed plain int
    literals (1*1000000, 10*1000000), which passes only 32 bits on most ABIs
    and yields a corrupted property value (undefined behavior). All 64-bit
    properties are now cast to guint64 before the call.
*/
void VideoPlayer::gstSourceElementSetupCallback(GstElement *decodebin, GstElement *new_ele, gpointer data)
{
    Q_UNUSED(decodebin)
    Q_UNUSED(data)

    GstElementFactory *factory = gst_element_get_factory(new_ele);
    if(factory){
        gchar* elename = gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory));
        QString name(elename);

        if(name == "udpsrc"){
            g_object_set(G_OBJECT(new_ele),"timeout",(guint64)(5*GST_SECOND), nullptr);     // guint64, ns
            g_object_set(G_OBJECT(new_ele),"buffer-size",2147483647, nullptr);
        }else if(name == "rtspsrc"){
            g_object_set(G_OBJECT(new_ele),"latency", 2000, nullptr);                        // buffering time in ms (element default 2s)
            g_object_set(G_OBJECT(new_ele),"protocols",0x00000004, nullptr);                 // transport protocol selection
            g_object_set(G_OBJECT(new_ele),"timeout",(guint64)(1*1000000), nullptr);         // guint64, UDP timeout in us (element default 5s)
            g_object_set(G_OBJECT(new_ele),"tcp-timeout",(guint64)(10*1000000), nullptr);    // guint64, TCP timeout in us (element default 20s)
            g_object_set(G_OBJECT(new_ele),"drop-on-latency",FALSE, nullptr);
            g_object_set(G_OBJECT(new_ele),"tls-validation-flags",0x00000020, nullptr);
        }else if(name == "rtmpsrc"){
            g_object_set(G_OBJECT(new_ele),"timeout",5, nullptr);
        }
    }
}

/*
    decodebin "autoplug-select" callback: vets each candidate decoder factory.

    return: 0 (try this decoder), 1/2 (skip this decoder)
*/
int VideoPlayer::gstDecoderSelectCallback(GstElement* bin, GstPad *pad, GstCaps* caps, GstElementFactory* factory, gpointer data)
{
    Q_UNUSED(bin)
    Q_UNUSED(pad)
    Q_UNUSED(caps)

    VideoPlayer* self = static_cast<VideoPlayer*>(data);
    if(self == nullptr || self->m_isDestoryed){
        return 0;
    }

    const QString longName = gst_element_factory_get_longname(factory);
    const QString eleName  = gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory));
    const QString klass    = gst_element_factory_get_klass(factory);

    const bool isVideoDecoder = klass.contains("Decoder/Video");

    // Reject the MPP hardware video decoder when hardware acceleration is
    // disabled, or when the stream is MPEG-4 (known-problematic combination).
    if(isVideoDecoder && eleName.contains("mpp")){
        if(!self->m_enableHWA || self->m_mediaInfo.videoFormat.contains("MPEG-4")){
            qDebug()<<__FUNCTION__<<"禁用解码器："<<eleName<<klass<<longName;
            return 2;   // skip this decoder
        }
    }

    // Record which decoder ended up being selected for each stream kind.
    if(isVideoDecoder){
        self->m_vcodecName = eleName;
    }else if(klass.contains("Decoder/Audio")){
        self->m_acodecName = eleName;
    }

    qDebug()<<__FUNCTION__<<"加载组件："<<eleName<<klass<<longName;
    return 0;   // try this decoder
}

/*
    Buffer probe on the video sink pad: stamps the arrival time of the most
    recent buffer so the probe-check watchdog can detect a stalled stream.
*/
GstPadProbeReturn VideoPlayer::gstVideoSinkProbe(GstPad *pad, GstPadProbeInfo *info, gpointer data)
{
    Q_UNUSED(pad)
    Q_UNUSED(info)

    VideoPlayer* self = static_cast<VideoPlayer*>(data);
    if(self == nullptr || self->m_isDestoryed){
        // Detach the probe once the player is gone.
        return GST_PAD_PROBE_REMOVE;
    }

    self->m_probeLastDateTime = QDateTime::currentDateTime();
    return GST_PAD_PROBE_OK;
}

/*
    Construct a player. logname, when non-empty, names an optional logger;
    every log call in this class is guarded with "if(m_logger)".
*/
VideoPlayer::VideoPlayer(const QString &logname, QObject *parent):QObject(parent)
{
    if(logname!=""){
        m_logger = Logger::createLogger(this,logname);
    }else{
        m_logger=NULL;
    }

    // GStreamer element handles; created lazily in createPipeline().
    m_pipeline=NULL;
    m_videosink=NULL;
    m_playsink=NULL;

    m_targetState = GST_STATE_NULL;
    m_isDestoryed = false;
    m_isReqPlay=false;
    m_isReqSeek=false;

    // Playback defaults.
    m_wid=0;
    m_volume=100;                 // percent
    m_mute=false;
    m_fill=false;                 // false: keep aspect ratio
    m_enableHWA=true;
    m_rate=1.0;                   // 1.0: normal forward speed
    m_posNotifyInterval=-1;       // -1: positionChanged() notifications disabled
    m_showFirstFrame=false;
    m_renderSize=QSize(0,0);
    m_vsinkName="xvimagesink";    // default video sink element name
    m_acodecName="";
    m_vcodecName="";
    m_timeout=6000;               // watchdog timeout in ms (see setTimeout())

    // One-shot timer armed in play() when mediaInfo.endPos > 0; handler not
    // visible in this file.
    m_endTimer.setSingleShot(true);
    m_endTimer.setTimerType(Qt::TimerType::PreciseTimer);
    connect(&m_endTimer,&QTimer::timeout,this,&VideoPlayer::onEndTimerTimeout);

    // Periodic position-notification timer; interval comes from
    // setPosNotifyInterval().
    m_posNotifyTimer.setSingleShot(false);
    connect(&m_posNotifyTimer,&QTimer::timeout,this,&VideoPlayer::onPosNotifyTimerTimeout);

    // Stream watchdog: gstVideoSinkProbe() stamps the last-buffer time and
    // this timer checks it periodically; setTimeout() recomputes the maximum
    // consecutive error count from m_probeCheckInterval.
    m_probeCheckInterval=2000;
    m_probeCheckTimeout=1500;
    m_probeCheckMaxErrTimes=3;
    m_probeCheckCurErrTimes=0;
    m_probeCheckTimer.setSingleShot(false);
    m_probeCheckTimer.setInterval(m_probeCheckInterval);
    connect(&m_probeCheckTimer,&QTimer::timeout,this,&VideoPlayer::onProbeCheckTimerTimeout);
}

/*
    Destructor: flags the object as destroyed (so pending GStreamer callbacks
    become no-ops), drops the logger, and tears the pipeline down.
*/
VideoPlayer::~VideoPlayer()
{
    m_isDestoryed = true;
    m_logger = NULL;

    destoryPipeline();
}

/*
    Create a preview image (JPEG) for a media file next to the file itself and
    return the image path. Requires FFmpeg to be installed on the system.

    mediaFilePath: media file to snapshot
    size: bounding box for the preview; an invalid size keeps the native size

    return: the preview image path, or an empty string on failure.

    BUGFIX: arguments are now passed to QProcess as a list instead of one
    shell-style command string, so media paths containing spaces (or other
    special characters) work correctly; the single-string QProcess::start()
    overload is also deprecated in Qt 5.15 and removed in Qt 6.
*/
QString VideoPlayer::createMediaPreview(const QString &mediaFilePath,const QSize& size)
{
    QFileInfo fileInfo(mediaFilePath);
    if(fileInfo.exists()){
        QString imgFilePath = fileInfo.path()+"/"+fileInfo.baseName()+"_preview.jpg";

        // -t 0.001 grabs a single frame; -y overwrites any stale preview.
        QStringList args;
        args << "-i" << mediaFilePath << "-q:v" << "2";
        if(size.isValid()){
            // Fit into the requested box while keeping the aspect ratio.
            args << "-vf" << QString("scale=%1:%2:force_original_aspect_ratio=decrease")
                    .arg(size.width()).arg(size.height());
        }
        args << "-y" << "-f" << "image2" << "-t" << "0.001" << imgFilePath;

        QProcess p;
        p.start("ffmpeg", args);
        p.waitForFinished(3*1000);

        if(QFileInfo::exists(imgFilePath)){
            return imgFilePath;
        }
    }

    return "";
}


/*
    Recursively collect codec descriptions from a discovered stream tree into
    mediaInfo (container / video / audio format strings).
*/
void queryMediaFormat (GstDiscovererStreamInfo *info, MediaInfo& mediaInfo) {
    if (info == nullptr){
        return;
    }

    // Derive a human-readable description of this stream's caps.
    gchar *desc = NULL;
    GstCaps *caps = gst_discoverer_stream_info_get_caps (info);
    if (caps != nullptr) {
        desc = gst_caps_is_fixed (caps)
                ? gst_pb_utils_get_codec_description (caps)
                : gst_caps_to_string (caps);
        gst_caps_unref (caps);
    }

    // Store the description under the matching stream kind.
    const QString streamKind(gst_discoverer_stream_info_get_stream_type_nick (info));
    if(streamKind=="container"){
        mediaInfo.containerFormat = QString(desc);
    }else if(streamKind=="video"){
        mediaInfo.videoFormat = QString(desc);
    }else if(streamKind=="audio"){
        mediaInfo.audioFormat = QString(desc);
    }

    if (desc != NULL) {
        g_free (desc);
        desc = NULL;
    }

    // Containers hold child streams; recurse into each of them.
    if (GST_IS_DISCOVERER_CONTAINER_INFO (info)) {
        GList *streams = gst_discoverer_container_info_get_streams (GST_DISCOVERER_CONTAINER_INFO (info));
        if(streams != NULL){
            for (GList *it = streams; it != NULL; it = it->next) {
                queryMediaFormat ((GstDiscovererStreamInfo *) it->data, mediaInfo);
            }
            gst_discoverer_stream_info_list_free (streams);
        }
    }
}

/*
    Query metadata for a media url and store the result in mediaInfo.

    Supported sources: local image files, local audio/video files, and live
    streams (udp/http/https/rtp/rtmp/rtsp). For live streams this function
    only checks that the protocol is supported; it does NOT verify that the
    stream is actually playable.

    url: the media url to query
    errMsg: receives an error description when the query fails
    return: whether the query succeeded. Success does not imply the media is
    valid — check mediaInfo.isValid for that.
*/
// Process-wide cache of successfully queried media, matched by url.
// NOTE(review): the cache grows without bound and has no synchronization —
// fine if all queries happen on one thread; confirm before wider use.
static QVector<MediaInfo> mediaInfoCache;
bool VideoPlayer::queryMediaInfo(const QString &url,MediaInfo& mediaInfo,QString& errMsg)
{
    mediaInfo.url = url;
    mediaInfo.isQueryed=true;

    if(url.isEmpty()){
        errMsg="媒体路径为空";
        return false;
    }

    // Serve from the cache first (matched by url).
    for(int i=0;i<mediaInfoCache.size();i++){
        if(mediaInfoCache[i].url == url){
            mediaInfo.isValid = mediaInfoCache[i].isValid;
            mediaInfo.mediaType=mediaInfoCache[i].mediaType;
            mediaInfo.duration = mediaInfoCache[i].duration;
            mediaInfo.videoFormat=mediaInfoCache[i].videoFormat;
            mediaInfo.audioFormat=mediaInfoCache[i].audioFormat;
            mediaInfo.containerFormat=mediaInfoCache[i].containerFormat;

            errMsg="";
            return true;
        }
    }

    QString scheme  = QUrl::fromUserInput(url).scheme();
    if(scheme == "udp" || scheme == "http" || scheme == "https"
            || scheme == "rtp" || scheme == "rtmp" || scheme == "rtsp")
    {
        // Live stream: only the protocol is checked, not actual playability.
        mediaInfo.isValid = true;
        mediaInfo.mediaType="live";
        mediaInfo.duration = 0;
        mediaInfo.videoFormat="";
        mediaInfo.audioFormat="";
        mediaInfo.containerFormat="";
        mediaInfoCache.push_back(mediaInfo);

        errMsg="";
        return true;

    }else if(scheme=="file"){
        // NOTE(review): QFile::exists() is called on the raw url string; this
        // works when callers pass a plain local path, but would fail for a
        // url that literally starts with "file://" — confirm caller behavior.
        if(!QFile::exists(url)){
            errMsg="媒体文件不存在";
            return false;
        }

        QMimeDatabase mimeDb;
        QMimeType mimeType = mimeDb.mimeTypeForUrl(url);
        QString mimeName = mimeType.name();
        if(!mimeType.isValid()){
            errMsg="媒体元类型无效";
            return false;
        }

        if(mimeName.startsWith("image")){
            // Image file: valid, no duration, no stream formats.
            mediaInfo.isValid = true;
            mediaInfo.mediaType="image";
            mediaInfo.duration = 0;
            mediaInfo.videoFormat="";
            mediaInfo.audioFormat="";
            mediaInfo.containerFormat="";
            mediaInfoCache.push_back(mediaInfo);
            errMsg="";
            return true;

        }else if(mimeName.startsWith("audio") || mimeName.startsWith("video")){
            // Audio/video file: run a GstDiscoverer probe (3 s timeout).
            bool queryOk=false;
            GError *err = NULL;

            GstDiscoverer *dc = gst_discoverer_new (3*GST_SECOND, &err);
            if(dc){
                // gst_discoverer_discover_uri() requires a proper uri with a
                // "file://" prefix, hence the QUrl::fromUserInput round-trip.
                GstDiscovererInfo *info = gst_discoverer_discover_uri(dc, QUrl::fromUserInput(url).toString().toStdString().data(), &err);
                if(info){
                    GstDiscovererResult result = gst_discoverer_info_get_result(info);
                    if(result==GST_DISCOVERER_OK){
                        quint64 duration = gst_discoverer_info_get_duration (info)/GST_MSECOND;  // ns -> ms
                        GstDiscovererStreamInfo* sInfo = gst_discoverer_info_get_stream_info(info);
                        if(sInfo){
                            queryMediaFormat(sInfo,mediaInfo);
                            gst_discoverer_stream_info_unref(sInfo);

                            if(duration>0){
                                queryOk =true;
                                mediaInfo.isValid = true;
                                mediaInfo.duration = duration;
                                // Audio-only when an audio stream exists and
                                // no video stream was found.
                                if(mediaInfo.audioFormat!="" && mediaInfo.videoFormat==""){
                                    mediaInfo.mediaType="audio";
                                }else{
                                    mediaInfo.mediaType="video";
                                }
                            }else{
                                queryOk=false;
                                errMsg="媒体时长为0";
                            }
                        }else{
                            queryOk=false;
                            errMsg="gst_discoverer_info_get_stream_info失败";
                        }
                    }else{
                        queryOk=false;
                        errMsg=QString("查询失败 resultCode: %1").arg(result);
                    }

                    gst_discoverer_info_unref (info);
                }else{
                    queryOk=false;
                    errMsg="gst_discoverer_discover_uri失败";
                }
                gst_object_unref (dc);
            }else{
                queryOk=false;
                errMsg="gst_discoverer_new失败";
            }

            if (err){
                if(!queryOk){
                    // Append GStreamer's own error text when available.
                    if(err->message){
                        errMsg+=(" gstError: "+QString(err->message));
                    }
                }
                g_error_free (err);
            }

            if(queryOk){
                // Only successfully queried files are cached.
                mediaInfoCache.push_back(mediaInfo);
                return true;
            }else{
                // Reset the output on failure so stale fields don't leak out.
                mediaInfo.isValid = false;
                mediaInfo.mediaType="";
                mediaInfo.duration=0;
                mediaInfo.videoFormat="";
                mediaInfo.audioFormat="";
                mediaInfo.containerFormat="";
                return false;
            }
        }else{
            errMsg="媒体元类型未知 mimeName: "+mimeName;
            return false;
        }

    }else{
        errMsg="媒体协议不支持";
        return false;
    }
}

/*
    Format a one-line, human-readable summary of a media descriptor for logs.
*/
QString VideoPlayer::dumpMediaInfo(const MediaInfo &media)
{
    // Empty strings render as "null"; positive times render as seconds.
    auto orNull = [](const QString& s){ return s.isEmpty() ? QString("null") : s; };
    auto asTime = [](qint64 v){
        return v>0 ? QString::number(v/1000.0,'f',2)+"s" : QString::number(v);
    };

    return QString("programType: %1 mediaType: %2 url: %3 "
                   "startPos: %4 endPos: %5 duration: %6 "
                   "isSeeked: %7 errorCount: %8")
            .arg(orNull(media.programType))
            .arg(orNull(media.mediaType))
            .arg(orNull(media.url))
            .arg(asTime(media.startPos))
            .arg(asTime(media.endPos))
            .arg(asTime(media.duration))
            .arg(media.isSeeked?"true":"false")
            .arg(media.errorCount);
}


/*
    Start playing the given media source.

    Note: if playback is currently running (or has reached the end), call
    stop() first, then call this function again to start the new playback.

    media: the media source to play — a local video/audio file, a local image
    file, or a live stream. The url field must be set.
*/
void VideoPlayer::play(MediaInfo media)
{
    stopEndTimer();
    stopProbeCheck();
    m_isEos=false;

    if(m_logger){
        m_logger->info(QString("<%1> 启动播放 %2 %3")
                       .arg(__FUNCTION__)
                       .arg(dumpMediaInfo(media))
                       .arg(dumpPipelineState()));
    }

    if (this->state() > GST_STATE_READY ){
        // Pipeline is busy (past READY): drop this request and ask the caller
        // to retry — the caller should stop() first and then play() again.
        if(m_logger){
            m_logger->warn(QString("<%1> 已取消播放（管线当前未处于NULL状态或管线繁忙） %2 %3")
                           .arg(__FUNCTION__)
                           .arg(dumpMediaInfo(m_mediaInfo))
                           .arg(dumpPipelineState()));
        }
        emit requestRetryPlay();
        return;
    }

    m_mediaInfo = media;
    m_mediaInfo.isSeeked=false;

    // Arm the end-of-segment timer when an end position was requested.
    if(m_mediaInfo.endPos>0){
        startEndTimer();
    }

    m_isReqPlay=true;
    if(!m_pipeline){
        createPipeline();
    }

    // Only a seekable file with a start offset needs to preroll in PAUSED
    // first (the startPos seek happens after preroll); live streams, images
    // and files starting at 0 go straight to PLAYING.
    const bool needPreroll = m_mediaInfo.mediaType!="live"
            && m_mediaInfo.mediaType!="image"
            && m_mediaInfo.startPos>0;
    if(needPreroll){
        changeState(GST_STATE_PAUSED,"预卷管道",true);
    }else{
        changeState(GST_STATE_PLAYING,"启动播放",true);
    }
}


/*
    Stop playback: cancel all timers, tear down the pipeline (if any), and
    emit playerStopped().
*/
void VideoPlayer::stop( )
{
    stopEndTimer();
    stopProbeCheck();
    stopPosNotifyTimer();

    m_isEos=false;
    m_isReqPlay=false;

    if(m_pipeline != nullptr){
        if(m_logger){
            m_logger->info(QString("<%1> 停止播放 %2 %3")
                           .arg(__FUNCTION__)
                           .arg(dumpMediaInfo(m_mediaInfo))
                           .arg(dumpPipelineState()));
        }
        destoryPipeline();
    }

    emit playerStopped();
}


/*
    Pause playback: cancel all timers and request the PAUSED state.
*/
void VideoPlayer::pause()
{
    stopEndTimer();
    stopProbeCheck();
    stopPosNotifyTimer();

    m_isEos=false;
    m_isReqPlay=false;

    if(m_pipeline == nullptr){
        return;
    }

    if(m_logger){
        m_logger->info(QString("<%1> 暂停播放 %2 %3")
                       .arg(__FUNCTION__)
                       .arg(dumpMediaInfo(m_mediaInfo))
                       .arg(dumpPipelineState()));
    }

    changeState(GST_STATE_PAUSED,"暂停播放",false);
}

/*
    Resume playback from the paused state by requesting PLAYING again.
*/
void VideoPlayer::resume()
{
    if(m_pipeline == nullptr){
        return;
    }

    if(m_logger){
        m_logger->info(QString("<%1> 恢复播放 %2 %3")
                       .arg(__FUNCTION__)
                       .arg(dumpMediaInfo(m_mediaInfo))
                       .arg(dumpPipelineState()));
    }

    changeState(GST_STATE_PLAYING,"恢复播放",true);
}

/*
    Skip the given number of frames (or amount of time) from the current
    position and continue; when paused, the next frame is shown after the
    step. The direction follows the sign of the rate set via setRate().

    format: GST_FORMAT_BUFFERS (skip frames) or GST_FORMAT_TIME (skip ms)
    num: number of frames / milliseconds to skip

    Note: only callable in the playing or paused states.
*/
bool VideoPlayer::step(GstFormat format, qint64 num)
{
    // Stepping needs a prerolled pipeline.
    if(state() <= GST_STATE_READY){
        return false;
    }

    // TIME steps arrive in ms but GStreamer expects ns.
    const qint64 amount = (format == GST_FORMAT_TIME) ? num * GST_MSECOND : num;
    gst_element_send_event(m_videosink,
                           gst_event_new_step(format, amount, qAbs(m_rate), TRUE, FALSE));
    return true;
}

/*
    Seek to the given playback position.

    pos: target position in ms; must be >= 0 and below the media duration.

    Note: only callable in the playing/paused states, and has no effect for
    anything other than audio/video files (e.g. live streams).
*/
bool VideoPlayer::seek(qint64 pos)
{
    // Reject when the pipeline cannot seek: missing, not prerolled, or live.
    if(!m_pipeline || state()<=GST_STATE_READY || m_mediaInfo.mediaType=="live"){
        if(m_logger){
            m_logger->error(QString("<%1> 定位失败（管线当前状态不可定位）pos: %2 %3")
                            .arg(__FUNCTION__)
                            .arg(pos)
                            .arg(dumpMediaInfo(m_mediaInfo)));
        }
        return false;
    }

    if(pos<0){
        if(m_logger){
            m_logger->error(QString("<%1> 定位失败（定位点无效）pos: %2 %3")
                            .arg(__FUNCTION__)
                            .arg(pos)
                            .arg(dumpMediaInfo(m_mediaInfo)));
        }
        return false;
    }

    if(m_logger){
        m_logger->info(QString("<%1> 执行定位 pos: %2 %3 %4")
                       .arg(__FUNCTION__)
                       .arg(pos)
                       .arg(dumpMediaInfo(m_mediaInfo))
                       .arg(dumpPipelineState()));
    }

    // Forward playback runs [target, end); reverse playback runs [0, target].
    const gint64 target = pos * GST_MSECOND;
    const bool forward = (m_rate > 0);
    m_isReqSeek = true;

    bool ok = gst_element_seek(m_videosink, m_rate, GST_FORMAT_TIME,
                               (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
                               GST_SEEK_TYPE_SET, forward ? target : 0,
                               forward ? GST_SEEK_TYPE_END : GST_SEEK_TYPE_SET,
                               forward ? 0 : target);

    if(!ok){
        m_isReqSeek=false;
        if(m_logger){
            m_logger->error(QString("<%1> 定位失败（无法发送定位事件） pos: %2 %3 %4")
                            .arg(__FUNCTION__)
                            .arg(pos)
                            .arg(dumpMediaInfo(m_mediaInfo))
                            .arg(dumpPipelineState()));
        }
    }

    return ok;
}

/*
    Return the window handle the video is rendered into (0 when unset).
*/
WId VideoPlayer::wId()
{
    return m_wid;
}

/*
    Bind the video rendering window.

    Note: setting wid to 0 while playing makes GStreamer open its own
    standalone render window.
    Note: callable at any time.
*/
void VideoPlayer::setWId(WId id)
{
    m_wid = id;

    // Re-bind the overlay immediately when a sink already exists.
    if(id > 0 && m_videosink != nullptr){
        gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (m_videosink), m_wid);
    }
}

/*
    Return whether the video is stretched to fill the window (see setFill()).
*/
bool VideoPlayer::fill()
{
    return m_fill;
}

/*
    设置视频渲染时是否非等比例拉伸视频图像，以填充绑定
    的窗口。默认按图像帧等比例自适应缩放，不拉伸

    注意：可在任何时候都可以调用
*/
void VideoPlayer::setFill(bool fill)
{
    m_fill=fill;

    if(m_videosink){
        g_object_set(G_OBJECT(m_videosink),"force-aspect-ratio",!m_fill,nullptr);
    }
}

/*
    Return the current playback rate (see setRate()).
*/
double VideoPlayer::rate()
{
    return m_rate;
}

/*
    Set the playback rate (default 1.0).

    0~1: slow forward        1: normal forward        >1: fast forward
    -1~0: slow reverse      -1: normal reverse       <-1: fast reverse

    Note: callable at any time; the new rate is applied to a prerolled,
    non-live pipeline by re-seeking at the current position.

    BUGFIX: the reverse branch previously used GST_SEEK_TYPE_NONE for the
    stop position, which left the old stop in place instead of pinning it to
    the current position — inconsistent with seek()'s reverse branch and with
    the standard GStreamer rate-change pattern. It now uses GST_SEEK_TYPE_SET.
*/
void VideoPlayer::setRate(double rate)
{
    m_rate = rate;

    if(state() > GST_STATE_READY && m_mediaInfo.mediaType != "live"){
        // Query the current position once; both branches seek from/to it.
        gint64 pos = -1;
        gst_element_query_position(m_pipeline, GST_FORMAT_TIME, &pos);

        GstEvent *e;
        if(rate > 0){
            // Forward: play from the current position to the end.
            e = gst_event_new_seek(rate, GST_FORMAT_TIME,
                                   (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
                                   GST_SEEK_TYPE_SET, pos,
                                   GST_SEEK_TYPE_END, 0);
        }else{
            // Reverse: play from the start back up to the current position.
            e = gst_event_new_seek(rate, GST_FORMAT_TIME,
                                   (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
                                   GST_SEEK_TYPE_SET, 0,
                                   GST_SEEK_TYPE_SET, pos);
        }
        gst_element_send_event(m_videosink, e);
    }
}

/*
    Return whether audio output is muted.
*/
bool VideoPlayer::mute()
{
    return m_mute;
}

/*
    设置是否静音，默认为false

    注意：可在任何时候调用
*/
void VideoPlayer::setMute(bool mute)
{
    if(m_mute!=mute){
        m_mute=mute;

        GstElement* sinkEle=NULL;
        if(m_playsink){
            sinkEle = m_playsink;
        }else if(m_pipeline){
            sinkEle = m_pipeline;
        }else{
            return;
        }

        if(m_logger){
            m_logger->info(QString("<%1> 设置静音 %2 %3")
                           .arg(__FUNCTION__)
                           .arg(dumpMediaInfo(m_mediaInfo))
                           .arg(dumpPipelineState()));
        }
        g_object_set(G_OBJECT (sinkEle), "mute",mute?1:0,nullptr);
    }
}

/*
    Return the output volume as a percentage (see setVolume()).
*/
int VideoPlayer::volume()
{
    return m_volume;
}

/*
    设置播放器输出的音量，默认为100

    volume：音量百分比，0～1000%。超过100%时，音量
    会被放大而可能出现失真

    注意：可在任何时候调用
*/
void VideoPlayer::setVolume(int volume)
{
    if(volume>=0 && m_volume!=volume){
        m_volume=volume;

        GstElement* sinkEle=NULL;
        if(m_playsink){
            sinkEle = m_playsink;
        }else if(m_pipeline){
            sinkEle = m_pipeline;
        }else{
            return;
        }

        if(m_logger){
            m_logger->info(QString("<%1> 设置音量 %2 %3")
                           .arg(__FUNCTION__)
                           .arg(dumpMediaInfo(m_mediaInfo))
                           .arg(dumpPipelineState()));
        }

        g_object_set(G_OBJECT (sinkEle), "volume",volume/(double)100.0,nullptr);
    }
}

/*
    Return whether hardware-accelerated decoding is currently enabled.
*/
bool VideoPlayer::enableHWA()
{
    return m_enableHWA;
}

/*
    Enable/disable hardware-accelerated decoding (enabled by default).

    Note: must be set before playback starts to take effect.
    Note: some devices' hardware decoders misbehave on certain formats — e.g.
    the rockchip decoder can intermittently deadlock while decoding video.
*/
void VideoPlayer::setEnableHWA(bool enable)
{
    m_enableHWA = enable;
}

/*
    Return whether the first (preroll) frame is shown (see setShowFirstFrame()).
*/
bool VideoPlayer::showFirstFrame()
{
    return m_showFirstFrame;
}

/*
    Choose whether the preroll (first) frame is displayed. When playing a
    video file with a startPos, disabling this avoids flashing the file's
    first frame before the seek to startPos completes. Default: enabled.

    Note: callable at any time.
*/
void VideoPlayer::setShowFirstFrame(bool show)
{
    m_showFirstFrame = show;

    if(m_videosink != nullptr){
        g_object_set(G_OBJECT(m_videosink), "show-preroll-frame", show, nullptr);
    }
}

/*
    Return the positionChanged() notification interval in ms (-1: disabled).
*/
int VideoPlayer::posNotifyInterval()
{
    return m_posNotifyInterval;
}

/*
    Set the emission interval of positionChanged(); default -1 (never emit).

    Note: only effective when set before playback starts.
*/
void VideoPlayer::setPosNotifyInterval(int intervalMs, bool notifyWithSec)
{
    m_posNotifyInterval = intervalMs;
    m_posNotifyWithSec = notifyWithSec;

    if(intervalMs <= 0){
        stopPosNotifyTimer();
        return;
    }

    // Start notifying immediately if playback is already running.
    if(this->state() == GST_STATE_PLAYING){
        startPosNotifyTimer();
    }
}

/*
    Return the configured render rectangle size (see setRenderSize()).
*/
QSize VideoPlayer::renderSize()
{
    return m_renderSize;
}

/*
    Set the render rectangle size; applied to the sink immediately when both
    dimensions are positive and a sink exists.
*/
void VideoPlayer::setRenderSize(QSize size)
{
    m_renderSize = size;

    const bool usable = size.width() > 0 && size.height() > 0;
    if(usable && m_videosink != nullptr){
        gst_video_overlay_set_render_rectangle (GST_VIDEO_OVERLAY (m_videosink), 0, 0, size.width(), size.height());
    }
}

/*
    Return the configured video sink element name (see setVsinkName()).
*/
QString VideoPlayer::vsinkName()
{
    return m_vsinkName;
}

/*
    Set the GStreamer video sink element name used by createPipeline()
    (falls back to "xvimagesink" if the element cannot be created).
*/
void VideoPlayer::setVsinkName(const QString &vsink)
{
    m_vsinkName = vsink;
}

/*
    Return the stream watchdog timeout in ms (see setTimeout()).
*/
int VideoPlayer::timeout()
{
    return m_timeout;
}

void VideoPlayer::setTimeout(int ms)
{
    if(ms>0){
        m_timeout=ms;

        int times = qCeil(ms/(double)m_probeCheckInterval);
        if(times<=0){
            times=1;
        }
        m_probeCheckMaxErrTimes=times;
    }
}


/*
    Return the current playback position in ms (-1 when unavailable).

    Note: only callable in the playing or paused states.
*/
qint64 VideoPlayer::position()
{
    if(state() <= GST_STATE_READY){
        return -1;
    }

    if(m_mediaInfo.mediaType == "live"){
        // Live source: report elapsed running time (clock - base time).
        GstClock *clock = gst_element_get_clock(m_pipeline);
        if(clock == nullptr){
            return -1;
        }
        GstClockTime now  = gst_clock_get_time(clock);
        GstClockTime base = gst_element_get_base_time(m_pipeline);
        gst_object_unref(clock);
        return (now - base)/GST_MSECOND;
    }

    // Seekable media: query the pipeline position directly (ns -> ms).
    gint64 pos = -1;
    gst_element_query_position(m_pipeline, GST_FORMAT_TIME, &pos);
    return pos > 0 ? pos/GST_MSECOND : pos;
}



/*
    Return a pointer to the media descriptor currently being played.
*/
MediaInfo* VideoPlayer::media()
{
    return &m_mediaInfo;
}

/*
    Return the pipeline's current playback state, or GST_STATE_NULL when no
    pipeline exists.
*/
GstState VideoPlayer::state()
{
    return m_pipeline ? m_pipeline->current_state : GST_STATE_NULL;
}

/*
    Build the pipeline matching m_mediaInfo: a parse-launch image pipeline,
    a manual filesrc->decodebin->playsink pipeline for audio/video files, or
    a playbin for live streams. Also installs the bus watch and the video
    sink buffer probe used by the stream watchdog.
*/
void VideoPlayer::createPipeline()
{
    m_vcodecName="";
    m_acodecName="";

    QUrl mediaUrl = QUrl::fromUserInput(m_mediaInfo.url);
    QString schemeName=mediaUrl.scheme();

    if(schemeName=="file"){
        QMimeType mimeType = m_mimeDb.mimeTypeForUrl(mediaUrl);
        QString mimeName= mimeType.name();

        if(mimeName.contains("image")){
            // Image file: decode a single frame and freeze it.
            // NOTE(review): 'error' is neither checked nor freed, and
            // m_pipeline could be NULL if gst_parse_launch fails — confirm
            // whether that can happen in deployment before relying on it.
            GError *error = NULL;
            m_pipeline = gst_parse_launch("filesrc name=filesrc1 ! decodebin name=decodebin1 ! videoconvert ! imagefreeze ! glimagesink name=imagesink1", &error);
            GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
            m_busWatchId=gst_bus_add_watch (bus,gstBusMessageCallback, this);
            gst_object_unref(bus);

            GstElement* filesrc = gst_bin_get_by_name(GST_BIN(m_pipeline), "filesrc1");
            g_object_set(G_OBJECT(filesrc),"location",m_mediaInfo.url.toStdString().data(), nullptr);

            m_videosink = gst_bin_get_by_name(GST_BIN(m_pipeline), "imagesink1");
            setWId(m_wid);
            setRenderSize(m_renderSize);
            // Watchdog probe: stamps arrival time of each rendered buffer.
            GstPad *pad = gst_element_get_static_pad (m_videosink, "sink");
            if(pad){
                m_probeId=gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
                                   (GstPadProbeCallback)gstVideoSinkProbe, this, NULL);
                gst_object_unref (pad);
            }
        }else{
            // Audio/video file: hand-built filesrc -> decodebin -> playsink.
            m_pipeline = gst_pipeline_new("pipeline");
            GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
            m_busWatchId=gst_bus_add_watch (bus,gstBusMessageCallback, this);
            gst_object_unref(bus);

            GstElement* filesrc = gst_element_factory_make ("filesrc", "filesrc");
            m_filesrc=filesrc;
            g_object_set(G_OBJECT(filesrc),"location",m_mediaInfo.url.toStdString().data(), nullptr);

            // decodebin pads appear dynamically; they are linked to the
            // playsink in gstPadAddedCallback, and decoder choice is vetted
            // in gstDecoderSelectCallback.
            GstElement* decodebin = gst_element_factory_make ("decodebin", "decodebin");
            g_signal_connect (decodebin, "pad-added", G_CALLBACK (gstPadAddedCallback), this);
            g_signal_connect (decodebin, "autoplug-select", G_CALLBACK (gstDecoderSelectCallback), this);

            // Create the configured video sink, falling back to xvimagesink.
            if(m_vsinkName!=""){
                m_videosink = gst_element_factory_make (m_vsinkName.toStdString().data(), "vsink");
            }
            if(!m_videosink){
                m_videosink = gst_element_factory_make ("xvimagesink", "vsink");
            }
            g_object_set(G_OBJECT(m_videosink),"show-preroll-frame",m_showFirstFrame,nullptr);
            setWId(m_wid);
            setRenderSize(m_renderSize);
            // Watchdog probe (same as the image path above).
            GstPad *pad = gst_element_get_static_pad (m_videosink, "sink");
            if(pad){
                m_probeId=gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
                                   (GstPadProbeCallback)gstVideoSinkProbe, this, NULL);
                gst_object_unref (pad);
            }

            m_playsink = gst_element_factory_make ("playsink", "playsink");
            gst_util_set_object_arg (G_OBJECT (m_playsink), "flags","text+audio+video");
            g_object_set(G_OBJECT(m_playsink),"video-sink",m_videosink, nullptr);
            if(m_mute){
                g_object_set(G_OBJECT(m_playsink),"mute",1, nullptr);
            }
            if(m_volume>0){
                g_object_set(G_OBJECT(m_playsink),"volume",m_volume/(double)100.0, nullptr);
            }

            gst_bin_add_many(GST_BIN (m_pipeline),filesrc,decodebin,m_playsink,nullptr);
            gst_element_link(filesrc,decodebin);
        }
    }else{
        // Live stream: use playbin instead of a fixed pipeline, since live
        // sources may need rtp depayloading, h264/ts parsing, and vary by
        // protocol.
        m_pipeline = gst_element_factory_make ("playbin", "playbin");
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
        m_busWatchId=gst_bus_add_watch (bus,gstBusMessageCallback, this);
        gst_object_unref(bus);

        // Tune dynamically created elements (decoders, network sources).
        g_signal_connect (m_pipeline, "element-setup", G_CALLBACK (gstElementSetupCallback), this);
        g_signal_connect (m_pipeline, "source-setup", G_CALLBACK (gstSourceElementSetupCallback), this);
        g_object_set(G_OBJECT(m_pipeline),"uri",m_mediaInfo.url.toStdString().data(), nullptr);
        g_object_set(G_OBJECT(m_pipeline),"buffer-duration",4*GST_SECOND, nullptr);

        if(m_mute){
            g_object_set(G_OBJECT(m_pipeline),"mute",1, nullptr);
        }
        if(m_volume>0){
            g_object_set(G_OBJECT(m_pipeline),"volume",m_volume/(double)100.0, nullptr);
        }

        // Create the configured video sink, falling back to xvimagesink.
        if(m_vsinkName!=""){
            m_videosink = gst_element_factory_make (m_vsinkName.toStdString().data(), "vsink");
        }
        if(!m_videosink){
            m_videosink = gst_element_factory_make ("xvimagesink", "vsink");
        }
        g_object_set(G_OBJECT(m_videosink),"show-preroll-frame",m_showFirstFrame,nullptr);
        setWId(m_wid);
        setRenderSize(m_renderSize);
        g_object_set (m_pipeline, "video-sink", m_videosink, NULL);

        // Watchdog probe (same as the file paths above).
        GstPad *pad = gst_element_get_static_pad (m_videosink, "sink");
        if(pad){
            m_probeId=gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
                               (GstPadProbeCallback)gstVideoSinkProbe, this, NULL);
            gst_object_unref (pad);
        }
    }
}

void VideoPlayer::destoryPipeline()
{
    // Tear the pipeline down completely and reset all related members.
    // Stop every timer first so no callback fires mid-destruction.
    stopEndTimer();
    stopProbeCheck();
    stopPosNotifyTimer();

    if(m_pipeline){
        // Step down through the states instead of jumping straight to NULL,
        // letting elements flush and release resources in order.
        gst_element_set_state (m_pipeline, GST_STATE_PAUSED);
        gst_element_set_state (m_pipeline, GST_STATE_READY);
        gst_element_set_state (m_pipeline, GST_STATE_NULL);

        // Only remove the bus watch if one was actually installed;
        // g_source_remove() on an invalid id logs a critical warning.
        // Reset the id so a second destroy cannot remove it twice.
        if(m_busWatchId){
            g_source_remove(m_busWatchId);
            m_busWatchId=0;
        }

        // Give back the request pads previously obtained from playsink.
        for(int i=0;i<m_playsinkPads.size();i++){
            GstPad* pad = m_playsinkPads[i];
            if(pad){
                gst_element_release_request_pad(m_playsink,pad);
                gst_object_unref(pad);
            }
        }
        m_playsinkPads.clear();

        // Remove the buffer probe from the video sink; guard against a
        // pipeline that was torn down before a video sink was created.
        if(m_videosink){
            GstPad *pad = gst_element_get_static_pad (m_videosink, "sink");
            if(pad){
                gst_pad_remove_probe(pad,m_probeId);
                gst_object_unref (pad);
            }
        }

        gst_object_unref(m_pipeline);

        m_pipeline=NULL;
        m_playsink=NULL;
        m_videosink=NULL;
        m_vcodecName="";
        m_acodecName="";
    }
}


/*
    切换管线状态
*/
void VideoPlayer::changeState(GstState targetState,const QString &reason,bool check)
{
    // Remember what we asked for so later bus messages can be interpreted.
    m_targetState = targetState;
    if(m_logger){
        m_logger->info(QString("<%1> 请求切换管线状态（%2） %3")
                       .arg(__FUNCTION__)
                       .arg(reason)
                       .arg(dumpPipelineState()));
    }

    // Arm or disarm the data-flow watchdog according to the caller's wish.
    if(!check){
        stopProbeCheck();
        stopPosNotifyTimer();
    }else{
        startProbeCheck();
    }

    // Ask GStreamer to move the pipeline towards the requested state,
    // then branch on how the transition will complete.
    const GstStateChangeReturn result = gst_element_set_state (m_pipeline, targetState);

    if(result==GST_STATE_CHANGE_SUCCESS){
        if(m_logger){
            m_logger->info(QString("<%1> 管线状态已同步切换 %2")
                           .arg(__FUNCTION__)
                           .arg(dumpPipelineState()));
        }
        // A synchronous switch emits no GST_STATE_CHANGED message,
        // so the result must be processed right here.
        processStateChangeResult(targetState);
    }else if(result==GST_STATE_CHANGE_ASYNC){
        if(m_logger){
            m_logger->info(QString("<%1> 管线状态将异步切换 %2")
                           .arg(__FUNCTION__)
                           .arg(dumpPipelineState()));
        }
        // A GST_MESSAGE_STATE_CHANGED will arrive on the bus later.
    }else if(result==GST_STATE_CHANGE_FAILURE){
        if(m_logger){
            m_logger->error(QString("<%1> 管线状态切换失败 %2")
                            .arg(__FUNCTION__)
                            .arg(dumpPipelineState()));
        }
        // A GST_MESSAGE_ERROR will arrive on the bus later.
    }else if(result==GST_STATE_CHANGE_NO_PREROLL){
        if(m_logger){
            m_logger->warn(QString("<%1> 管线状态已同步切换，但未预滚 %2")
                           .arg(__FUNCTION__)
                           .arg(dumpPipelineState()));
        }
        // Typically returned for live sources; behaves much like ASYNC.
    }
}

/*
   状态切换完成时应调用此函数处理切换结果
*/
void VideoPlayer::processStateChangeResult(GstState curState)
{
    // Translate the reached pipeline state into the matching signal.
    if(curState==GST_STATE_PLAYING){
        // Successful playback clears the media source's accumulated error count.
        m_mediaInfo.errorCount=0;

        // (Re)start the periodic position notifications.
        startPosNotifyTimer();

        emit this->playerPlaying();
    }else if(curState==GST_STATE_PAUSED){
        emit this->playerPaused();
    }else if(curState==GST_STATE_NULL){
        emit this->playerStopped();
    }
    // GST_STATE_READY and all other states emit nothing.
}


/*
    组件及管线状态改变时消息回调
*/
void VideoPlayer::onStateChangedMessage(GstMessage *msg)
{
    // Only the pipeline's own transitions matter here; state-changed
    // messages from individual elements are ignored.
    if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_pipeline)){
        return;
    }

    GstState oldstate, newstate, pendingstate;
    gst_message_parse_state_changed (msg, &oldstate, &newstate, &pendingstate);

    if(m_logger){
        m_logger->info(QString("<%1> 管线状态已改变 newState: %2 oldState: %3 pendingState: %4 myTargetState: %5")
                       .arg(__FUNCTION__)
                       .arg(gst_element_state_get_name(newstate))
                       .arg(gst_element_state_get_name(oldstate))
                       .arg(gst_element_state_get_name(pendingstate))
                       .arg(gst_element_state_get_name(m_targetState)));
    }

    processStateChangeResult(newstate);
}

/*
    流结尾消息回调
*/
void VideoPlayer::onEosMessage(GstMessage *msg)
{
    // End-of-stream handler: stop the watchdog and position timers, then
    // decide whether this EOS means an error (live), a wait, or a clean end.
    if(m_logger){
        m_logger->info(QString("<%1> 媒体流到达结尾 %2 %3")
                       .arg(__FUNCTION__)
                       .arg(dumpMediaInfo(m_mediaInfo))
                       .arg(dumpPipelineState()));
    }
    stopProbeCheck();
    stopPosNotifyTimer();
    m_isEos=true;

    // EOS on a live stream likely means the stream broke off,
    // so it is treated as a playback error.
    if(m_mediaInfo.mediaType=="live"){
        QString errMsg = "直播流异常中断";
        this->error(errMsg);
        return;
    }

    // If the end timer is still running at EOS, the pipeline played faster
    // than real time; wait for the timer to fire before emitting Ended.
    if(!m_isForceEnd && m_endTimer.isActive()){
        if(m_logger){
            m_logger->info(QString("<%1> 暂停等待（管线播放速度过快） remainTime: %2 %3 %4")
                           .arg(__FUNCTION__)
                           .arg(QString::number(m_endTimer.remainingTime()/1000.0,'f',2)+"s")
                           .arg(dumpMediaInfo(m_mediaInfo))
                           .arg(dumpPipelineState()));
        }
        return;
    }

    stopEndTimer();
    // Push one final position notification before announcing the end.
    if(m_posNotifyInterval>0){
        onPosNotifyTimerTimeout();
    }
    emit this->playerEnded();
}

/*
    管线错误消息回调
*/
void VideoPlayer::onErrorMessage(GstMessage *msg)
{
    // Extract the error details carried by the bus message.
    GError *gerr = NULL;
    gchar *dbgInfo = NULL;
    gst_message_parse_error(msg, &gerr, &dbgInfo);

    const QString errMsg = QString("管线出现错误 errCode %1 errMsg: %2 debugMsg: %3")
            .arg(gerr->code)
            .arg(gerr->message)
            .arg(dbgInfo);

    // Both the GError and the debug string are owned by us after parsing.
    g_free(dbgInfo);
    g_error_free(gerr);

    this->error(errMsg);
}


void VideoPlayer::onElementMessage(GstMessage *msg)
{
    // gst_message_get_structure() may return NULL for element messages that
    // carry no structure; gst_structure_has_name() must not be fed NULL.
    const GstStructure* st = gst_message_get_structure(msg);
    if(!st){
        return;
    }

    // udpsrc posts GstUDPSrcTimeout when no data arrived within its timeout.
    if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
        QString errMsg = QString("udpsrc接收超时");
        this->error(errMsg);
    }
}

void VideoPlayer::onBufferingMessage(GstMessage *msg)
{
    // msg IS used below, so the previous Q_UNUSED(msg) was contradictory
    // and has been removed.
    // Log the buffering progress carried by the message.
    gint percent=0;
    gst_message_parse_buffering(msg,&percent);

    qDebug()<<__FUNCTION__<<"缓冲中："<<percent<<"%";
}


void VideoPlayer::onDurationChangedMessage(GstMessage *msg)
{
    // Intentionally a no-op: duration changes are not tracked here.
    // NOTE(review): presumably callers query duration on demand instead —
    // confirm before relying on this handler staying empty.
    Q_UNUSED(msg)
}

void VideoPlayer::onAsyncDoneMessage(GstMessage *msg)
{
    // An async pipeline operation (preroll or seek) has completed.
    // Two request flags drive the handling: a pending seek (m_isReqSeek)
    // or the initial play request (m_isReqPlay). msg itself is not inspected.

    // Case 1: a previously requested seek has finished.
    if(m_isReqSeek){
        if(m_logger){
            m_logger->info(QString("<%1> 定位完成 %2 %3")
                           .arg(__FUNCTION__)
                           .arg(dumpMediaInfo(m_mediaInfo))
                           .arg(dumpPipelineState()));
        }

        m_isReqSeek=false;
        // Emit an immediate position update so the UI reflects the new spot.
        if(m_posNotifyInterval>0){
            onPosNotifyTimerTimeout();
        }
        return;
    }

    // Case 2: the initial play request is prerolled.
    if(m_isReqPlay){
        // force-aspect-ratio only takes effect when changed after the pipeline
        // has prerolled; setting it while playing/paused still shows the first
        // frame incorrectly, hence it is applied here.
        setFill(m_fill);

        if(m_mediaInfo.startPos>0){
            if(m_mediaInfo.mediaType=="audio" || m_mediaInfo.mediaType=="video"){
                // First ASYNC_DONE: issue the seek/rate; the resulting second
                // ASYNC_DONE (isSeeked==true) actually starts playback.
                if(!m_mediaInfo.isSeeked){
                    m_mediaInfo.isSeeked=true;

                    if(m_logger){
                        m_logger->info(QString("<%1> 执行Seek/Rate %2 %3")
                                       .arg(__FUNCTION__)
                                       .arg(dumpMediaInfo(m_mediaInfo))
                                       .arg(dumpPipelineState()));
                    }

                    qint64 startPos = m_mediaInfo.startPos>0?m_mediaInfo.startPos:0;
                    // Forward rate: play from startPos to the end.
                    if(m_rate>0){
                        gst_element_seek(m_videosink,m_rate,GST_FORMAT_TIME,
                                         (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
                                         GST_SEEK_TYPE_SET,startPos*GST_MSECOND,
                                         GST_SEEK_TYPE_END,0);

                    }else{
                        // Non-positive rate (reverse): segment runs 0..startPos.
                        gst_element_seek(m_videosink,m_rate,GST_FORMAT_TIME,
                                         (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
                                         GST_SEEK_TYPE_SET,0,
                                         GST_SEEK_TYPE_SET,startPos*GST_MSECOND);
                    }
                }else{
                    // Second ASYNC_DONE after the seek: start playing.
                    m_isReqPlay=false;
                    if(m_posNotifyInterval>0){
                        onPosNotifyTimerTimeout();
                    }
                    changeState(GST_STATE_PLAYING,"Seek/Rate执行完成，启动播放",true);
                }
            }else{
                // Non-seekable media type: nothing more to do here.
                m_isReqPlay=false;
            }
        }else{
            // No start offset requested: nothing more to do here.
            m_isReqPlay=false;
        }
    }
}

void VideoPlayer::onClockLostMessage(GstMessage *msg)
{
    Q_UNUSED(msg)

    // Recover from a lost clock by cycling the pipeline through PAUSED and
    // back to PLAYING so a new clock can be selected.
    const QString reason("时钟丢失");
    this->changeState(GST_STATE_PAUSED,reason,false);
    this->changeState(GST_STATE_PLAYING,reason,true);
}

void VideoPlayer::onLatencyMessage(GstMessage *msg)
{
    Q_UNUSED(msg)

    // Redistribute latency across the pipeline when an element reports
    // that its latency changed.
    if(!m_pipeline){
        return;
    }
    gst_bin_recalculate_latency (GST_BIN (m_pipeline));
}


void VideoPlayer::error(const QString &errMsg)
{
    if(m_logger){
        m_logger->error(QString("<%1> %2 %3 %4")
                        .arg(__FUNCTION__).arg(errMsg)
                        .arg(dumpMediaInfo(m_mediaInfo))
                        .arg(dumpPipelineState()));
    }

    stopEndTimer();
    stopProbeCheck();
    stopPosNotifyTimer();

    if(m_pipeline){
        this->changeState(GST_STATE_NULL,errMsg,false);
    }

    emit this->playerError(errMsg);
}


/*
    返回管线的状态信息
*/
QString VideoPlayer::dumpPipelineState()
{
    // One-line snapshot of the pipeline's state machine plus the player's
    // own settings, used for logging.
    if(!m_pipeline){
        return "pipeline: null";
    }

    return QString("curState: %1 nextState: %2 pendingState: %3 targetState: %4 myTargetState: %5 "
                   "rate: %6 mute: %7 volume: %8 hwa: %9 fill: %10 vcodec: %11 acodec: %12")
            .arg(gst_element_state_get_name(m_pipeline->current_state))
            .arg(gst_element_state_get_name(m_pipeline->next_state))
            .arg(gst_element_state_get_name(m_pipeline->pending_state))
            .arg(gst_element_state_get_name(m_pipeline->target_state))
            .arg(gst_element_state_get_name(this->m_targetState))
            .arg(QString::number(this->m_rate,'f',2))
            .arg(this->m_mute)
            .arg(this->m_volume)
            .arg(this->m_enableHWA)
            .arg(this->m_fill)
            .arg(this->m_vcodecName)
            .arg(this->m_acodecName);
}

void VideoPlayer::onProbeCheckTimerTimeout()
{
    if(this->m_isDestoryed){
        return;
    }

    // A buffer probe seen within the timeout window resets the failure
    // streak; otherwise the streak grows by one.
    const bool timedOut = !m_probeLastDateTime.isValid()
            || m_probeLastDateTime.msecsTo(QDateTime::currentDateTime()) > m_probeCheckTimeout;
    m_probeCheckCurErrTimes = timedOut ? (m_probeCheckCurErrTimes+1) : 0;

    // Too many consecutive misses means the data flow has stalled.
    if(m_probeCheckCurErrTimes >= m_probeCheckMaxErrTimes){
        QString errMsg = QString("管线数据流超时 timeout: %1ms")
                .arg(m_probeCheckCurErrTimes*m_probeCheckInterval);
        this->error(errMsg);
    }
}

void VideoPlayer::startEndTimer()
{
    m_isForceEnd=false;
    if(m_mediaInfo.endPos>0 && m_mediaInfo.endPos>m_mediaInfo.startPos){
        m_endTimer.start(m_mediaInfo.endPos-m_mediaInfo.startPos);
    }
}

void VideoPlayer::stopEndTimer()
{
    // Cancels the forced-end countdown armed by startEndTimer().
    m_endTimer.stop();
}


/*
    播放时间到,强制结束播放
*/
void VideoPlayer::onEndTimerTimeout()
{
    // The configured play window has elapsed: either wait a little longer
    // for a natural EOS, or force the end of playback now.
    if(!m_isForceEnd && !m_isEos){
        qint64 remainms=0;
        bool needWait=false;
        if(m_mediaInfo.mediaType!="live"){
            qint64 curPos = position();

            // When the timer fires and the current position has already
            // passed (or is within 1s of) the configured end position while
            // still playing, the pipeline is about to reach EOS on its own,
            // so let it finish naturally.
            if(curPos>=m_mediaInfo.endPos ||
                (curPos<m_mediaInfo.endPos && m_mediaInfo.endPos-curPos <1000 && state()==GST_STATE_PLAYING))
            {
                remainms=m_mediaInfo.endPos-curPos;
                needWait=true;

                // GStreamer is known to occasionally not emit EOS at the end
                // of playback, so re-arm the timer (with ~1s slack) to
                // guarantee the Ended signal is eventually emitted.
                m_isForceEnd=true;
                int delay =1000;
                if(remainms>0){
                    delay+=remainms;
                }
                m_endTimer.start(delay);
            }
        }
        if(needWait){
            if(m_logger){
                m_logger->info(QString("<%1> 播放时间到，管线即将EOS，暂停等待 remainTime: %2ms %3 %4")
                               .arg(__FUNCTION__).arg(remainms)
                               .arg(dumpMediaInfo(m_mediaInfo))
                               .arg(dumpPipelineState()));
            }
            return;
        }
    }

    if(m_logger){
        m_logger->info(QString("<%1> 播放时间到，强制结束播放 %2 %3")
                       .arg(__FUNCTION__)
                       .arg(dumpMediaInfo(m_mediaInfo))
                       .arg(dumpPipelineState()));
    }
    // Push one final position notification before announcing the end.
    if(m_posNotifyInterval>0){
        onPosNotifyTimerTimeout();
    }
    emit this->playerEnded();
}

void VideoPlayer::startPosNotifyTimer()
{
    // A non-positive interval means position notifications are disabled.
    if(m_posNotifyInterval<=0){
        stopPosNotifyTimer();
        return;
    }

    // Forget the last reported position so the first tick always notifies.
    m_lastNotifyPos=-1;
    m_posNotifyTimer.setInterval(m_posNotifyInterval);
    m_posNotifyTimer.start();
}

void VideoPlayer::stopPosNotifyTimer()
{
    // Stops the periodic positionChanged notifications.
    m_posNotifyTimer.stop();
}

void VideoPlayer::onPosNotifyTimerTimeout()
{
    qint64 curPos = this->position();
    if(curPos>0){
        if(m_posNotifyWithSec){
            curPos=curPos/1000;
        }

        if(curPos!=m_lastNotifyPos){
            m_lastNotifyPos=curPos;
            emit positionChanged(curPos);
        }
    }
}


void VideoPlayer::startProbeCheck()
{
    // Reset the watchdog bookkeeping before (re)starting its timer.
    m_probeLastDateTime = QDateTime();
    m_probeCheckCurErrTimes=0;
    m_probeCheckTimer.start();
}

void VideoPlayer::stopProbeCheck()
{
    // Stops the data-flow watchdog timer.
    m_probeCheckTimer.stop();
}

Logger *VideoPlayer::logger()
{
    // Accessor for the logger; callers throughout this file null-check it,
    // so it may legitimately be null when logging is disabled.
    return m_logger;
}
