#include "xh_avcodeccontroler.h"
#include <QDebug>
#include <thread>
#include "xh_datadefine.h"
#include "mainwindow.h"
#include "audioplayer.h"

xh_AVCodecControler::xh_AVCodecControler(MainWindow* mainWindow) :
    m_mainWindow(mainWindow)
{
    // Global FFmpeg registration (required for FFmpeg < 4.0 APIs used here).
    av_register_all();
    avfilter_register_all();

    // No media opened yet: time bases stay invalid until InitVideoCodec().
    m_vStreamTimeRational = av_make_q(0, 0);
    m_aStreamTimeRational = av_make_q(0, 0);

    m_eMediaPlayStatus = STATUS_STOP;

    // All worker-thread liveness flags start cleared; threads are spawned
    // later by startMediaProcessThreads().
    m_bThreadRunning      = false;
    m_bFileThreadRunning  = false;
    m_bVideoThreadRunning = false;
    m_bAudioThreadRunning = false;
}

xh_AVCodecControler::~xh_AVCodecControler()
{
    // Signal the detached worker threads to stop and wait for them to exit
    // before tearing down the FFmpeg objects they still reference.
    m_bThreadRunning = false;
    waitAllThreadsExit();

    // YUV conversion buffer was allocated with av_malloc in InitVideoCodec().
    if (m_YUV420Buffer) {
        av_free(m_YUV420Buffer);
        m_YUV420Buffer = nullptr;
    }

    if (m_YUVFrame) {
        av_frame_free(&m_YUVFrame);
        m_YUVFrame = nullptr;
    }

    // Codec contexts come from m_formatCtx->streams[i]->codec (legacy API):
    // close them here; the format context owns the underlying storage.
    if (m_videoCodecCtx) {
        avcodec_close(m_videoCodecCtx);
        m_videoCodecCtx = nullptr;
    }

    if (m_audioCodecCtx) {
        avcodec_close(m_audioCodecCtx);
        m_audioCodecCtx = nullptr;
    }

    if (m_formatCtx) {
        avformat_close_input(&m_formatCtx);
        m_formatCtx = nullptr;
    }

    if (m_pVideoFrame) {
        av_frame_free(&m_pVideoFrame);
        m_pVideoFrame = nullptr;
    }

    if (m_pAudioFrame) {
        av_frame_free(&m_pAudioFrame);
        m_pAudioFrame = nullptr;
    }

    if (m_pVideoSwsCtx) {
        sws_freeContext(m_pVideoSwsCtx);
        m_pVideoSwsCtx = nullptr;
    }

    if (m_pAudioSwrCtx) {
        swr_free(&m_pAudioSwrCtx);
        m_pAudioSwrCtx = nullptr;
    }

    // Allocated with realloc() in convertAndPlayAudio(), hence plain free().
    if (m_pSwrBuffer) {
        free(m_pSwrBuffer);
        m_pSwrBuffer = nullptr;
    }

    if (m_filterGraph) {
        // Release the audio filter graph built in initAudioFilter().
        avfilter_graph_free(&m_filterGraph);
        m_filterGraph = nullptr;
    }
}

void xh_AVCodecControler::setVideoPathString(const QString &videoPathString)
{
    // Remember the media location; it is consumed later by InitVideoCodec().
    m_videoPathString = videoPathString;
}

int xh_AVCodecControler::InitVideoCodec()
{
    if (m_videoPathString.isEmpty())
        return -1;

    if (avformat_open_input(&m_formatCtx, m_videoPathString.toStdString().c_str(), NULL, NULL) != 0) {
        qDebug() << "open input stram failed.";
        return -1;
    }

    if (avformat_find_stream_info(m_formatCtx, nullptr) < 0) {
        qDebug() << "avformat_find_strame_info failed.";
        return -1;
    }

    av_dump_format(m_formatCtx, 0, m_videoPathString.toStdString().c_str(), 0);

    m_videoStreamIdx = -1;
    m_audioStreamIdx = -1;

    for (int i=0; i< (int)m_formatCtx->nb_streams; ++i) {
        AVCodecParameters *codecParameters = m_formatCtx->streams[i]->codecpar;

        if (codecParameters->codec_type == AVMEDIA_TYPE_VIDEO) {
            m_videoStreamIdx = i;
            qDebug() << "videoStreamIndex :" << m_videoStreamIdx;

            m_videoCodecCtx = m_formatCtx->streams[i]->codec;
            AVCodec* codec = avcodec_find_decoder(m_videoCodecCtx->codec_id);
            if (codec == nullptr) {
                qDebug() << "Video AVCodec is null";
                return -1;
            }

            if (avcodec_open2(m_videoCodecCtx, codec, nullptr) < 0) {
                qDebug() << "video codec open failed.";
                return -1;
            }

        } else if (codecParameters->codec_type == AVMEDIA_TYPE_AUDIO) {
            m_audioStreamIdx = i;
            qDebug() << "audioStreamIndex:" << m_audioStreamIdx;

            m_audioCodecCtx = m_formatCtx->streams[i]->codec;
            AVCodec* codec  = avcodec_find_decoder(m_audioCodecCtx->codec_id);
            if (codec == nullptr) {
                qDebug() << "audio codec open failed.";
                return -1;
            }

            if (avcodec_open2(m_audioCodecCtx, codec, nullptr) < 0) {
                qDebug() << "audio codec open failed.";
                return -1;
            }

            m_sampleRate = m_audioCodecCtx->sample_rate;
            m_channel = m_audioCodecCtx->channels;
            switch (m_audioCodecCtx->sample_fmt) {
            case AV_SAMPLE_FMT_U8:
                m_sampleSize = 8;
                break;
            case AV_SAMPLE_FMT_S16:
                m_sampleSize = 16;
                break;
            case AV_SAMPLE_FMT_S32:
                m_sampleSize = 32;
                break;
            default:
                break;
            }
        }
    }

    AudioPlayer::instance()->setSamleRate(m_sampleRate);
    AudioPlayer::instance()->setSampleSize(m_sampleSize);
    AudioPlayer::instance()->setChannel(m_channel);

    if (m_videoCodecCtx) {
        m_YUVFrame = av_frame_alloc();

        if (!m_YUVFrame) {
            qDebug() << "YUV Frame  alloc error.";
            return -1;
        }

        m_YUV420Buffer = (uint8_t*)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, m_videoCodecCtx->width, m_videoCodecCtx->height, 1));

        if (!m_YUV420Buffer) {
            qDebug() << "YUV Buffer alloc error";
            return -1;
        }

        av_image_fill_arrays(m_YUVFrame->data, m_YUVFrame->linesize, m_YUV420Buffer, AV_PIX_FMT_YUV420P, m_videoCodecCtx->width, m_videoCodecCtx->height, 1);

        m_videoWidth = m_videoCodecCtx->width;
        m_videoHeight = m_videoCodecCtx->height;

        m_vStreamTimeRational = m_formatCtx->streams[m_videoStreamIdx]->time_base;
        m_aStreamTimeRational = m_formatCtx->streams[m_audioStreamIdx]->time_base;

        qDebug() << "Init VideoCodec sucess.V Time Base:" << m_vStreamTimeRational.num << "Den "<< m_vStreamTimeRational.den;
        qDebug() << "Init VideoCodec sucess.A Time Base:" << m_aStreamTimeRational.num << "Den "<< m_aStreamTimeRational.den;
        qDebug() << "Init VideoCodec sucess. Width:" << m_videoWidth << "Height:" << m_videoHeight;
    }

    initAudioFilter();

    return 0;
}

QSize xh_AVCodecControler::GetMediaWidthHeight()
{
    // Dimensions captured from the video codec context in InitVideoCodec().
    return { m_videoWidth, m_videoHeight };
}

// Total media duration in seconds.
// 详情链接 https://www.cnblogs.com/yongdaimi/p/10684953.html
// Returns 0 when no media has been opened yet (the old code dereferenced a
// null m_formatCtx if called before InitVideoCodec()).
float xh_AVCodecControler::GetMediaTotalSeconds()
{
    if (!m_formatCtx)
        return 0.0f;

    // duration is expressed in AV_TIME_BASE units (microseconds).
    float totalDuration = m_formatCtx->duration / (AV_TIME_BASE * 1.000);
    return totalDuration;
}

void xh_AVCodecControler::StartPlayVideo()
{
    // Open the audio output first so decoded samples have somewhere to go,
    // then spawn the demux/decode worker threads.
    AudioPlayer::instance()->StartAudioPlayer();
    startMediaProcessThreads();
}

// Stop playback: signal workers, reset the playback clock state, and release
// all queued packets.
void xh_AVCodecControler::StopPlayVideo()
{
    m_bThreadRunning = false;

    m_bReadFileEOF = false;

    m_currVideoTimeStamp = 0.0f;
    m_currAudioTimeStamp = 0.0f;

    m_vStreamTimeRational = av_make_q(0, 0);
    m_aStreamTimeRational = av_make_q(0, 0);

    m_eMediaPlayStatus = STATUS_STOP;

    AudioPlayer::instance()->StopAudioPlayer();

    // BUG FIX: wait for the detached workers BEFORE draining the queues.
    // The old order cleared the (non-thread-safe) QQueues while the decode
    // threads could still be dequeuing from them.
    waitAllThreadsExit();

    while (!m_audioPktQueue.isEmpty())
        freePacket(m_audioPktQueue.takeFirst());
    while (!m_videoPktQueue.isEmpty())
        freePacket(m_videoPktQueue.takeFirst());
}

void xh_AVCodecControler::startMediaProcessThreads()
{
    m_bThreadRunning = true;

    std::thread readThread(&xh_AVCodecControler::doReadMediaFrameThread, this);
    readThread.detach();

    std::thread videoThread(&xh_AVCodecControler::doVideoDecodeShowThread, this);
    videoThread.detach();

    std::thread audioThread(&xh_AVCodecControler::doAudioDecodePlayThread, this);
    audioThread.detach();
}

void xh_AVCodecControler::waitAllThreadsExit()
{
    while(m_bFileThreadRunning){
        stdThreadSleep(10);
        continue;
    }

    while(m_bVideoThreadRunning){
        stdThreadSleep(10);
        continue;
    }
}

// Demux worker: pulls packets from the container and feeds the A/V queues.
void xh_AVCodecControler::doReadMediaFrameThread()
{
    while (m_bThreadRunning) {
        m_bFileThreadRunning = true;   // advertise that this worker is alive

        // Paused: idle without consuming the file.
        if (m_eMediaPlayStatus == STATUS_PAUSE) {
            stdThreadSleep(10);
            continue;
        }

        // Back-pressure: queues are full enough, let the decoders drain.
        const bool queuesFull =
                m_videoPktQueue.size() > MAX_VIDEO_FRAME_IN_QUEUE
                || m_audioPktQueue.size() > MAX_AUDIO_FRAME_IN_QUEUE;
        if (queuesFull) {
            stdThreadSleep(10);
            continue;
        }

        if (!m_bReadFileEOF)
            readMediaPacket();
        else
            stdThreadSleep(10);   // at EOF: spin slowly until stopped/seeked
    }

    qDebug() << "Read media thread exit...";
    m_bFileThreadRunning = false;
}

// Video worker: dequeues packets, decodes them, paces each frame against the
// wall clock and hands it to the renderer.
void xh_AVCodecControler::doVideoDecodeShowThread()
{
    if (!m_formatCtx)
        return;

    if (!m_pVideoFrame)
        m_pVideoFrame = av_frame_alloc();

    // Fresh start vs. resume: rebase the wall clock against the current pts.
    if (m_currVideoTimeStamp == 0)
        m_startTime = QDateTime::currentDateTime();
    else
        recalculateStartTime();

    while (m_bThreadRunning) {
        m_bVideoThreadRunning = true;

        if (m_eMediaPlayStatus == STATUS_PAUSE) {
            stdThreadSleep(10);
            continue;
        }

        if (m_videoPktQueue.isEmpty()) {
            stdThreadSleep(1);
            continue;
        }

        AVPacket* pkt = (AVPacket*)m_videoPktQueue.dequeue();
        if (!pkt)
            break;

        if (!m_bThreadRunning) {
            freePacket(pkt);
            break;
        }

        int retValue = avcodec_send_packet(m_videoCodecCtx, pkt);
        if (retValue != 0) {
            freePacket(pkt);
            continue;
        }

        // BUG FIX: one packet may yield several frames; the old single
        // receive call silently dropped every frame after the first.
        // Drain the decoder until it asks for more input (EAGAIN).
        // Use m_pVideoFrame->pts: packet pts ordering can be unreliable.
        while (avcodec_receive_frame(m_videoCodecCtx, m_pVideoFrame) == 0) {
            tickVideoFrameTimerDelay(m_pVideoFrame->pts);
            convertAndRenderVideo(m_pVideoFrame, m_pVideoFrame->pts);
        }

        freePacket(pkt);
    }

    qDebug()<<"video decode show  thread exit...";
    m_bVideoThreadRunning = false;
}

// Audio worker: dequeues packets, decodes them, paces against the wall clock
// and pushes converted samples to AudioPlayer.
void xh_AVCodecControler::doAudioDecodePlayThread()
{
    if (m_formatCtx == NULL) {
        return;
    }

    if (m_pAudioFrame == nullptr) {
        m_pAudioFrame = av_frame_alloc();
    }

    while (m_bThreadRunning) {
        // BUG FIX: this flag was assigned 'false' here, so waitAllThreadsExit()
        // could never tell that the audio worker was still alive.
        m_bAudioThreadRunning = true;

        if (m_eMediaPlayStatus == STATUS_PAUSE) {
            stdThreadSleep(10);
            continue;
        }

        if (m_audioPktQueue.isEmpty()) {
            stdThreadSleep(1);
            continue;
        }

        AVPacket* pkt = (AVPacket*)m_audioPktQueue.dequeue();
        if (pkt == nullptr) {
            break;
        }

        if (!m_bThreadRunning) {
            freePacket(pkt);
            break;
        }

        int retValue = avcodec_send_packet(m_audioCodecCtx, pkt);
        if (retValue != 0) {
            freePacket(pkt);
            continue;
        }

        // BUG FIX: drain every frame produced by this packet instead of
        // taking only the first one (audio packets often decode to several
        // frames, and dropping them caused gaps).
        // NOTE: speed change is handled inside convertAndPlayAudio() via swr;
        // pitch is not preserved (see comment there).
        while (avcodec_receive_frame(m_audioCodecCtx, m_pAudioFrame) == 0) {
            tickAudioFrameTimerDelay(m_pAudioFrame->pts);
            convertAndPlayAudio(m_pAudioFrame);
        }

        freePacket(pkt);
    }

    m_bAudioThreadRunning = false;

    qDebug()<<"audio decode show  thread exit...";
}

void xh_AVCodecControler::stdThreadSleep(int mseconds)
{
    // Thin wrapper so callers don't repeat the <chrono> boilerplate.
    std::this_thread::sleep_for(std::chrono::milliseconds(mseconds));
}

// Read one packet from the container and route it to the matching queue.
// Packets are malloc()'d here and released by freePacket() after decoding.
void xh_AVCodecControler::readMediaPacket()
{
    AVPacket* packet = (AVPacket*)malloc(sizeof (AVPacket));
    if (!packet)
        return;

    av_init_packet(packet);

    m_eMediaPlayStatus = STATUS_PLAYING;

    int retValue = av_read_frame(m_formatCtx, packet);
    if (retValue == 0) {
        if (packet->stream_index == m_videoStreamIdx) {         // video frame
            if (!av_dup_packet(packet)) {
                m_videoPktQueue.enqueue(packet);
            } else {
                freePacket(packet);
            }
        } else if (packet->stream_index == m_audioStreamIdx) {  // audio frame
            if (!av_dup_packet(packet)) {
                m_audioPktQueue.enqueue(packet);
            } else {
                freePacket(packet);
            }
        } else {
            // BUG FIX: packets from other streams (subtitles, data, ...)
            // were previously leaked.
            freePacket(packet);
        }
    } else {
        if ((m_bReadFileEOF == false) && (retValue == AVERROR_EOF))
            m_bReadFileEOF = true;

        // BUG FIX: on read failure nothing was queued, so the packet must
        // be released here (the old code leaked one packet per failed read).
        freePacket(packet);
    }
}

// Release a packet produced by readMediaPacket(): the payload via the
// (legacy) FFmpeg call, then the malloc()'d AVPacket struct itself.
void xh_AVCodecControler::freePacket(AVPacket *pkt)
{
    if (pkt == nullptr)
        return;

    av_free_packet(pkt);
    free(pkt);
}

// Pace the video thread: sleep until the frame with this pts is due,
// measured against the (speed-scaled) virtual start time.
void xh_AVCodecControler::tickVideoFrameTimerDelay(int64_t pts)
{
    // Invalid time base -> pts cannot be converted to wall-clock time.
    if (m_vStreamTimeRational.den <= 0)
        return;

    // Milliseconds of wall-clock time since playback (re)started.
    const qint64 elapsedMs = m_startTime.msecsTo(QDateTime::currentDateTime());

    // Presentation time of this frame, in milliseconds.
    m_currVideoTimeStamp = pts * av_q2d(m_vStreamTimeRational) * 1000;

    // Report position (seconds) to the UI.
    m_mainWindow->updateCurrentPts(m_currVideoTimeStamp / 1000);

    const float diffTime = m_currVideoTimeStamp / m_speed - elapsedMs;
    const int sleepMs = int(diffTime);
    if (sleepMs > 0)
        stdThreadSleep(sleepMs);
}

// Pace the audio thread: sleep until the frame with this pts is due,
// measured against the (speed-scaled) virtual start time.
void xh_AVCodecControler::tickAudioFrameTimerDelay(int64_t pts)
{
    // Invalid time base -> pts cannot be converted to wall-clock time.
    if (m_aStreamTimeRational.den <= 0)
        return;

    // Milliseconds of wall-clock time since playback (re)started.
    const qint64 elapsedMs = m_startTime.msecsTo(QDateTime::currentDateTime());

    // Presentation time of this frame, in milliseconds.
    m_currAudioTimeStamp = pts * av_q2d(m_aStreamTimeRational) * 1000;

    const float diffTime = m_currAudioTimeStamp / m_speed - elapsedMs;
    const int sleepMs = int(diffTime);
    if (sleepMs > 0)
        stdThreadSleep(sleepMs);
}

// Copy one image plane row by row, dropping the stride padding: the source
// advances by 'linesize' per row, the destination is written packed
// (qMin(linesize, width) bytes per row).
void xh_AVCodecControler::copyDecodedFrame420(uint8_t* src, uint8_t* dist,int linesize, int width, int height)
{
    const int rowBytes = qMin(linesize, width);
    for (int row = 0; row < height; ++row) {
        memcpy(dist, src, rowBytes);
        dist += rowBytes;
        src += linesize;
    }
}

void xh_AVCodecControler::recalculateStartTime()
{
    QDateTime dataTime = QDateTime::currentDateTime();
    m_startTime = dataTime.addMSecs(-(m_currVideoTimeStamp/m_speed));
}

// Register the GUI callback that receives each converted YUV frame, together
// with an opaque user value passed back on every invocation.
void xh_AVCodecControler::setUpdateVideoCallback(const UpdateVideo2GUI_Callback &updateVideoCallback, unsigned long userDataVideo)
{
    m_userDataVideo = userDataVideo;
    m_updateVideoCallback = updateVideoCallback;
}

// Convert the decoded frame to packed YUV420P and hand a copy of the three
// planes to the GUI callback.
// ppts: presentation timestamp forwarded unchanged to the callback frame.
void xh_AVCodecControler::convertAndRenderVideo(AVFrame *decodedFrame, long long ppts)
{
    if (!decodedFrame)
        return;

    if (!m_pVideoSwsCtx) {
        m_pVideoSwsCtx = sws_getContext(m_videoCodecCtx->width, m_videoCodecCtx->height,
                                        m_videoCodecCtx->pix_fmt,
                                        m_videoCodecCtx->width, m_videoCodecCtx->height,
                                        AV_PIX_FMT_YUV420P,
                                        SWS_BICUBIC, NULL, NULL, NULL);
    }
    if (!m_pVideoSwsCtx)
        return;

    sws_scale(m_pVideoSwsCtx, (const uint8_t* const*)decodedFrame->data,
              decodedFrame->linesize, 0,
              m_videoCodecCtx->height,
              m_YUVFrame->data,
              m_YUVFrame->linesize);

    const int w = m_videoCodecCtx->width;
    const int h = m_videoCodecCtx->height;

    // BUG FIX: the plane sizes must match what copyDecodedFrame420() actually
    // copies, i.e. qMin(linesize, width) bytes per row of the CONVERTED frame
    // (m_YUVFrame). The old code sized the buffers from decodedFrame->linesize,
    // which can differ from m_YUVFrame's stride and under-allocate.
    const unsigned int lumaLength   = (unsigned int)(h * qMin(m_YUVFrame->linesize[0], w));
    const unsigned int chromBLength = (unsigned int)((h / 2) * qMin(m_YUVFrame->linesize[1], w / 2));
    const unsigned int chromRLength = (unsigned int)((h / 2) * qMin(m_YUVFrame->linesize[2], w / 2));

    // Stack-allocated envelope; the callback only borrows it for the call.
    H264YUV_Frame updateYUVFrame{};

    updateYUVFrame.luma.length = lumaLength;
    updateYUVFrame.chromaB.length = chromBLength;
    updateYUVFrame.chromaR.length = chromRLength;

    updateYUVFrame.luma.dataBuffer    = (unsigned char*)malloc(lumaLength);
    updateYUVFrame.chromaB.dataBuffer = (unsigned char*)malloc(chromBLength);
    updateYUVFrame.chromaR.dataBuffer = (unsigned char*)malloc(chromRLength);

    // BUG FIX: malloc results were never checked before being memcpy'd into.
    if (updateYUVFrame.luma.dataBuffer && updateYUVFrame.chromaB.dataBuffer
            && updateYUVFrame.chromaR.dataBuffer) {
        copyDecodedFrame420(m_YUVFrame->data[0], updateYUVFrame.luma.dataBuffer, m_YUVFrame->linesize[0],
                  w, h);
        copyDecodedFrame420(m_YUVFrame->data[1], updateYUVFrame.chromaB.dataBuffer, m_YUVFrame->linesize[1],
                  w / 2, h / 2);
        copyDecodedFrame420(m_YUVFrame->data[2], updateYUVFrame.chromaR.dataBuffer, m_YUVFrame->linesize[2],
                  w / 2, h / 2);

        updateYUVFrame.width = w;
        updateYUVFrame.height = h;
        updateYUVFrame.pts = ppts;

        if (m_updateVideoCallback) {
            m_updateVideoCallback(&updateYUVFrame, m_userDataVideo);
        }
    }

    free(updateYUVFrame.luma.dataBuffer);
    free(updateYUVFrame.chromaB.dataBuffer);
    free(updateYUVFrame.chromaR.dataBuffer);
}

// Resample the decoded frame to interleaved S16 (rate scaled by m_speed) and
// push it to the audio device, waiting until the device has room.
void xh_AVCodecControler::convertAndPlayAudio(AVFrame *decodedFrame)
{
    if (!m_formatCtx || !m_pAudioFrame || !decodedFrame) {
        return ;
    }

    // Lazily (re)build the resampler; setSpeed() frees it to force a rebuild.
    if (m_pAudioSwrCtx == nullptr) {
        m_pAudioSwrCtx = swr_alloc();

        swr_alloc_set_opts(m_pAudioSwrCtx, av_get_default_channel_layout(m_channel),
                           AV_SAMPLE_FMT_S16,
                           decodedFrame->sample_rate / m_speed,
                           av_get_default_channel_layout(m_audioCodecCtx->channels),
                           m_audioCodecCtx->sample_fmt,
                           decodedFrame->sample_rate,
                           0, nullptr);

        if (m_pAudioSwrCtx != nullptr){
            swr_init(m_pAudioSwrCtx);
        }
    }
    // BUG FIX: guard against a failed swr_alloc() instead of passing nullptr
    // to swr_convert() below.
    if (m_pAudioSwrCtx == nullptr)
        return;

    // 输出样本数 = 输入样本数 * 输出采样率 / 输入采样率
    int out_count = (int64_t)decodedFrame->nb_samples * (decodedFrame->sample_rate / m_speed) / decodedFrame->sample_rate/* + 256*/;
    int bufSize = av_samples_get_buffer_size(nullptr, m_channel, out_count, AV_SAMPLE_FMT_S16, 0);
    if (bufSize <= 0)
        return;

    if (!m_pSwrBuffer || m_swrBuffSize != bufSize) {
        // BUG FIX: check realloc() and keep the old buffer on failure; the
        // old code assigned the (possibly null) result straight back,
        // leaking the buffer and crashing in memset().
        uint8_t *grown = (uint8_t *)realloc(m_pSwrBuffer, bufSize);
        if (!grown)
            return;
        m_pSwrBuffer = grown;
        m_swrBuffSize = bufSize;
    }

    memset(m_pSwrBuffer, 0, m_swrBuffSize);

    uint8_t *outbuf[2] = { m_pSwrBuffer, nullptr };

    int len = swr_convert(m_pAudioSwrCtx, outbuf,
                          out_count,
                          (const uint8_t **)decodedFrame->data, decodedFrame->nb_samples);
    if (len <= 0) {
        return ;
    }

    // Block until the audio device can take the whole buffer (or we are told
    // to stop).
    int freeSpace = 0;
    while (m_bThreadRunning)
    {
        freeSpace = AudioPlayer::instance()->getFreeSpace();
        if (freeSpace >= bufSize) {
            break;
        }
        stdThreadSleep(1);
    }

    if (!m_bThreadRunning) {
        return;
    }

    //当前的倍速播放会出现变调的现象，暂时无法实现变速不变调。
    // (Speed changes currently alter pitch; true time-stretching is TODO.)
    AudioPlayer::instance()->WriteAudioData((const char*)m_pSwrBuffer, m_swrBuffSize);
}

void xh_AVCodecControler::SetMediaStatusPlay()
{
    // Rebase the clock first so frames that became "due" during the pause
    // are not rushed out in a burst.
    recalculateStartTime();
    m_eMediaPlayStatus = STATUS_PLAYING;
}

void xh_AVCodecControler::SetMediaStatusPause()
{
    // Workers poll this status and idle while it is STATUS_PAUSE.
    m_eMediaPlayStatus = STATUS_PAUSE;
}

void xh_AVCodecControler::seekMedia(float nPos, MediaType mediaType)
{
    if (nPos < 0)
        return;

    if (m_formatCtx == nullptr)
        return;

    m_bThreadRunning = false;
    m_bReadFileEOF = false;

    if (mediaType == VIDEO) {
        qint64 seekPos = nPos / av_q2d(m_vStreamTimeRational);
        if (m_videoStreamIdx >= 0 && m_audioStreamIdx >= 0) {
            av_seek_frame(m_formatCtx, m_videoStreamIdx, seekPos, AVSEEK_FLAG_BACKWARD);
        }

        m_currVideoTimeStamp = nPos * 1000;
        m_currAudioTimeStamp = nPos * 1000;

        qDebug() << "seekPos: " << seekPos;
        qDebug() << "m_currVideoTimeStamp: " << m_currVideoTimeStamp;
    }

    while (!m_videoPktQueue.isEmpty()) {
        freePacket(m_videoPktQueue.dequeue());
    }

    while (!m_audioPktQueue.isEmpty()) {
        freePacket(m_audioPktQueue.dequeue());
    }

    waitAllThreadsExit();

    startMediaProcessThreads();
}

// Change playback speed: rebuild the audio resampler with the scaled output
// rate, refresh the filter graph and rebase the playback clock.
void xh_AVCodecControler::setSpeed(double speed)
{
    m_speed = speed;

    // Drop the current resampler so it is rebuilt with the new rate.
    if (m_pAudioSwrCtx) {
        swr_free(&m_pAudioSwrCtx);
        m_pAudioSwrCtx = nullptr;
    }

    // BUG FIX: guard against a missing audio stream — the old code
    // dereferenced m_audioCodecCtx unconditionally. (The previous
    // 'if (m_pAudioSwrCtx == nullptr)' wrapper was also always true.)
    if (m_audioCodecCtx) {
        m_pAudioSwrCtx = swr_alloc();

        swr_alloc_set_opts(m_pAudioSwrCtx, av_get_default_channel_layout(m_channel),
                           AV_SAMPLE_FMT_S16,
                           m_audioCodecCtx->sample_rate / m_speed,
                           av_get_default_channel_layout(m_audioCodecCtx->channels),
                           m_audioCodecCtx->sample_fmt,
                           m_audioCodecCtx->sample_rate,
                           0, nullptr);

        if (m_pAudioSwrCtx != nullptr){
            swr_init(m_pAudioSwrCtx);
        }

        initAudioFilter();
    }

    recalculateStartTime();
}

void xh_AVCodecControler::setMuted(bool muted)
{
    // Mute is handled entirely by the audio sink; decoding keeps running.
    AudioPlayer::instance()->setMuted(muted);
}

// Error-handling macro: on a negative FFmpeg return code, log the
// human-readable error string and return the code from the enclosing
// function. (No comments inside the macro body: '//' would swallow the
// line-continuation backslashes.)
#define CHECK_AV_ERROR(ret, errMsg) \
    if (ret < 0) { \
        char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0}; \
        av_strerror(ret, errbuf, sizeof(errbuf)); \
        qDebug() << errMsg << ": " << errbuf ; \
        return ret; \
    }

int xh_AVCodecControler::initAudioFilter() {
    // 初始化音频过滤器
    if (m_filterGraph) {
        // 释放资源
        avfilter_graph_free(&m_filterGraph);
        m_filterGraph = nullptr;
    }

    m_filterGraph = avfilter_graph_alloc();
    if (!m_filterGraph) {
        qDebug() << "Could not allocate filter graph" ;
        return -1;
    }

    int ret;
    // 创建音频输入过滤器
    AVFilter* bufferSrc = avfilter_get_by_name("abuffer");

    m_bufferSrcContext = avfilter_graph_alloc_filter(m_filterGraph, bufferSrc, "src");
//    int ret = avfilter_graph_create_filter(&m_bufferSrcContext, bufferSrc, "in", nullptr, nullptr, m_filterGraph);
//    CHECK_AV_ERROR(ret, "Could not create buffer source filter")

    // 设置音频输入过滤器参数
    AVSampleFormat sampleFmt = m_audioCodecCtx->sample_fmt;
    int sampleRate = m_audioCodecCtx->sample_rate;
    int channelLayout = m_audioCodecCtx->channel_layout;
    std::string args = "sample_rate=" + std::to_string(sampleRate) +
                       ":sample_fmts=" + av_get_sample_fmt_name(sampleFmt) +
                       ":channel_layout=" + std::to_string(channelLayout);
    ret = avfilter_init_str(m_bufferSrcContext, "sample_rate=8000:sample_fmt=s16:channel_layout=mono");
    //ret = avfilter_init_str(m_bufferSrcContext, args.c_str());
    CHECK_AV_ERROR(ret, "Could not initialize buffer source filter")

    // 创建音频输出过滤器
    AVFilter* bufferSink = avfilter_get_by_name("abuffersink");
    ret = avfilter_graph_create_filter(&m_bufferSinkContext, bufferSink, "out", nullptr, nullptr, m_filterGraph);
    CHECK_AV_ERROR(ret, "Could not create buffer sink filter")

    // 初始化音频输出过滤器参数
    ret = avfilter_init_str(m_bufferSinkContext, nullptr);
    CHECK_AV_ERROR(ret, "Could not initialize buffer sink filter")

    // 创建asetrate过滤器
    AVFilterContext* asetrateContext = nullptr;
    AVFilter* asetrateFilter = avfilter_get_by_name("asetrate");
    ret = avfilter_graph_create_filter(&asetrateContext, asetrateFilter, "asetrate", QString::number(sampleRate / m_speed).toStdString().c_str(), nullptr, m_filterGraph);
    CHECK_AV_ERROR(ret, "Could not create asetrate filter")

    // 配置asetrate过滤器
    ret = avfilter_init_str(asetrateContext, nullptr);
    CHECK_AV_ERROR(ret, "Could not initialize asetrate filter")

    // 连接音频输入过滤器、asetrate过滤器和音频输出过滤器
    ret = avfilter_link(m_bufferSrcContext, 0, asetrateContext, 0);
    CHECK_AV_ERROR(ret, "Could not link filters")

    ret = avfilter_link(asetrateContext, 0, m_bufferSinkContext, 0);
    CHECK_AV_ERROR(ret, "Could not link filters")

    // 配置过滤器图
    ret = avfilter_graph_config(m_filterGraph, nullptr);
    CHECK_AV_ERROR(ret, "Could not configure filter graph")

    return 0;
}

// Push one decoded frame through the audio filter graph and return the
// filtered frame via *out (caller owns it and must av_frame_free it).
// Returns 0 on success, negative on failure.
// NOTE(review): a single av_buffersink_get_frame() per input frame assumes a
// 1:1 frame mapping; frames the graph buffers internally are fetched on a
// later call — confirm this matches asetrate's behavior.
int xh_AVCodecControler::resampling(AVFrame* in, AVFrame** out)
{
    int ret;
    // Read the input frame and perform the sample-rate conversion.

    // Feed the frame into the graph (KEEP_REF: 'in' is not consumed).
    ret = av_buffersrc_add_frame_flags(m_bufferSrcContext, in, AV_BUFFERSRC_FLAG_KEEP_REF);
    CHECK_AV_ERROR(ret, "Could not add frame to buffer source filter")

    // Pull the processed frame back out of the graph.
    AVFrame* filteredFrame = av_frame_alloc();
    ret = av_buffersink_get_frame(m_bufferSinkContext, filteredFrame);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
        av_frame_free(&filteredFrame);
        CHECK_AV_ERROR(ret, "Could not get frame from buffer sink filter")
        return -1;
    }

    // filteredFrame can be further processed here (e.g. written to a file).
    *out = filteredFrame;
    return 0;
}
