#ifndef AV_SYNC_H
#define AV_SYNC_H

// C++ standard library
#include <condition_variable>
#include <cstdint>
#include <functional>
#include <memory>
#include <mutex>
#include <queue>
#include <stdexcept>

// Qt
#include <QImage>
#include <QObject>
#include <QTimer>

// Project
#include "Data/CircularBuffer.h"
#include "Data/FileInfo.h"
#include "Media/Decoder/BaseDecoder.h"
#include "Optimization/OptimizationManager.h"
class PlayManager;
using SyncCallback = std::function<void(int)>;

// The AVSync class is responsible for audio/video synchronization.
class AVSync : public QObject{
    Q_OBJECT
public:
//PlayManager& playManager, SyncCallback sync_callback
    // 获取 AVSync 的单例实例。
    static AVSync& getInstance() {
        static AVSync instance;
        return instance;
    }
    // 将解码后的音频帧添加到音频缓冲区。
    void add_audio_frame(AVFrame *);

    // 将解码后的视频帧添加到视频缓冲区。
    void add_video_frame(AVFrame *);


    // 启动音视频同步处理。
    void start();

    // 停止音视频同步处理。
    void stop();

    void init();

    void delay(int millisecondsToWait);

    // 检查音视频同步状态并执行相应的调整。
    inline int check_sync_status(int & millisecondsdelay) noexcept {
        if (video_buffer->size() == 0 || audio_buffer->size() == 0) {
      //      std::cout << "Warning: video_buffer or audio_buffer is empty" << std::endl;
    //        std::cout << "video_buffer size: " << video_buffer->size() << std::endl;
    //        std::cout << "audio_buffer size: " << audio_buffer->size() << std::endl;
            return -1;
        }
        AVFrame & video_frame = video_buffer->front();
        AVFrame &  audio_frame = audio_buffer->front();
        //get timestamp(秒)
        double video_pts = video_frame.pts * m_video_time_base * 1000;
        double audio_pts = audio_frame.pts * m_audio_time_base * 1000;

        double diff = video_pts - audio_pts;

         positionChanged(static_cast<qint64>(audio_pts));
     //   std::cout << "sync video_pts: " << video_pts << std::endl;
    //    std::cout << "sync audio_pts: " << audio_pts << std::endl;
    //    std::cout << "sync diff: " << diff << std::endl; // diff单位为毫秒
        millisecondsdelay = static_cast<int>(diff > 0 ? diff : 0);
        return 0;
    }




    std::function<float(void*)> getSample;
    template<typename T>
    std::function<T(void*)> makeSampleGetter(AVSampleFormat format) {
        switch (format) {
        case AV_SAMPLE_FMT_FLT:
            return [](void* data) { return static_cast<T>(*reinterpret_cast<float*>(data)); };
        case AV_SAMPLE_FMT_S16:
            return [](void* data) { return static_cast<T>(*reinterpret_cast<int16_t*>(data)); };
        // Add more cases as needed.
        default:
            throw std::invalid_argument("Unsupported format");
        }
    }
  //  template<typename Container>
    void setAudioSyncInfo(AVCodecContext* codec_ctx,FileInfo & fileinfo) {
        // Initialize the sample getter function based on the sample format
  //       getSample = makeSampleGetter<Container::value_type>(codec_ctx->sample_fmt);

         m_audio_channel_layout = codec_ctx->ch_layout;
         m_audio_time_base = fileinfo.audio_type_time_base;
         //check value if error print
            // std::cout << "audio_channel_layout: " << m_audio_channel_layout.nb_channels << std::endl;
            // std::cout << "audio_time_base: " << m_audio_tinme_base << std::endl;
    }
    void setVideoSyncInfo(AVCodecContext* codec_ctx,FileInfo & fileinfo) {
        // Initialize the sample getter function based on the sample format 
         m_video_frame_rate = fileinfo.video_type_avg_frame_rate;
         m_video_time_base = fileinfo.video_type_time_base;
         //check info
            // std::cout << "video_frame_rate: " << m_video_frame_rate << std::endl;
            // std::cout << "video_tinme_base: " << m_video_tinme_base << std::endl;
    }

    template<typename Container>
    Container Convert_framedata(AVFrame & frame) {
        Container container;
        // Process the decoded frame data here.
        uint8_t** data = frame.extended_data;
        //const AVChannelLayout & av_layout = codec_ctx.ch_layout;
        int bytes_per_sample = av_get_bytes_per_sample(static_cast<enum AVSampleFormat>(frame.format));
        // Add the decoded audio data to container
        for (int i = 0; i < frame.nb_samples; ++i) {
            for (int ch = 0; ch < m_audio_channel_layout.nb_channels; ++ch) {
               // typename Container::value_type sample = getSample(data[ch] + i * bytes_per_sample);
                typename Container::value_type sample =  *((float*)(data[ch] + i * bytes_per_sample));
                container.push(sample); // Assumes Container has a push method
            }
        }
        return container;
    }
    void sendQImage();

    // 获取同步的数据
    template<typename Container>
    Container getSyncedData(int & nb_samples) {
        // 从缓冲区获取数据，进行同步
        AVFrame & frame = audio_buffer->pop();
        nb_samples = frame.nb_samples;
        auto  audiodata = Convert_framedata<std::queue<float>>(frame);
        
       // av_frame_unref(&frame);
        return audiodata;
    }
    void send_video_frame(){
        
        sendQImage();
    }
    enum SyncStatus{
        Null,
        Busy,
        SYNCED
    };
    int getSyncStatus(){
        return m_sync_status;
    }

   ~AVSync();



private:
    AVSync();
    // 删除复制构造函数和赋值操作符。
    AVSync(const AVSync&) = delete;
    AVSync& operator=(const AVSync&) = delete;
 //thread safe
    std::mutex mutex_; 
    std::condition_variable cv_;
//output data
    std::queue<QImage> frameQueue_;
// audio and video sync attribute
    double m_video_frame_rate = 0; // 视频帧率。
    AVChannelLayout m_audio_channel_layout; // 音频通道布局。
    float m_video_time_base = 0; // 视频时间基准。
    float m_audio_time_base = 0; // 音频时间基准。
// thread attribute
    QTimer * time_timer; // 定时器，用于定时检查音视频同步状态。
    int m_sync_status;
// 用于音视频同步的其他成员变量。
    CircularBuffer<AVFrame> *audio_buffer = nullptr; // 音频缓冲区。
    CircularBuffer<AVFrame> *video_buffer = nullptr; // 视频缓冲区。

    QImage frameToQImage(const AVFrame* frame);

signals:
    void newFrame(const QImage &frame);
    void positionChanged(int);
    // 缓冲区和同步处理的其他成员变量。
private slots:
    void updateCurrentTime();
};
#endif // AV_SYNC_H
