#include "decoder/stream_decoder_worker.hpp"

#include <iomanip>
#include <iostream>
#include <stdexcept>

#include "decoder/common/subtitle_sync_manager.hpp"

using namespace WD;
/// Builds a decoder for one stream: opens the codec described by `codecpar`,
/// sets up a swscale converter to BGR24 (OpenCV channel order), and pre-sizes
/// the destination pixel buffer.
///
/// @param stream_index index of the stream within `fmt_ctx` (also used as camera id)
/// @param fmt_ctx      demuxer context (borrowed, not owned)
/// @param codecpar     codec parameters of the stream to decode
/// @param pkt_queue    queue this worker pops compressed packets from
/// @param frame_cb     invoked once per decoded frame with image + timestamp
/// @throws std::runtime_error if the decoder cannot be found/opened or the
///         scaler/buffer cannot be set up. All FFmpeg resources allocated
///         before the failure point are released before throwing, since the
///         destructor will not run for a half-constructed object.
StreamDecoderWorker::StreamDecoderWorker(int stream_index,
                                         AVFormatContext* fmt_ctx,
                                         AVCodecParameters* codecpar,
                                         std::shared_ptr<ThreadSafeQueue<AVPacket*>> pkt_queue,
                                         std::function<void(const FrameWithTimestamp&)> frame_cb)
    : stream_index_(stream_index),
      fmt_ctx_(fmt_ctx),
      pkt_queue_(pkt_queue),
      frame_cb_(frame_cb)
{
    // avcodec_find_decoder returns nullptr for codecs this FFmpeg build
    // does not support; the original passed that straight into
    // avcodec_alloc_context3 / avcodec_open2.
    const AVCodec* codec = avcodec_find_decoder(codecpar->codec_id);
    if (!codec) {
        throw std::runtime_error("Failed to find decoder for stream codec");
    }
    codec_ctx_ = avcodec_alloc_context3(codec);
    if (!codec_ctx_) {
        throw std::runtime_error("Failed to allocate codec context");
    }
    // Copy width/height/pix_fmt/extradata etc. into the context; this can fail.
    if (avcodec_parameters_to_context(codec_ctx_, codecpar) < 0) {
        avcodec_free_context(&codec_ctx_);  // avoid leaking on throw
        throw std::runtime_error("Failed to copy codec parameters to context");
    }
    if (avcodec_open2(codec_ctx_, codec, nullptr) < 0) {
        avcodec_free_context(&codec_ctx_);
        throw std::runtime_error("Failed to open codec");
    }

    width_ = codec_ctx_->width;
    height_ = codec_ctx_->height;

    // Converter: decoder-native pixel format -> packed BGR24 for cv::Mat.
    sws_ctx_ = sws_getContext(width_, height_, codec_ctx_->pix_fmt,
                              width_, height_, AV_PIX_FMT_BGR24,
                              SWS_BILINEAR, nullptr, nullptr, nullptr);

    if (!sws_ctx_) {
        avcodec_free_context(&codec_ctx_);
        throw std::runtime_error("Failed to initialize sws context");
    }

    // Kept for reference: workaround for deprecated full-range JPEG pixel
    // formats (e.g. AV_PIX_FMT_YUVJ420P) that can trigger swscale warnings.
    // if (codec_ctx_->pix_fmt == AV_PIX_FMT_YUVJ420P) {
    //     sws_setColorspaceDetails(sws_ctx_,
    //                              sws_getCoefficients(SWS_CS_ITU601), 0,
    //                              sws_getCoefficients(SWS_CS_ITU601), 0,
    //                              0, 1 << 16, 1 << 16);
    // }

    // Pre-size the destination buffer for one BGR24 frame (alignment = 1).
    int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_BGR24, width_, height_, 1);
    if (num_bytes < 0) {  // negative AVERROR for invalid dimensions
        sws_freeContext(sws_ctx_);
        sws_ctx_ = nullptr;
        avcodec_free_context(&codec_ctx_);
        throw std::runtime_error("Failed to compute BGR24 buffer size");
    }
    rgb_buffer_.resize(num_bytes);
}

/// Destructor. Delegates to stop() so that the decode thread is joined and the
/// FFmpeg contexts are released even if the owner never called stop().
/// Without this, destroying the object while thread_ is still joinable would
/// invoke std::terminate(), and codec_ctx_/sws_ctx_ would leak.
/// stop() is idempotent, so an explicit stop() followed by destruction is safe.
StreamDecoderWorker::~StreamDecoderWorker() {
    stop();
}

/// Launches the decode loop on a background thread.
/// Idempotent: a second call while the worker is already running is a no-op —
/// the original would assign over a joinable std::thread, which calls
/// std::terminate().
void StreamDecoderWorker::start() {
    if (thread_.joinable()) {
        return;  // already started
    }
    running_ = true;
    thread_ = std::thread(&StreamDecoderWorker::decodeLoop, this);
}

/// Shuts the worker down: signals the decode loop to exit, wakes it if it is
/// blocked on the packet queue, joins the thread, then releases the scaler and
/// decoder contexts. Safe to call more than once.
void StreamDecoderWorker::stop() {
    // Flip the flag first, then wake any pop() blocked on the queue.
    running_ = false;
    pkt_queue_->notifyAll();

    // The thread must be joined before tearing down FFmpeg state it may
    // still be touching.
    if (thread_.joinable()) {
        thread_.join();
    }

    if (sws_ctx_ != nullptr) {
        sws_freeContext(sws_ctx_);
        sws_ctx_ = nullptr;
    }
    if (codec_ctx_ != nullptr) {
        avcodec_free_context(&codec_ctx_);  // also resets codec_ctx_ to nullptr
    }
}

/// Thread body: pops compressed packets from the queue, decodes them, converts
/// each picture to BGR24, and delivers it to frame_cb_ wrapped in a
/// FrameWithTimestamp. Runs until running_ is cleared by stop().
void StreamDecoderWorker::decodeLoop() {
    // `frame` receives decoded pictures; `rgb_frame` is never decoded into —
    // it only aliases rgb_buffer_ with BGR24 plane pointers/strides for sws_scale.
    AVFrame* frame = av_frame_alloc();
    AVFrame* rgb_frame = av_frame_alloc();
    if (!frame || !rgb_frame) {
        // Fix: the original dereferenced these unchecked — on allocation
        // failure that is a null-pointer crash. av_frame_free(nullptr) is safe.
        av_frame_free(&frame);
        av_frame_free(&rgb_frame);
        return;
    }
    av_image_fill_arrays(rgb_frame->data, rgb_frame->linesize, rgb_buffer_.data(),
                         AV_PIX_FMT_BGR24, width_, height_, 1);

    while (running_) {
        AVPacket* pkt = nullptr;
        // pop() blocks until a packet arrives or running_ flips; false means
        // "woken without a packet" (shutdown), so just re-check the flag.
        if (!pkt_queue_->pop(pkt, running_)) continue;

        if (avcodec_send_packet(codec_ctx_, pkt) == 0) {
            // One packet may yield zero or more frames; drain them all.
            while (avcodec_receive_frame(codec_ctx_, frame) == 0) {
                int64_t pts = frame->best_effort_timestamp;
                // Map the stream PTS to a capture timestamp, if one is known.
                auto ts_opt = WD::SubtitleSyncManager::instance().query(pts);
                sws_scale(sws_ctx_, frame->data, frame->linesize, 0, height_,
                          rgb_frame->data, rgb_frame->linesize);

                // Zero-copy wrapper around the shared conversion buffer; the
                // clone below is mandatory because rgb_buffer_ is overwritten
                // by the next decoded frame.
                cv::Mat img(height_, width_, CV_8UC3, rgb_buffer_.data(), rgb_frame->linesize[0]);

                FrameWithTimestamp fwt;
                if (ts_opt.has_value()) {
                    double capture_time = ts_opt.value();
                    fwt.timestamp = capture_time;
                    // Log the mapped frame timestamp for this stream.
                    std::cout << "[视频流 " << stream_index_ << "] time: " 
                         << std::fixed << std::setprecision(6) << capture_time << std::endl;
                }
                fwt.image = img.clone();  // deep copy so the callback owns stable pixels
                fwt.camera_id = stream_index_;

                frame_cb_(fwt);
            }
        }
        // Free the packet whether the decoder accepted it or not.
        av_packet_free(&pkt);
    }

    av_frame_free(&frame);
    av_frame_free(&rgb_frame);
}
