#include "ReplayEncoder.h"

#include <dlfcn.h>
#include <fcntl.h>
#include <pthread.h>
#include <sched.h>
#include <unistd.h>

#include <algorithm>
#include <atomic>
#include <cerrno>
#include <chrono>
#include <cinttypes>
#include <cmath>
#include <condition_variable>
#include <cstring>
#include <mutex>
#include <queue>

#include "hilog/log.h"
#include "multimedia/image_framework/image_pixel_map_napi.h"
#include "multimedia/player_framework/native_avbuffer.h"
#include "multimedia/player_framework/native_avcodec_base.h"
#include "multimedia/player_framework/native_avcodec_videoencoder.h"
#include "multimedia/player_framework/native_averrors.h"
#include "multimedia/player_framework/native_avformat.h"
#include "multimedia/player_framework/native_avmuxer.h"
#include "native_buffer/native_buffer.h"

namespace {
// HiLog domain and tag shared by every log statement in this translation unit.
constexpr unsigned int REPLAY_LOG_DOMAIN = 0xD005760;
constexpr const char REPLAY_LOG_TAG[] = "ReplayEncoder";

#define REPLAY_LOG(level, fmt, ...) OH_LOG_Print(LOG_APP, level, REPLAY_LOG_DOMAIN, REPLAY_LOG_TAG, fmt, ##__VA_ARGS__)

// Signatures of the PixelMap NDK entry points that are resolved at runtime via
// dlopen/dlsym in EnsurePixelMapFuncs(). They are loaded dynamically --
// presumably because the hosting library name varies across OS versions
// (libpixelmap_ndk vs libimage_ndk) -- TODO confirm.
typedef int32_t (*OH_PixelMap_GetImageInfo_Func)(NativePixelMap *pixelMap, OHOS::Media::OhosPixelMapInfo *info);
typedef int32_t (*OH_PixelMap_AccessPixels_Func)(NativePixelMap *pixelMap, void **pixels);
typedef int32_t (*OH_PixelMap_UnAccessPixels_Func)(NativePixelMap *pixelMap);

// Lazily-resolved function pointers; populated once by EnsurePixelMapFuncs().
static OH_PixelMap_GetImageInfo_Func g_getImageInfo = nullptr;
static OH_PixelMap_AccessPixels_Func g_accessPixels = nullptr;
static OH_PixelMap_UnAccessPixels_Func g_unAccessPixels = nullptr;
// Resolves the PixelMap NDK symbols on first use.
// Returns true when all three function pointers are available.
// On failure, any partially-resolved pointers are cleared and the library
// handle is closed so a later call can retry cleanly (the original leaked the
// handle and left stale partial pointers behind).
bool EnsurePixelMapFuncs() {
    if (g_getImageInfo && g_accessPixels && g_unAccessPixels) return true;

    // Try loading from libpixelmap_ndk.z.so or libimage_ndk.z.so.
    // OpenHarmony API 12+ usually puts these in libpixelmap_ndk.z.so.
    void* handle = dlopen("libpixelmap_ndk.z.so", RTLD_LAZY);
    if (!handle) {
        handle = dlopen("libimage_ndk.z.so", RTLD_LAZY);
    }
    if (!handle) {
        REPLAY_LOG(LOG_ERROR, "EnsurePixelMapFuncs: failed to dlopen libpixelmap_ndk or libimage_ndk");
        return false;
    }

    g_getImageInfo = (OH_PixelMap_GetImageInfo_Func)dlsym(handle, "OH_PixelMap_GetImageInfo");
    g_accessPixels = (OH_PixelMap_AccessPixels_Func)dlsym(handle, "OH_PixelMap_AccessPixels");
    g_unAccessPixels = (OH_PixelMap_UnAccessPixels_Func)dlsym(handle, "OH_PixelMap_UnAccessPixels");

    if (!g_getImageInfo || !g_accessPixels || !g_unAccessPixels) {
        REPLAY_LOG(LOG_ERROR, "EnsurePixelMapFuncs: failed to dlsym functions");
        // Reset partial results and release the library so the fast-path check
        // above cannot be half-satisfied on a subsequent call.
        g_getImageInfo = nullptr;
        g_accessPixels = nullptr;
        g_unAccessPixels = nullptr;
        dlclose(handle);
        return false;
    }
    // Intentionally keep `handle` open for the process lifetime: the resolved
    // symbols must remain valid as long as the pointers are used.
    return true;
}

// Pairing of a codec buffer with its queue index, as delivered by the
// OnNeedInputBuffer callback and later consumed by Encode().
struct InputBufferInfo {
    uint32_t index = 0;
    OH_AVBuffer *buffer = nullptr; // owned by the codec, not by this struct
};

// Encoded output buffer plus the attributes (size/offset/pts/flags) captured
// at callback time, consumed by DrainOutputLoop().
struct OutputBufferInfo {
    uint32_t index = 0;
    OH_AVBuffer *buffer = nullptr; // owned by the codec, not by this struct
    OH_AVCodecBufferAttr attr {0, 0, 0, AVCODEC_BUFFER_FLAGS_NONE};
};

// Saturates an int into the [0, 255] byte range.
inline uint8_t ClampToByte(int value)
{
    return static_cast<uint8_t>(std::clamp(value, 0, 255));
}
} // namespace

// Shared state between the codec callback thread, Encode() callers and the
// drain thread. Queues are protected by their respective mutex/condvar pair.
struct ReplayEncoder::CodecContext {
    // Input side: free codec buffers handed over by OnNeedInputBuffer,
    // consumed by Encode().
    std::mutex inputMutex;
    std::condition_variable inputCond;
    std::queue<InputBufferInfo> inputQueue;

    // Output side: encoded buffers queued by OnNewOutputBuffer, consumed by
    // DrainOutputLoop().
    std::mutex outputMutex;
    std::condition_variable outputCond;
    std::queue<OutputBufferInfo> outputQueue;

    // Cross-thread status flags. They are written from the codec callback
    // thread and may be read from other threads without holding a lock, so
    // they must be atomic (they were plain bools, which is a data race).
    std::atomic<bool> eosReceived{false};
    std::atomic<bool> fatalError{false};
    ReplayEncoder *owner = nullptr; // back-pointer for the static callbacks
};

// Static trampoline for the codec's error callback: forwards the error code
// to the owning ReplayEncoder instance.
void ReplayEncoder::OnCodecError(OH_AVCodec *codec, int32_t errorCode, void *userData)
{
    (void)codec;
    auto *context = static_cast<CodecContext *>(userData);
    if (context == nullptr || context->owner == nullptr) {
        return;
    }
    context->owner->HandleCodecError(errorCode);
}

// Static trampoline for the codec's stream-format-change callback: notifies
// the owning ReplayEncoder instance (the format itself is not inspected).
void ReplayEncoder::OnCodecFormatChange(OH_AVCodec *codec, OH_AVFormat *format, void *userData)
{
    (void)codec;
    (void)format;
    auto *context = static_cast<CodecContext *>(userData);
    if (context == nullptr || context->owner == nullptr) {
        return;
    }
    context->owner->HandleFormatChange();
}

// Codec callback: a free input buffer is available. Queues it for Encode()
// and wakes any waiting producer.
// Fix: the original logged ctx->inputQueue.size() AFTER releasing inputMutex,
// a data race with concurrent Encode() consumers; the size is now captured
// while the lock is held.
void ReplayEncoder::OnNeedInputBuffer(OH_AVCodec *codec, uint32_t index, OH_AVBuffer *buffer, void *userData)
{
    (void)codec;
    auto *ctx = static_cast<CodecContext *>(userData);
    if (!ctx || !ctx->owner || !buffer) {
        return;
    }

    size_t queuedCount = 0;
    {
        std::lock_guard<std::mutex> lock(ctx->inputMutex);
        ctx->inputQueue.push({index, buffer});
        queuedCount = ctx->inputQueue.size(); // snapshot under the lock
    }
    ctx->inputCond.notify_one();
    REPLAY_LOG(LOG_DEBUG, "OnNeedInputBuffer index=%{public}u queue=%{public}zu", index, queuedCount);
}

// Codec callback: an encoded output buffer is ready. Captures its attributes,
// queues it for the drain thread, and records EOS when flagged.
// Fix: the original wrote ctx->eosReceived without holding any lock; the
// write now happens under outputMutex so the drain side observes it
// consistently with the queued EOS buffer.
void ReplayEncoder::OnNewOutputBuffer(OH_AVCodec *codec, uint32_t index, OH_AVBuffer *buffer, void *userData)
{
    (void)codec;
    auto *ctx = static_cast<CodecContext *>(userData);
    if (!ctx || !ctx->owner || !buffer) {
        return;
    }

    OutputBufferInfo info;
    info.index = index;
    info.buffer = buffer;
    // On failure, info.attr keeps its zero/NONE defaults from the struct.
    int32_t ret = OH_AVBuffer_GetBufferAttr(buffer, &info.attr);
    if (ret != AV_ERR_OK) {
        REPLAY_LOG(LOG_ERROR, "OnNewOutputBuffer GetAttr failed ret=%{public}d", ret);
    }

    {
        std::lock_guard<std::mutex> lock(ctx->outputMutex);
        if (info.attr.flags & AVCODEC_BUFFER_FLAGS_EOS) {
            ctx->eosReceived = true;
        }
        ctx->outputQueue.push(info);
    }
    ctx->outputCond.notify_one();

    REPLAY_LOG(LOG_DEBUG,
               "OnNewOutputBuffer index=%{public}u flags=%{public}u size=%{public}u pts=%{public}" PRId64,
               index, info.attr.flags, info.attr.size, info.attr.pts);
}

// Process-wide singleton accessor (Meyers singleton; initialization is
// thread-safe since C++11).
ReplayEncoder &ReplayEncoder::Instance()
{
    static ReplayEncoder singleton;
    return singleton;
}

ReplayEncoder::ReplayEncoder() = default;

// Destructor: ResetState() joins the drain thread and releases the native
// encoder/muxer/fd even if Finalize() was never called.
ReplayEncoder::~ReplayEncoder()
{
    ResetState();
}

// Tears down every pipeline object and returns all members to their
// post-construction defaults. Safe to call repeatedly; used by Init() (to
// clear any previous session), Finalize() and the destructor. Order matters:
// the drain thread is woken and joined before the encoder/muxer it uses are
// destroyed.
void ReplayEncoder::ResetState()
{
    stopRequested_.store(true); // wake threads blocked on the queues below
    if (context_) {
        context_->inputCond.notify_all();
        context_->outputCond.notify_all();
    }

    // Join before destroying anything the drain thread may still touch.
    if (drainThread_.joinable()) {
        drainThread_.join();
    }

    if (videoEncoder_ != nullptr) {
        if (encoderStarted_) {
            REPLAY_LOG(LOG_INFO, "Stopping video encoder");
            OH_VideoEncoder_Flush(videoEncoder_); // discard pending buffers before stopping
            OH_VideoEncoder_Stop(videoEncoder_);
        }
        OH_VideoEncoder_Destroy(videoEncoder_);
        videoEncoder_ = nullptr;
    }
    
    if (nativeWindow_ != nullptr) {
        OH_NativeWindow_DestroyNativeWindow(nativeWindow_);
        nativeWindow_ = nullptr;
    }

    if (muxer_ != nullptr) {
        if (muxerStarted_) {
            REPLAY_LOG(LOG_INFO, "Stopping muxer");
            OH_AVMuxer_Stop(muxer_);
        }
        OH_AVMuxer_Destroy(muxer_);
        muxer_ = nullptr;
    }

    // Close our dup()'ed copy of the caller's output fd (see Init()).
    if (duplicatedFd_ >= 0) {
        close(duplicatedFd_);
        duplicatedFd_ = -1;
    }

    // Reset all bookkeeping so a subsequent Init() starts from a clean slate.
    context_.reset();
    nv12Buffer_.clear();
    expectedFrameSize_ = 0;
    yStride_ = 0;
    uvStride_ = 0;
    yPlaneSize_ = 0;
    uvPlaneSize_ = 0;
    firstFrame_ = true;
    frameCount_ = 0;
    writtenSamples_ = 0;
    lastPtsUs_ = 0;
    basePtsUs_ = 0;
    hasBasePts_ = false;
    frameDurationUs_ = 0;
    muxerStarted_ = false;
    encoderStarted_ = false;
    ready_ = false;
    width_ = 0;
    height_ = 0;
    fps_ = 0;
    mimeType_.clear();
    stopRequested_.store(false); // re-arm for the next session
}

// Builds the full recording pipeline: validates arguments, aligns the target
// resolution, dup()s the output fd, creates the MP4 muxer and its video
// track, creates/configures/starts the video encoder in buffer (non-surface)
// mode, then starts the muxer and the output drain thread. Any failure tears
// everything down via ResetState() and returns false.
//
// @param fd        writable file descriptor for the output MP4 (dup()'ed internally)
// @param width     source frame width in pixels (> 0)
// @param height    source frame height in pixels (> 0)
// @param fps       target frame rate (> 0)
// @param mimeType  encoder MIME type; empty string defaults to "video/avc"
// @return true when the encoder is ready to accept frames via Encode()
bool ReplayEncoder::Init(int32_t fd, int32_t width, int32_t height, int32_t fps, const std::string &mimeType)
{
    REPLAY_LOG(LOG_INFO,
               "Init begin fd=%{public}d width=%{public}d height=%{public}d fps=%{public}d mime=%{public}s",
               fd, width, height, fps, mimeType.c_str());

    ResetState();

    if (fd < 0) {
        REPLAY_LOG(LOG_ERROR, "Init failed: invalid fd");
        return false;
    }
    if (width <= 0 || height <= 0 || fps <= 0) {
        REPLAY_LOG(LOG_ERROR, "Init failed: invalid dimension or fps");
        return false;
    }
    
    // HarmonyOS video encoders typically require a minimum resolution of
    // 256x256 and dimensions that are multiples of 16. Raising the minimum
    // resolution avoids encoder configure failures (error code 3: invalid
    // argument). Alignment preserves the original aspect ratio so the image
    // is not distorted.
    constexpr int32_t MIN_SIZE = 256; // minimum size for the larger of width/height
    constexpr int32_t ALIGNMENT = 16; // resolution alignment requirement
    
    // Compute the original aspect ratio (guarded, though width/height are >0 here).
    double aspectRatio = (width > 0 && height > 0) ? static_cast<double>(width) / static_cast<double>(height) : 1.0;
    
    // Determine the target size: keep the aspect ratio and ensure the larger
    // side is at least MIN_SIZE.
    int32_t targetWidth = width;
    int32_t targetHeight = height;
    
    if (targetWidth < MIN_SIZE && targetHeight < MIN_SIZE) {
        // Both sides below the minimum: scale up proportionally so the larger
        // side reaches MIN_SIZE.
        if (targetWidth >= targetHeight) {
            targetWidth = MIN_SIZE;
            targetHeight = static_cast<int32_t>(std::ceil(static_cast<double>(MIN_SIZE) / aspectRatio));
        } else {
            targetHeight = MIN_SIZE;
            targetWidth = static_cast<int32_t>(std::ceil(static_cast<double>(MIN_SIZE) * aspectRatio));
        }
    } else if (targetWidth < MIN_SIZE) {
        // Width below the minimum: scale up proportionally.
        targetWidth = MIN_SIZE;
        targetHeight = static_cast<int32_t>(std::ceil(static_cast<double>(MIN_SIZE) / aspectRatio));
    } else if (targetHeight < MIN_SIZE) {
        // Height below the minimum: scale up proportionally.
        targetHeight = MIN_SIZE;
        targetWidth = static_cast<int32_t>(std::ceil(static_cast<double>(MIN_SIZE) * aspectRatio));
    }
    
    // Round both dimensions up to multiples of 16.
    int32_t alignedWidth = ((targetWidth + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT;
    int32_t alignedHeight = ((targetHeight + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT;
    
    if (alignedWidth != width || alignedHeight != height) {
        REPLAY_LOG(LOG_WARN, "Resolution aligned from %{public}dx%{public}d to %{public}dx%{public}d", 
                   width, height, alignedWidth, alignedHeight);
    }

    // Own a private copy of the fd; the caller may close theirs.
    duplicatedFd_ = dup(fd);
    if (duplicatedFd_ < 0) {
        REPLAY_LOG(LOG_ERROR, "Init failed: dup errno=%{public}d", errno);
        return false;
    }

    mimeType_ = mimeType.empty() ? "video/avc" : mimeType;
    width_ = alignedWidth;
    height_ = alignedHeight;
    fps_ = fps;
    frameDurationUs_ = fps_ > 0 ? (1000000LL / fps_) : 0;
    if (frameDurationUs_ <= 0) {
        frameDurationUs_ = 1000; // fallback to 1ms step to keep pts monotonic
    }
    // NV12 plane geometry: rows are padded to a 64-byte stride -- presumably
    // the hardware encoder's row-alignment requirement; TODO confirm.
    auto alignStride = [](int32_t value) -> uint32_t {
        constexpr int32_t alignment = 64;
        return static_cast<uint32_t>((value + alignment - 1) / alignment * alignment);
    };
    yStride_ = alignStride(width_);
    uvStride_ = yStride_;
    yPlaneSize_ = static_cast<size_t>(yStride_) * static_cast<size_t>(height_);
    uvPlaneSize_ = static_cast<size_t>(uvStride_) * static_cast<size_t>((height_ + 1) / 2);
    expectedFrameSize_ = yPlaneSize_ + uvPlaneSize_;
    nv12Buffer_.resize(expectedFrameSize_);
    REPLAY_LOG(LOG_INFO,
               "NV12 stride=%{public}u yPlane=%{public}llu uvPlane=%{public}llu total=%{public}llu",
               yStride_, static_cast<unsigned long long>(yPlaneSize_), static_cast<unsigned long long>(uvPlaneSize_),
               static_cast<unsigned long long>(expectedFrameSize_));

    muxer_ = OH_AVMuxer_Create(duplicatedFd_, AV_OUTPUT_FORMAT_MPEG_4);
    if (!muxer_) {
        REPLAY_LOG(LOG_ERROR, "OH_AVMuxer_Create failed");
        ResetState();
        return false;
    }

    OH_AVFormat *trackFormat = OH_AVFormat_CreateVideoFormat(mimeType_.c_str(), width_, height_);
    if (!trackFormat) {
        REPLAY_LOG(LOG_ERROR, "CreateVideoFormat failed");
        ResetState();
        return false;
    }

    // Compute the bitrate and clamp to a minimum (too-low values can make the
    // encoder configure step fail).
    int64_t bitrate = static_cast<int64_t>(width_) * static_cast<int64_t>(height_) * fps_ * 3 / 4;
    constexpr int64_t MIN_BITRATE = 100000; // minimum 100 kbps
    if (bitrate < MIN_BITRATE) {
        bitrate = MIN_BITRATE;
    }
    OH_AVFormat_SetDoubleValue(trackFormat, OH_MD_KEY_FRAME_RATE, static_cast<double>(fps_));
    OH_AVFormat_SetIntValue(trackFormat, OH_MD_KEY_WIDTH, width_);
    OH_AVFormat_SetIntValue(trackFormat, OH_MD_KEY_HEIGHT, height_);
    OH_AVFormat_SetLongValue(trackFormat, OH_MD_KEY_BITRATE, bitrate);

    int32_t ret = OH_AVMuxer_AddTrack(muxer_, &videoTrackId_, trackFormat);
    OH_AVFormat_Destroy(trackFormat);
    if (ret != AV_ERR_OK) {
        REPLAY_LOG(LOG_ERROR, "AddTrack failed ret=%{public}d", ret);
        ResetState();
        return false;
    }

    videoEncoder_ = OH_VideoEncoder_CreateByMime(mimeType_.c_str());
    if (!videoEncoder_) {
        REPLAY_LOG(LOG_ERROR, "CreateByMime failed");
        ResetState();
        return false;
    }

    OH_AVFormat *encoderFormat = OH_AVFormat_Create();
    if (!encoderFormat) {
        REPLAY_LOG(LOG_ERROR, "AVFormat_Create failed");
        ResetState();
        return false;
    }
    OH_AVFormat_SetIntValue(encoderFormat, OH_MD_KEY_WIDTH, width_);
    OH_AVFormat_SetIntValue(encoderFormat, OH_MD_KEY_HEIGHT, height_);
    OH_AVFormat_SetDoubleValue(encoderFormat, OH_MD_KEY_FRAME_RATE, static_cast<double>(fps_));
    OH_AVFormat_SetIntValue(encoderFormat, OH_MD_KEY_PIXEL_FORMAT, AV_PIXEL_FORMAT_NV12);
    // Use default bitrate mode (usually VBR) for better compatibility
    // OH_AVFormat_SetIntValue(encoderFormat, OH_MD_KEY_VIDEO_ENCODE_BITRATE_MODE, CBR);
    OH_AVFormat_SetLongValue(encoderFormat, OH_MD_KEY_BITRATE, bitrate);
    // Set I-frame interval to 1000. If unit is ms, this is 1s. If unit is seconds, this is 1000s (effectively all P frames after first I).
    // This is safer than 1 (which could be 1ms -> all I frames).
    OH_AVFormat_SetIntValue(encoderFormat, OH_MD_KEY_I_FRAME_INTERVAL, 1000);

    ret = OH_VideoEncoder_Configure(videoEncoder_, encoderFormat);
    OH_AVFormat_Destroy(encoderFormat);
    if (ret != AV_ERR_OK) {
        REPLAY_LOG(LOG_ERROR, "VideoEncoder_Configure failed ret=%{public}d", ret);
        ResetState();
        return false;
    }

    context_ = std::make_unique<CodecContext>();
    context_->owner = this;

    ret = OH_VideoEncoder_RegisterCallback(videoEncoder_,
                                           {OnCodecError, OnCodecFormatChange, OnNeedInputBuffer, OnNewOutputBuffer},
                                           context_.get());
    if (ret != AV_ERR_OK) {
        REPLAY_LOG(LOG_ERROR, "RegisterCallback failed ret=%{public}d", ret);
        ResetState();
        return false;
    }

    ret = OH_VideoEncoder_Prepare(videoEncoder_);
    if (ret != AV_ERR_OK) {
        REPLAY_LOG(LOG_ERROR, "VideoEncoder_Prepare failed ret=%{public}d", ret);
        ResetState();
        return false;
    }

    // Disable Surface mode to ensure Buffer mode (OnNeedInputBuffer callback) is active
    /*
    ret = OH_VideoEncoder_GetSurface(videoEncoder_, &nativeWindow_);
    if (ret != AV_ERR_OK || !nativeWindow_) {
        REPLAY_LOG(LOG_ERROR, "VideoEncoder_GetSurface failed ret=%{public}d", ret);
        ResetState();
        return false;
    }
    */

    ret = OH_VideoEncoder_Start(videoEncoder_);
    if (ret != AV_ERR_OK) {
        REPLAY_LOG(LOG_ERROR, "VideoEncoder_Start failed ret=%{public}d", ret);
        ResetState();
        return false;
    }
    encoderStarted_ = true;

    ret = OH_AVMuxer_Start(muxer_);
    if (ret != AV_ERR_OK) {
        REPLAY_LOG(LOG_ERROR, "AVMuxer_Start failed ret=%{public}d", ret);
        ResetState();
        return false;
    }
    muxerStarted_ = true;

    // Start the thread that moves encoded output buffers into the muxer.
    stopRequested_.store(false);
    drainThread_ = std::thread(&ReplayEncoder::DrainOutputLoop, this);

    ready_ = true;
    firstFrame_ = true;
    frameCount_ = 0;
    lastPtsUs_ = 0;

    REPLAY_LOG(LOG_INFO, "Init success encoder ready");
    return true;
}

bool ReplayEncoder::Encode(const void *rgba, size_t size, int64_t timestampMs, int32_t inputStride, int32_t inputWidth, int32_t inputHeight)
{
    auto encodeStart = std::chrono::steady_clock::now();
    if (!ready_) {
        REPLAY_LOG(LOG_WARN, "Encode ignored: encoder not initialized");
        return false;
    }
    if (!rgba) {
        REPLAY_LOG(LOG_ERROR, "Encode failed: null buffer");
        return false;
    }

    // Use input dimensions if provided, otherwise default to encoder dimensions
    int32_t realWidth = (inputWidth > 0) ? inputWidth : width_;
    int32_t realHeight = (inputHeight > 0) ? inputHeight : height_;

    // Use inputStride if provided, otherwise calculate default stride (width * 4)
    // We use realWidth for default stride calculation
    size_t strideBytes = (inputStride > 0) ? static_cast<size_t>(inputStride) : (static_cast<size_t>(realWidth) * 4);
    size_t required = strideBytes * static_cast<size_t>(realHeight);
    
    if (size < required) {
        REPLAY_LOG(LOG_ERROR, "Encode failed: buffer too small size=%{public}zu required=%{public}zu width=%{public}d height=%{public}d stride=%{public}zu", 
                   size, required, realWidth, realHeight, strideBytes);
        return false;
    }

    InputBufferInfo bufferInfo;
    auto waitStart = std::chrono::steady_clock::now();
    {
        std::unique_lock<std::mutex> lock(context_->inputMutex);
        bool ok = context_->inputCond.wait_for(lock, std::chrono::milliseconds(500), [this]() {
            return stopRequested_.load() || (context_ && !context_->inputQueue.empty());
        });
        if (!ok || !context_ || context_->inputQueue.empty()) {
            REPLAY_LOG(LOG_WARN, "Encode timed out waiting for input buffer");
            return false;
        }
        bufferInfo = context_->inputQueue.front();
        context_->inputQueue.pop();
    }
    auto waitTime = std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::steady_clock::now() - waitStart).count();

    auto *dest = static_cast<uint8_t *>(OH_AVBuffer_GetAddr(bufferInfo.buffer));
    if (!dest) {
        REPLAY_LOG(LOG_ERROR, "Encode failed: buffer addr null");
        return false;
    }

    OH_AVCodecBufferAttr inputAttr {0};
    int32_t attrRet = OH_AVBuffer_GetBufferAttr(bufferInfo.buffer, &inputAttr);
    if (attrRet == AV_ERR_OK) {
        REPLAY_LOG(LOG_INFO,
                   "Input buffer attr size=%{public}u offset=%{public}u pts=%{public}" PRId64,
                   inputAttr.size, inputAttr.offset, inputAttr.pts);
    } else {
        REPLAY_LOG(LOG_WARN, "GetBufferAttr failed ret=%{public}d", attrRet);
    }

    int32_t capacityRet = OH_AVBuffer_GetCapacity(bufferInfo.buffer);
    if (capacityRet > 0) {
        REPLAY_LOG(LOG_INFO, "Input buffer capacity=%{public}d", capacityRet);
        size_t capacitySize = static_cast<size_t>(capacityRet);
        std::memset(dest, 0, capacitySize);
    } else {
        REPLAY_LOG(LOG_WARN, "GetCapacity returned %{public}d", capacityRet);
    }

    const uint8_t *rgbaBytes = static_cast<const uint8_t *>(rgba);
    if (frameCount_ == 0 && rgbaBytes) {
        REPLAY_LOG(LOG_INFO, "Frame0 sample RGBA r=%{public}d g=%{public}d b=%{public}d a=%{public}d", rgbaBytes[0], rgbaBytes[1], rgbaBytes[2], rgbaBytes[3]);
    }

    auto convertStart = std::chrono::steady_clock::now();
    if (!ConvertRgbaToNv12(rgbaBytes, size, dest, static_cast<int32_t>(strideBytes), realWidth, realHeight)) {
        REPLAY_LOG(LOG_ERROR, "Encode failed: RGBA->NV12 conversion error");
        return false;
    }
    auto convertTime = std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::steady_clock::now() - convertStart).count();

    if (frameCount_ == 0) {
        int ySample = dest[0];
        size_t uvOffset = static_cast<size_t>(width_) * static_cast<size_t>(height_);
        int uSample = dest[uvOffset];
        int vSample = dest[uvOffset + 1];
        REPLAY_LOG(LOG_INFO, "Frame0 sample YUV y=%{public}d u=%{public}d v=%{public}d", ySample, uSample, vSample);
    }

    constexpr int64_t MICROSECOND_EPOCH_THRESHOLD = 100000000000000LL; // ~3 years in ms
    int64_t timestampMsAdjusted = timestampMs;
    // Removed normalization logic to trust the caller's unit (Milliseconds)
    // if (timestampMsAdjusted > MICROSECOND_EPOCH_THRESHOLD) { ... }

    OH_AVCodecBufferAttr attr {0};
    attr.size = static_cast<uint32_t>(expectedFrameSize_);
    attr.offset = 0;
    // Convert Milliseconds to Microseconds (x1,000) for HarmonyOS PTS
    int64_t ptsCandidateUs = timestampMsAdjusted * 1000;
    if (!hasBasePts_) {
        basePtsUs_ = ptsCandidateUs;
        hasBasePts_ = true;
    }
    int64_t ptsUs = ptsCandidateUs - basePtsUs_;
    if (ptsUs < 0) {
        ptsUs = 0;
    }
    if (ptsUs <= lastPtsUs_) {
        ptsUs = lastPtsUs_ + frameDurationUs_;
    }
    attr.pts = ptsUs;
    attr.flags = firstFrame_ ? AVCODEC_BUFFER_FLAGS_SYNC_FRAME : AVCODEC_BUFFER_FLAGS_NONE;

    int32_t ret = OH_AVBuffer_SetBufferAttr(bufferInfo.buffer, &attr);
    if (ret != AV_ERR_OK) {
        REPLAY_LOG(LOG_ERROR, "SetBufferAttr failed ret=%{public}d", ret);
        return false;
    }

    auto pushStart = std::chrono::steady_clock::now();
    ret = OH_VideoEncoder_PushInputBuffer(videoEncoder_, bufferInfo.index);
    if (ret != AV_ERR_OK) {
        REPLAY_LOG(LOG_ERROR, "PushInputBuffer failed ret=%{public}d", ret);
        return false;
    }
    auto pushTime = std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::steady_clock::now() - pushStart).count();

    frameCount_++;
    firstFrame_ = false;
    lastPtsUs_ = ptsUs;

    auto totalTime = std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::steady_clock::now() - encodeStart).count();
    // 输出性能日志：每20帧或异常时输出
    if (frameCount_ % 20 == 1 || waitTime > 10000 || convertTime > 50000 || pushTime > 5000 || totalTime > 100000) {
        REPLAY_LOG(LOG_INFO, "[PERF] ReplayEncoder Encode frame=%{public}" PRIu64 " size=%{public}dx%{public}d wait=%{public}lldus convert=%{public}lldus push=%{public}lldus total=%{public}lldus",
                   frameCount_, width_, height_, static_cast<long long>(waitTime), static_cast<long long>(convertTime), static_cast<long long>(pushTime), static_cast<long long>(totalTime));
    } else {
    REPLAY_LOG(LOG_DEBUG, "Encode submitted frame=%{public}" PRIu64 " pts=%{public}" PRId64,
               frameCount_, ptsUs);
    }
    return true;
}

// Encodes one frame directly from a NativePixelMap: resolves the PixelMap NDK
// symbols on first use, locks the pixel memory, forwards it to Encode() with
// the PixelMap's own stride and dimensions, then unlocks.
// Assumes the pixel data is RGBA8888 -- TODO confirm (the pixelFormat
// reported by GetImageInfo is not checked here).
//
// @param pixelMapVoid  NativePixelMap* passed as void* (NAPI boundary)
// @param timestampMs   capture timestamp in milliseconds, forwarded to Encode()
// @return result of Encode(), or false on any PixelMap access failure
bool ReplayEncoder::EncodePixelMap(void *pixelMapVoid, int64_t timestampMs)
{
    if (!ready_ || !pixelMapVoid) {
        REPLAY_LOG(LOG_ERROR, "EncodePixelMap: not ready or invalid pixelMap");
        return false;
    }
    NativePixelMap *pixelMap = static_cast<NativePixelMap *>(pixelMapVoid);

    if (!EnsurePixelMapFuncs()) {
        REPLAY_LOG(LOG_ERROR, "EncodePixelMap: symbols not available");
        return false;
    }

    OHOS::Media::OhosPixelMapInfo info;
    int32_t ret = g_getImageInfo(pixelMap, &info);
    if (ret != 0) {
        REPLAY_LOG(LOG_ERROR, "EncodePixelMap: GetImageInfo failed ret=%{public}d", ret);
        return false;
    }

    // Dimension mismatch is tolerated: Encode() crops/pads to the encoder frame.
    if (info.width != static_cast<uint32_t>(width_) || info.height != static_cast<uint32_t>(height_)) {
        REPLAY_LOG(LOG_WARN, "EncodePixelMap: dimension mismatch pm=%{public}u x %{public}u encoder=%{public}d x %{public}d (auto-padding enabled)",
                 info.width, info.height, width_, height_);
    }

    void *pixels = nullptr;
    ret = g_accessPixels(pixelMap, &pixels);
    if (ret != 0 || !pixels) {
        REPLAY_LOG(LOG_ERROR, "EncodePixelMap: AccessPixels failed ret=%{public}d", ret);
        return false;
    }

    // Pass the PixelMap's row stride (rowSize) and real dimensions to Encode;
    // the pixels are unlocked regardless of the encode result.
    bool result = Encode(pixels, static_cast<size_t>(info.rowSize * info.height), timestampMs, static_cast<int32_t>(info.rowSize), static_cast<int32_t>(info.width), static_cast<int32_t>(info.height));

    g_unAccessPixels(pixelMap);
    return result;
}

// Returns the encoder's input surface as an opaque pointer. With surface mode
// disabled in Init() (the GetSurface call is commented out), this is null.
void* ReplayEncoder::GetInputWindow()
{
    void *window = nativeWindow_;
    return window;
}

// Flushes and tears down the pipeline:
//   1. signal end-of-stream to the encoder,
//   2. request stop and join the output drain thread,
//   3. stop/destroy the encoder, then the muxer,
//   4. clear all remaining state via ResetState().
// NOTE(review): stopRequested_ is set immediately after NotifyEndOfStream, so
// the drain thread may observe the stop before the encoder has delivered its
// final (EOS-flagged) output buffers, potentially truncating the tail of the
// file; consider waiting for context_->eosReceived with a timeout before
// requesting the stop -- TODO confirm against device behavior.
void ReplayEncoder::Finalize()
{
    REPLAY_LOG(LOG_INFO, "Finalize begin ready=%{public}d", ready_ ? 1 : 0);
    if (!ready_) {
        REPLAY_LOG(LOG_WARN, "Finalize invoked while encoder not ready. frames=%{public}" PRIu64 " samples=%{public}" PRIu64,
                   frameCount_, writtenSamples_);
    }

    if (videoEncoder_ != nullptr) {
        int32_t ret = OH_VideoEncoder_NotifyEndOfStream(videoEncoder_);
        if (ret != AV_ERR_OK) {
            REPLAY_LOG(LOG_WARN, "NotifyEndOfStream failed ret=%{public}d (ignored)", ret);
        }
    }

    // Wake and join the drain thread before destroying anything it uses.
    stopRequested_.store(true);
    if (context_) {
        context_->inputCond.notify_all();
        context_->outputCond.notify_all();
    }

    if (drainThread_.joinable()) {
        drainThread_.join();
    }

    // Snapshot the flags for the stats log below; they are cleared as each
    // component is destroyed.
    bool wasEncoderStarted = encoderStarted_;
    bool wasMuxerStarted = muxerStarted_;

    if (videoEncoder_ != nullptr) {
        int32_t ret = OH_VideoEncoder_Stop(videoEncoder_);
        REPLAY_LOG(LOG_INFO, "VideoEncoder_Stop ret=%{public}d", ret);
        if (ret != AV_ERR_OK) {
            REPLAY_LOG(LOG_WARN, "VideoEncoder_Stop returned error=%{public}d", ret);
        }
        ret = OH_VideoEncoder_Destroy(videoEncoder_);
        REPLAY_LOG(LOG_INFO, "VideoEncoder_Destroy ret=%{public}d", ret);
        if (ret != AV_ERR_OK) {
            REPLAY_LOG(LOG_WARN, "VideoEncoder_Destroy returned error=%{public}d", ret);
        }
        videoEncoder_ = nullptr;
        encoderStarted_ = false;
    }

    // Stopping the muxer finalizes the MP4 (writes the moov box).
    if (muxer_ != nullptr) {
        if (muxerStarted_) {
            int32_t ret = OH_AVMuxer_Stop(muxer_);
            if (ret != AV_ERR_OK) {
                REPLAY_LOG(LOG_ERROR, "AVMuxer_Stop failed ret=%{public}d", ret);
            } else {
                REPLAY_LOG(LOG_INFO, "AVMuxer_Stop success");
            }
        }
        int32_t ret = OH_AVMuxer_Destroy(muxer_);
        if (ret != AV_ERR_OK) {
            REPLAY_LOG(LOG_ERROR, "AVMuxer_Destroy failed ret=%{public}d", ret);
        } else {
            REPLAY_LOG(LOG_INFO, "AVMuxer_Destroy success");
        }
        muxer_ = nullptr;
        muxerStarted_ = false;
    }

    REPLAY_LOG(LOG_INFO,
               "Finalize stats frames=%{public}" PRIu64 " samples=%{public}" PRIu64 " encoderStarted=%{public}d muxerStarted=%{public}d",
               frameCount_, writtenSamples_, wasEncoderStarted ? 1 : 0, wasMuxerStarted ? 1 : 0);
    ResetState();
    REPLAY_LOG(LOG_INFO, "Finalize done");
}

// Drain thread: moves encoded output buffers from the callback queue into the
// muxer, then returns each buffer to the codec. Exits on EOS, or once a stop
// has been requested AND the pending queue is empty.
// Fix: the previous version used `while (!stopRequested_)`, which exited on
// the next iteration after a stop even when output buffers were still queued,
// dropping those samples; the loop now runs until the queue is drained.
void ReplayEncoder::DrainOutputLoop()
{
    ApplyBackgroundPriority();
    REPLAY_LOG(LOG_INFO, "Output drain thread started");

    while (true) {
        OutputBufferInfo info;
        {
            std::unique_lock<std::mutex> lock(context_->outputMutex);
            context_->outputCond.wait(lock, [this]() {
                return stopRequested_.load() || (context_ && !context_->outputQueue.empty());
            });

            if (!context_ || context_->outputQueue.empty()) {
                if (stopRequested_.load()) {
                    break; // stop requested and nothing left to drain
                }
                continue; // spurious wakeup
            }

            info = context_->outputQueue.front();
            context_->outputQueue.pop();
        }

        if (!info.buffer) {
            continue;
        }

        // Re-apply the attributes captured at callback time before muxing.
        int32_t ret = OH_AVBuffer_SetBufferAttr(info.buffer, &info.attr);
        if (ret != AV_ERR_OK) {
            REPLAY_LOG(LOG_ERROR, "Drain: SetBufferAttr failed ret=%{public}d", ret);
        }

        ret = OH_AVMuxer_WriteSampleBuffer(muxer_, videoTrackId_, info.buffer);
        if (ret != AV_ERR_OK) {
            REPLAY_LOG(LOG_ERROR, "WriteSampleBuffer failed ret=%{public}d flags=%{public}u size=%{public}u", ret, info.attr.flags, info.attr.size);
        } else {
            REPLAY_LOG(LOG_DEBUG, "WriteSampleBuffer success index=%{public}u size=%{public}u pts=%{public}" PRId64,
                       info.index, info.attr.size, info.attr.pts);
            writtenSamples_++;
        }

        // Hand the buffer back to the codec regardless of the mux result.
        ret = OH_VideoEncoder_FreeOutputBuffer(videoEncoder_, info.index);
        if (ret != AV_ERR_OK) {
            REPLAY_LOG(LOG_WARN, "FreeOutputBuffer failed ret=%{public}d", ret);
        }

        if (info.attr.flags & AVCODEC_BUFFER_FLAGS_EOS) {
            REPLAY_LOG(LOG_INFO, "Drain: received EOS");
            break;
        }
    }

    REPLAY_LOG(LOG_INFO, "Output drain thread exit stopRequested=%{public}d writtenSamples=%{public}" PRIu64,
               stopRequested_.load() ? 1 : 0, writtenSamples_);
}

// Demotes the calling thread to the lowest priority of its current scheduling
// policy (best effort; errors are ignored). Used by the drain thread so
// muxing work does not compete with foreground threads. No-op off OHOS.
void ReplayEncoder::ApplyBackgroundPriority()
{
#ifdef __OHOS__
    pthread_t self = pthread_self();
    int policy = 0;
    struct sched_param params;
    if (pthread_getschedparam(self, &policy, &params) != 0) {
        return;
    }
    const int lowest = sched_get_priority_min(policy);
    if (lowest == -1 || params.sched_priority == lowest) {
        return; // policy has no range, or already at the minimum
    }
    params.sched_priority = lowest;
    pthread_setschedparam(self, policy, &params);
#endif
}

bool ReplayEncoder::ConvertRgbaToNv12(const uint8_t *rgba, size_t size, uint8_t *dest, int32_t inputStride, int32_t inputWidth, int32_t inputHeight)
{
    if (!rgba || !dest) {
        return false;
    }
    
    int32_t realWidth = (inputWidth > 0) ? inputWidth : width_;
    int32_t realHeight = (inputHeight > 0) ? inputHeight : height_;
    
    // We assume inputStride is valid and size is checked in Encode
    size_t strideBytes = (inputStride > 0) ? static_cast<size_t>(inputStride) : (static_cast<size_t>(realWidth) * 4);

    if (yStride_ == 0 || uvStride_ == 0) {
        return false;
    }

    size_t yPlaneSize = yPlaneSize_;
    size_t uvPlaneSize = uvPlaneSize_;
    
    // NOTE: dest is assumed to be zero-initialized by Encode() if necessary (handling padding area).

    const int32_t copyWidth = std::min(width_, realWidth);
    const int32_t copyHeight = std::min(height_, realHeight);
    const size_t yStride = yStride_;
    const size_t uvStride = uvStride_;
    
    uint8_t *yPlane = dest;
    uint8_t *uvPlane = dest + yPlaneSize_;

    // Convert Y plane
    for (int y = 0; y < copyHeight; ++y) {
        const uint8_t *src = rgba + static_cast<size_t>(y) * strideBytes;
        uint8_t *yDst = yPlane + static_cast<size_t>(y) * yStride;
        
        for (int x = 0; x < copyWidth; ++x) {
            int r = src[0];
            int g = src[1];
            int b = src[2];
            // Y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16
            int yVal = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
            yDst[x] = static_cast<uint8_t>(std::min(std::max(yVal, 0), 255));
            src += 4;
        }
    }

    // Convert UV plane (2x2 downsampling)
    // UV conversion iterates over 2x2 blocks of original pixels
    for (int y = 0; y < copyHeight; y += 2) {
        const uint8_t *srcRow1 = rgba + static_cast<size_t>(y) * strideBytes;
        const uint8_t *srcRow2 = (y + 1 < copyHeight) ? (rgba + static_cast<size_t>(y + 1) * strideBytes) : nullptr;
        uint8_t *uvDst = uvPlane + static_cast<size_t>(y / 2) * uvStride;

        for (int x = 0; x < copyWidth; x += 2) {
            int rSum = 0, gSum = 0, bSum = 0;
            int count = 0;

            // Pixel (x, y)
            const uint8_t *p1 = srcRow1 + static_cast<size_t>(x) * 4;
            rSum += p1[0]; gSum += p1[1]; bSum += p1[2]; count++;

            // Pixel (x+1, y)
            if (x + 1 < copyWidth) {
                const uint8_t *p2 = p1 + 4;
                rSum += p2[0]; gSum += p2[1]; bSum += p2[2]; count++;
            }

            // Pixel (x, y+1) and (x+1, y+1)
            if (srcRow2) {
                const uint8_t *p3 = srcRow2 + static_cast<size_t>(x) * 4;
                rSum += p3[0]; gSum += p3[1]; bSum += p3[2]; count++;
                if (x + 1 < copyWidth) {
                    const uint8_t *p4 = p3 + 4;
                    rSum += p4[0]; gSum += p4[1]; bSum += p4[2]; count++;
                }
            }

            if (count == 0) continue;

            // Calculate average RGB
            int rAvg, gAvg, bAvg;
            if (count == 4) {
                rAvg = rSum >> 2; gAvg = gSum >> 2; bAvg = bSum >> 2;
            } else if (count == 2) {
                rAvg = rSum >> 1; gAvg = gSum >> 1; bAvg = bSum >> 1;
            } else {
                rAvg = rSum / count; gAvg = gSum / count; bAvg = bSum / count;
            }

            // U = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128
            // V = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128
            int uVal = ((-38 * rAvg - 74 * gAvg + 112 * bAvg + 128) >> 8) + 128;
            int vVal = ((112 * rAvg - 94 * gAvg - 18 * bAvg + 128) >> 8) + 128;

            uvDst[x] = static_cast<uint8_t>(std::min(std::max(uVal, 0), 255));
            uvDst[x + 1] = static_cast<uint8_t>(std::min(std::max(vVal, 0), 255));
        }
    }

    return true;
}

// Records a fatal codec error reported by the OnCodecError callback.
// Encoding is not aborted here; the flag is only latched on the context.
void ReplayEncoder::HandleCodecError(int32_t errorCode)
{
    REPLAY_LOG(LOG_ERROR, "Codec error callback code=%{public}d", errorCode);
    if (context_ != nullptr) {
        context_->fatalError = true;
    }
}

// Stream-format-change notification; logged only, no state is updated.
void ReplayEncoder::HandleFormatChange()
{
    REPLAY_LOG(LOG_INFO, "Codec format changed");
}

#undef REPLAY_LOG


