/*
 * Copyright (C) 2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Minimal OHOS AVCodec-based encoder pipeline: RGBA -> NV12, push to encoder, write to MP4 via muxer.

#include "ReplayEncoder.h"

#include <atomic>
#include <chrono>
#include <condition_variable>
#include <cstdint>
#include <cstring>
#include <mutex>
#include <queue>
#include <thread>
#include <vector>

extern "C" {
#include "multimedia/player_framework/native_avcodec_videoencoder.h"
#include "multimedia/player_framework/native_avbuffer.h"
#include "multimedia/player_framework/native_avcodec_base.h"
#include "multimedia/player_framework/native_avmuxer.h"
#include "multimedia/player_framework/native_avformat.h"
}

namespace {
// One video frame, already converted to NV12, waiting in the input queue
// until the encoder asks for it via OnNeedInputBuffer.
struct InputFrame {
    std::vector<uint8_t> nv12; // size = width*height*3/2
    int64_t ptsUs = 0;         // microseconds
};
}
// Shared state for one encoder+muxer pipeline.
// Created/torn down by ReplayEncoder::Init/Finalize and handed to the AVCodec
// callbacks as userData, so it is touched from both the caller's thread and
// codec callback threads: inQueue is guarded by inMutex; eos is atomic; the
// remaining fields are written during setup/teardown or from the
// format-change callback (videoTrackId, muxerStarted).
class EncoderContext {
public:
    OH_AVCodec* encoder = nullptr;      // video encoder handle (owned)
    OH_AVMuxer* muxer = nullptr;        // MP4 muxer handle (owned)
    int videoTrackId = -1;              // track index from OH_AVMuxer_AddTrack
    int fd = -1;                        // output file descriptor (caller-owned)
    int width = 0;
    int height = 0;
    int fps = 0;
    bool muxerStarted = false;          // set once OnCodecFormatChange starts the muxer
    std::mutex inMutex;                 // guards inQueue
    std::condition_variable inCond;     // paired with inMutex for inQueue hand-off
    std::queue<InputFrame> inQueue;     // NV12 frames awaiting encoder input buffers
    std::atomic<bool> eos{false};       // Finalize() sets this: no more frames coming
    std::atomic<bool> prepared{false};  // NOTE(review): never written in this file
};

static void OnCodecError(OH_AVCodec* codec, int32_t errorCode, void* userData) {
    (void)codec; (void)errorCode; (void)userData;
}

// Stream-format callback: when the encoder reports its output format, add the
// video track to the muxer and start muxing. Runs its body at most once;
// later invocations (or calls before the muxer exists) are no-ops.
static void OnCodecFormatChange(OH_AVCodec* codec, OH_AVFormat* format, void* userData) {
    (void)codec;
    auto* ctx = reinterpret_cast<EncoderContext*>(userData);
    const bool canAddTrack = (ctx != nullptr) && (ctx->muxer != nullptr) && !ctx->muxerStarted;
    if (!canAddTrack) {
        return;
    }
    if (OH_AVMuxer_AddTrack(ctx->muxer, &ctx->videoTrackId, format) != AV_ERR_OK) {
        return; // best-effort: leave muxer un-started
    }
    if (OH_AVMuxer_Start(ctx->muxer) == AV_ERR_OK) {
        ctx->muxerStarted = true;
    }
}

// Input-buffer callback: the encoder hands us an empty input buffer; fill it
// with the next queued NV12 frame (or flag EOS) and push it back.
//
// Fixes vs. previous version:
//  - AVCODEC_BUFFER_FLAGS_CODEC_DATA_NONE is not a value of the OHOS
//    OH_AVCodecBufferFlags enum; the "no flags" value is
//    AVCODEC_BUFFER_FLAGS_NONE.
//  - OH_AVBuffer_GetCapacity returns int32_t; a negative error return must
//    not be converted to size_t before the >= comparison.
//  - When the queue is momentarily empty (producer hasn't enqueued yet), wait
//    briefly on inCond instead of immediately submitting an empty non-EOS
//    buffer (the original comment already noted it should "wait briefly").
static void OnNeedInputBuffer(OH_AVCodec* codec, uint32_t index, OH_AVBuffer* buffer, void* userData) {
    auto* ctx = reinterpret_cast<EncoderContext*>(userData);
    if (ctx == nullptr) return;

    InputFrame frame;
    {
        std::unique_lock<std::mutex> lk(ctx->inMutex);
        // Give the producer a short window to deliver a frame before giving up.
        ctx->inCond.wait_for(lk, std::chrono::milliseconds(10), [ctx] {
            return !ctx->inQueue.empty() || ctx->eos.load();
        });
        if (!ctx->inQueue.empty()) {
            frame = std::move(ctx->inQueue.front());
            ctx->inQueue.pop();
        }
    }

    OH_AVCodecBufferAttr attr{0};
    if (!frame.nv12.empty()) {
        uint8_t* dst = OH_AVBuffer_GetAddr(buffer);
        int32_t cap = OH_AVBuffer_GetCapacity(buffer); // may be negative on error
        size_t sz = frame.nv12.size();
        if (dst != nullptr && cap > 0 && static_cast<size_t>(cap) >= sz) {
            std::memcpy(dst, frame.nv12.data(), sz);
            attr.size = static_cast<uint32_t>(sz);
            attr.pts = frame.ptsUs; // microseconds
            attr.flags = AVCODEC_BUFFER_FLAGS_NONE;
        }
    } else if (ctx->eos.load()) {
        // No more frames will arrive: tell the encoder to drain.
        attr.size = 0;
        attr.flags = AVCODEC_BUFFER_FLAGS_EOS;
    } else {
        // Still no data after waiting: return the buffer empty; the framework
        // re-invokes this callback for subsequent input buffers.
        attr.size = 0;
        attr.flags = AVCODEC_BUFFER_FLAGS_NONE;
    }
    OH_AVBuffer_SetBufferAttr(buffer, &attr);
    OH_VideoEncoder_PushInputBuffer(codec, index);
}

// Output-buffer callback: take an encoded sample from the encoder, hand it to
// the muxer if the muxer is running, and always return the buffer to the codec.
//
// Fixes vs. previous version:
//  - Removed the redundant OH_AVBuffer_SetBufferAttr call that wrote back the
//    exact attr just read (a no-op that obscured intent).
//  - Skip zero-size samples (e.g. a bare EOS marker); they carry no data for
//    the muxer.
static void OnNewOutputBuffer(OH_AVCodec* codec, uint32_t index, OH_AVBuffer* buffer, void* userData) {
    auto* ctx = reinterpret_cast<EncoderContext*>(userData);
    if (ctx == nullptr) return;
    OH_AVCodecBufferAttr attr{0};
    OH_AVBuffer_GetBufferAttr(buffer, &attr);
    if (ctx->muxer != nullptr && ctx->muxerStarted && ctx->videoTrackId >= 0 && attr.size > 0) {
        // Write the encoded sample to the muxer (buffer carries attr with it).
        OH_AVMuxer_WriteSampleBuffer(ctx->muxer, ctx->videoTrackId, buffer);
    }
    OH_VideoEncoder_FreeOutputBuffer(codec, index);
}

// Convert a tightly packed RGBA8888 image into NV12 (full Y plane followed by
// an interleaved UV plane at half resolution) using BT.601 studio-swing
// integer coefficients (Y in [16,235], U/V centered at 128).
//
// Fix vs. previous version: odd width/height caused out-of-bounds reads of
// the RGBA buffer (i+1 / j+1 past the edge) and out-of-bounds writes into the
// UV plane. Neighbor coordinates are now clamped to the image edge and UV
// writes are bounds-checked; output for even dimensions is unchanged.
//
// @param width    image width in pixels
// @param height   image height in pixels
// @param rgba     source pixels, 4 bytes per pixel, row-major, no padding
// @param outNv12  resized to width*height*3/2 and filled with NV12 data
static void RgbaToNv12(int width, int height, const uint8_t* rgba, std::vector<uint8_t>& outNv12) {
    const int frame = width * height;
    const int uvBytes = frame >> 1;
    outNv12.resize(frame + uvBytes);
    uint8_t* yPlane = outNv12.data();
    uint8_t* uvPlane = outNv12.data() + frame;
    auto clamp = [](int v) { return (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v)); };
    // Luma: one Y sample per pixel.
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            const uint8_t* px = rgba + (j * width + i) * 4;
            int R = px[0], G = px[1], B = px[2];
            int Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            yPlane[j * width + i] = clamp(Y);
        }
    }
    // Chroma: one interleaved U/V pair per 2x2 block, averaging the block.
    // Clamp neighbor coordinates so odd dimensions never read past the edge.
    for (int j = 0; j < height; j += 2) {
        const int j1 = (j + 1 < height) ? j + 1 : j;
        for (int i = 0; i < width; i += 2) {
            const int i1 = (i + 1 < width) ? i + 1 : i;
            const uint8_t* p0 = rgba + (j * width + i) * 4;
            const uint8_t* p1 = rgba + (j * width + i1) * 4;
            const uint8_t* p2 = rgba + (j1 * width + i) * 4;
            const uint8_t* p3 = rgba + (j1 * width + i1) * 4;
            int avgR = (p0[0] + p1[0] + p2[0] + p3[0]) >> 2;
            int avgG = (p0[1] + p1[1] + p2[1] + p3[1]) >> 2;
            int avgB = (p0[2] + p1[2] + p2[2] + p3[2]) >> 2;
            int U = ((-38 * avgR - 74 * avgG + 112 * avgB + 128) >> 8) + 128;
            int V = ((112 * avgR - 94 * avgG - 18 * avgB + 128) >> 8) + 128;
            int uvIndex = (j / 2) * width + i;
            // Bounds check only ever skips writes for odd dimensions, where
            // the truncated (frame>>1) UV plane cannot hold a full last row.
            if (uvIndex + 1 < uvBytes) {
                uvPlane[uvIndex + 0] = clamp(U);
                uvPlane[uvIndex + 1] = clamp(V);
            }
        }
    }
}

// Instance state.
// NOTE(review): a single file-scope context means at most one encoder
// pipeline can be active at a time; all ReplayEncoder instances share it.
static EncoderContext g_ctx;

bool ReplayEncoder::Init(int fd, int width, int height, int fps, const char* mime) {
    fd_ = fd; width_ = width; height_ = height; fps_ = fps; mime_ = mime ? mime : "video/avc";

    g_ctx.fd = fd_; g_ctx.width = width_; g_ctx.height = height_; g_ctx.fps = fps_;

    g_ctx.encoder = OH_VideoEncoder_CreateByMime(mime_.c_str());
    if (!g_ctx.encoder) return false;

    OH_AVFormat* format = OH_AVFormat_Create();
    if (!format) return false;
    OH_AVFormat_SetIntValue(format, OH_MD_KEY_WIDTH, width_);
    OH_AVFormat_SetIntValue(format, OH_MD_KEY_HEIGHT, height_);
    OH_AVFormat_SetDoubleValue(format, OH_MD_KEY_FRAME_RATE, (double)fps_);
    OH_AVFormat_SetIntValue(format, OH_MD_KEY_PIXEL_FORMAT, AV_PIXEL_FORMAT_NV12);
    OH_AVFormat_SetLongValue(format, OH_MD_KEY_BITRATE, 4 * 1024 * 1024); // 4Mbps
    OH_AVFormat_SetIntValue(format, OH_MD_KEY_VIDEO_ENCODE_BITRATE_MODE, CBR);
    OH_AVFormat_SetIntValue(format, OH_MD_KEY_I_FRAME_INTERVAL, 2);

    int ret = OH_VideoEncoder_Configure(g_ctx.encoder, format);
    OH_AVFormat_Destroy(format);
    if (ret != AV_ERR_OK) return false;

    ret = OH_VideoEncoder_RegisterCallback(g_ctx.encoder,
        { OnCodecError, OnCodecFormatChange, OnNeedInputBuffer, OnNewOutputBuffer }, &g_ctx);
    if (ret != AV_ERR_OK) return false;

    ret = OH_VideoEncoder_Prepare(g_ctx.encoder);
    if (ret != AV_ERR_OK) return false;

    // Prepare muxer
    g_ctx.muxer = OH_AVMuxer_Create(fd_, AV_OUTPUT_FORMAT_MPEG_4);
    if (!g_ctx.muxer) return false;

    ret = OH_VideoEncoder_Start(g_ctx.encoder);
    if (ret != AV_ERR_OK) return false;

    started_ = true;
    return true;
}

// Convert one RGBA frame to NV12 and queue it for the encoder.
//
// Fixes vs. previous version:
//  - `size` was accepted but never checked; a caller passing a buffer smaller
//    than width*height*4 would make RgbaToNv12 read out of bounds.
//  - Notify the input condition variable after enqueuing so an input callback
//    waiting for a frame can wake immediately.
//
// @param rgba  RGBA8888 pixels, width_*height_*4 bytes, row-major
// @param size  byte length of rgba; must be at least width_*height_*4
// @param tsMs  presentation timestamp in milliseconds
// @return true if the frame was queued
bool ReplayEncoder::EncodeFrame(const uint8_t* rgba, size_t size, int64_t tsMs) {
    if (!started_ || rgba == nullptr) return false;
    const size_t expected = (size_t)width_ * (size_t)height_ * 4u;
    if (size < expected) return false; // short buffer: reject instead of reading OOB
    InputFrame f;
    RgbaToNv12(width_, height_, rgba, f.nv12);
    f.ptsUs = tsMs * 1000; // ms -> us
    {
        std::lock_guard<std::mutex> lk(g_ctx.inMutex);
        g_ctx.inQueue.push(std::move(f));
    }
    // Wake any input callback waiting for a frame.
    g_ctx.inCond.notify_one();
    return true;
}

void ReplayEncoder::Finalize() {
    if (!started_) return;
    g_ctx.eos.store(true);
    // Notify end of stream explicitly
    OH_VideoEncoder_NotifyEndOfStream(g_ctx.encoder);

    // Stop encoder
    OH_VideoEncoder_Flush(g_ctx.encoder);
    OH_VideoEncoder_Stop(g_ctx.encoder);

    if (g_ctx.muxer && g_ctx.muxerStarted) {
        OH_AVMuxer_Stop(g_ctx.muxer);
    }
    if (g_ctx.muxer) {
        OH_AVMuxer_Destroy(g_ctx.muxer);
        g_ctx.muxer = nullptr;
    }
    if (g_ctx.encoder) {
        OH_VideoEncoder_Destroy(g_ctx.encoder);
        g_ctx.encoder = nullptr;
    }
    started_ = false;
}


