#include "CloudWangGenPlugin.h"

#include <stdio.h>
#include <cstdint>
#include <ctime>
#include <fstream>
#include <map>
#include <memory>
#include <string>
#include <vector>

#ifdef _WIN32
#define NOMINMAX  // 防止 windows.h 定义 min/max 宏，避免与 std::min/std::max 冲突
#include <windows.h>
#include <commdlg.h>
#endif

extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}

#include "ofxsImageEffect.h"
#include "ofxsInteract.h"
#include "ofxsMultiThread.h"
#include "ofxsProcessing.h"
#include "ofxsLog.h"
#include "ofxDrawSuite.h"
#include "ofxsSupportPrivate.h"
#include "ofxImageEffectExt.h"

#define kPluginName "CloudWangGen"
#define kPluginGrouping "OpenFX Sample"
#define kPluginDescription "Generator plugin to display dual video streams from a video file"
#define kPluginIdentifier "com.OpenFXSample.CloudWangGen"
#define kPluginVersionMajor 1
#define kPluginVersionMinor 0

#define kSupportsTiles false
#define kSupportsMultiResolution false
#define kSupportsMultipleClipPARs false

// Forward declarations for FFmpeg types
struct AVFormatContext;
struct AVCodecContext;
struct AVFrame;
struct SwsContext;

////////////////////////////////////////////////////////////////////////////////
/** @brief The plugin that does our work */
class CloudWangGenPlugin : public OFX::ImageEffect
{
public:
    explicit CloudWangGenPlugin(OfxImageEffectHandle p_Handle);
    virtual ~CloudWangGenPlugin();

    /* Override the render */
    virtual void render(const OFX::RenderArguments& p_Args);

    /* Override is identity */
    virtual bool isIdentity(const OFX::IsIdentityArguments& p_Args, OFX::Clip*& p_IdentityClip, double& p_IdentityTime);

    /* Override getRegionOfDefinition */
    virtual bool getRegionOfDefinition(const OFX::RegionOfDefinitionArguments& p_Args, OfxRectD& p_RoD);

    /* Override getClipPreferences */
    virtual void getClipPreferences(OFX::ClipPreferencesSetter& p_ClipPreferences);

    /* Override changedParam */
    virtual void changedParam(const OFX::InstanceChangedArgs& p_Args, const std::string& p_ParamName);

    /* Open a native file-selection dialog; returns the chosen path, or "" if cancelled/unsupported */
    std::string openFileDialog();

    /* Append a timestamped message to the plugin's log file */
    void writeLog(const std::string& message);

    /* Initialize FFmpeg demuxer/decoder (first video stream) */
    bool initializeFFmpegFirst();

    /* Initialize FFmpeg demuxer/decoder (second video stream) */
    bool initializeFFmpegSecond();

    /* Release all FFmpeg resources and clear the frame caches */
    void cleanupFFmpeg();

    /* Decode the frame at the given time (in frames) into the cache (first video stream) */
    bool decodeFrameAtTimeFirst(double time);

    /* Decode the frame at the given time (in frames) into the cache (second video stream) */
    bool decodeFrameAtTimeSecond(double time);
    
    /* Copy a cached frame's Float-RGBA data out of the cache (first video stream) */
    bool getCachedFrameFirst(double time, std::vector<float>& frameData, int& width, int& height);

    /* Copy a cached frame's Float-RGBA data out of the cache (second video stream) */
    bool getCachedFrameSecond(double time, std::vector<float>& frameData, int& width, int& height);
    
    /* Composite the two decoded streams into the output image (side-by-side split) */
    void compositeStreams(const std::vector<float>& firstFrameData, int firstWidth, int firstHeight,
                          const std::vector<float>& secondFrameData, int secondWidth, int secondHeight,
                          OFX::Image* dstImage, double time);

private:
    // Does not own the following pointers
    OFX::Clip* m_DstClip;

    // Parameters
    OFX::StringParam* m_ResourcePath;     // path of the source video file
    OFX::PushButtonParam* m_BrowseButton; // "browse" button that opens the file dialog

    // FFmpeg state for the first video stream (owned; released in cleanupFFmpeg)
    AVFormatContext* m_FormatContextFirst;
    AVCodecContext* m_CodecContextFirst;
    AVFrame* m_FrameFirst;      // decoded frame in the codec's native pixel format
    AVFrame* m_FrameRGBFirst;   // frame converted to RGBA (backed by m_BufferFirst)
    SwsContext* m_SwsContextFirst;
    int m_VideoStreamIndexFirst;
    bool m_FFmpegInitializedFirst;
    double m_FrameRateFirst;    // frames per second (defaults to 25.0)
    int m_WidthFirst;
    int m_HeightFirst;
    uint8_t* m_BufferFirst;     // RGBA pixel buffer for m_FrameRGBFirst

    // FFmpeg state for the second video stream (owned; released in cleanupFFmpeg)
    AVFormatContext* m_FormatContextSecond;
    AVCodecContext* m_CodecContextSecond;
    AVFrame* m_FrameSecond;
    AVFrame* m_FrameRGBSecond;
    SwsContext* m_SwsContextSecond;
    int m_VideoStreamIndexSecond;
    bool m_FFmpegInitializedSecond;
    double m_FrameRateSecond;
    int m_WidthSecond;
    int m_HeightSecond;
    uint8_t* m_BufferSecond;

    std::string m_SourceFilePath;
    
    // Frame caches: key is the time (frame number), value is the frame data (Float RGBA)
    // First video stream cache
    std::map<int64_t, std::vector<float>> m_FrameCacheFirst;
    std::map<int64_t, int> m_FrameCacheWidthFirst;
    std::map<int64_t, int> m_FrameCacheHeightFirst;
    
    // Second video stream cache
    std::map<int64_t, std::vector<float>> m_FrameCacheSecond;
    std::map<int64_t, int> m_FrameCacheWidthSecond;
    std::map<int64_t, int> m_FrameCacheHeightSecond;
    
    // Most recently cached time per stream (-1 when nothing cached yet)
    int64_t m_CachedTimeFirst;
    int64_t m_CachedTimeSecond;
};

// Construct the plugin instance: zero all FFmpeg state and fetch the
// output clip and the two parameters declared by the factory.
CloudWangGenPlugin::CloudWangGenPlugin(OfxImageEffectHandle p_Handle)
    : ImageEffect(p_Handle)
    // First-stream FFmpeg state starts empty; filled by initializeFFmpegFirst().
    , m_FormatContextFirst(nullptr)
    , m_CodecContextFirst(nullptr)
    , m_FrameFirst(nullptr)
    , m_FrameRGBFirst(nullptr)
    , m_SwsContextFirst(nullptr)
    , m_VideoStreamIndexFirst(-1)
    , m_FFmpegInitializedFirst(false)
    , m_FrameRateFirst(25.0)   // fallback rate until the file reports one
    , m_WidthFirst(0)
    , m_HeightFirst(0)
    , m_BufferFirst(nullptr)
    // Second-stream FFmpeg state; filled by initializeFFmpegSecond().
    , m_FormatContextSecond(nullptr)
    , m_CodecContextSecond(nullptr)
    , m_FrameSecond(nullptr)
    , m_FrameRGBSecond(nullptr)
    , m_SwsContextSecond(nullptr)
    , m_VideoStreamIndexSecond(-1)
    , m_FFmpegInitializedSecond(false)
    , m_FrameRateSecond(25.0)
    , m_WidthSecond(0)
    , m_HeightSecond(0)
    , m_BufferSecond(nullptr)
    // -1 means "nothing cached yet" for both streams.
    , m_CachedTimeFirst(-1)
    , m_CachedTimeSecond(-1)
{
    m_DstClip = fetchClip(kOfxImageEffectOutputClipName);
    m_ResourcePath = fetchStringParam("resourcePath");
    m_BrowseButton = fetchPushButtonParam("browseFile");

    writeLog("CloudWangGenPlugin initialized");
}

// Destructor: release every FFmpeg context, frame, and buffer.
CloudWangGenPlugin::~CloudWangGenPlugin()
{
    cleanupFFmpeg();
}

// Append one timestamped line ("[YYYY-MM-DD HH:MM:SS] message") to the
// plugin log file. Failures to open the file are silently ignored so
// logging can never break rendering.
void CloudWangGenPlugin::writeLog(const std::string& message)
{
    // NOTE(review): hard-coded per-machine path — consider making this configurable.
    const std::string logPath = "C:\\Users\\Administrator\\Desktop\\CloudWangGenPlugin.log";

    std::ofstream logFile(logPath, std::ios::app);  // append mode
    if (!logFile.is_open())
    {
        return;
    }

    // Format the timestamp with strftime() into a local buffer instead of
    // ctime(): ctime() hands back a shared static buffer (unsafe if the
    // host renders on multiple threads) with a trailing '\n' that had to
    // be stripped by hand.
    time_t now = time(nullptr);
    struct tm* local = localtime(&now);  // NOTE(review): tm is still shared static storage
    char timeBuf[32] = { 0 };
    if (local && strftime(timeBuf, sizeof(timeBuf), "%Y-%m-%d %H:%M:%S", local) > 0)
    {
        logFile << "[" << timeBuf << "] " << message << std::endl;
    }
    else
    {
        // Timestamp unavailable: still record the message itself.
        logFile << message << std::endl;
    }
}

// Show a platform-native "open file" dialog filtered to common video
// containers. Returns the selected path, or an empty string when the
// user cancels or the platform has no implementation yet.
std::string CloudWangGenPlugin::openFileDialog()
{
    std::string chosenPath;

#ifdef _WIN32
    // Win32 common-dialog "Open File" box.
    char pathBuffer[260] = { 0 };

    OPENFILENAMEA dialog;
    ZeroMemory(&dialog, sizeof(dialog));
    dialog.lStructSize = sizeof(dialog);
    dialog.hwndOwner = NULL;
    dialog.lpstrFile = pathBuffer;
    dialog.nMaxFile = sizeof(pathBuffer);
    dialog.lpstrFilter = "Video Files\0*.mp4;*.avi;*.mov;*.mkv\0All Files\0*.*\0";
    dialog.nFilterIndex = 1;
    dialog.lpstrFileTitle = NULL;
    dialog.nMaxFileTitle = 0;
    dialog.lpstrInitialDir = NULL;
    // Require the user to pick an existing file.
    dialog.Flags = OFN_PATHMUSTEXIST | OFN_FILEMUSTEXIST;

    if (GetOpenFileNameA(&dialog) == TRUE)
    {
        chosenPath = pathBuffer;
    }
#elif defined(__APPLE__)
    // macOS would need NSOpenPanel (Objective-C); not implemented yet,
    // so an empty path is returned.
#else
    // Linux would need GTK or another GUI toolkit; not implemented yet,
    // so an empty path is returned.
#endif

    return chosenPath;
}

// React to parameter changes: either the browse button was clicked or the
// path string was edited. Both paths funnel into one shared reload routine
// (the original duplicated the cleanup/reload/log sequence in both branches).
void CloudWangGenPlugin::changedParam(const OFX::InstanceChangedArgs& p_Args, const std::string& p_ParamName)
{
    // Shared re-initialization: tear down any existing FFmpeg state,
    // remember the new source file, and open both video streams again.
    auto reloadSource = [this](const std::string& path, const char* logPrefix)
    {
        cleanupFFmpeg();
        m_SourceFilePath = path;
        writeLog(std::string(logPrefix) + path);

        if (initializeFFmpegFirst() && initializeFFmpegSecond())
        {
            writeLog("FFmpeg initialized successfully for both streams");
        }
        else
        {
            writeLog("Error: Failed to initialize FFmpeg");
        }
    };

    if (p_ParamName == "browseFile")
    {
        // Button clicked: let the user pick a file from disk.
        std::string selectedPath = openFileDialog();
        if (!selectedPath.empty())
        {
            // Reflect the chosen path back into the string parameter.
            m_ResourcePath->setValue(selectedPath);
            reloadSource(selectedPath, "File selected: ");
        }
    }
    else if (p_ParamName == "resourcePath")
    {
        // Path edited directly: re-initialize only when it actually changed.
        std::string filePath;
        m_ResourcePath->getValue(filePath);

        if (!filePath.empty() && filePath != m_SourceFilePath)
        {
            reloadSource(filePath, "File path changed: ");
        }
    }
}

// Open m_SourceFilePath and set up demuxing, decoding, and RGBA conversion
// for the FIRST video stream in the container. Returns true on success.
// On any failure, cleanupFFmpeg() releases everything allocated so far.
bool CloudWangGenPlugin::initializeFFmpegFirst()
{
    // Already initialized, or no file chosen yet: report current state.
    if (m_FFmpegInitializedFirst || m_SourceFilePath.empty())
    {
        return m_FFmpegInitializedFirst;
    }

    writeLog("Initializing FFmpeg for first stream: " + m_SourceFilePath);

    // Open the input file. On failure avformat_open_input frees the
    // context it was given and nulls the pointer, so cleanup stays safe.
    m_FormatContextFirst = avformat_alloc_context();
    if (avformat_open_input(&m_FormatContextFirst, m_SourceFilePath.c_str(), nullptr, nullptr) < 0)
    {
        writeLog("Error: Could not open file " + m_SourceFilePath);
        cleanupFFmpeg();
        return false;
    }

    // Probe stream information.
    if (avformat_find_stream_info(m_FormatContextFirst, nullptr) < 0)
    {
        writeLog("Error: Could not find stream info");
        cleanupFFmpeg();
        return false;
    }

    // Pick the first video stream in the container.
    for (unsigned int i = 0; i < m_FormatContextFirst->nb_streams; i++)
    {
        if (m_FormatContextFirst->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            m_VideoStreamIndexFirst = i;
            break;
        }
    }

    if (m_VideoStreamIndexFirst < 0)
    {
        writeLog("Error: Could not find first video stream");
        cleanupFFmpeg();
        return false;
    }

    writeLog("Found first video stream at index: " + std::to_string(m_VideoStreamIndexFirst));

    // Locate a decoder for the stream's codec.
    AVCodecParameters* codecpar = m_FormatContextFirst->streams[m_VideoStreamIndexFirst]->codecpar;
    const AVCodec* codec = avcodec_find_decoder(codecpar->codec_id);
    if (!codec)
    {
        writeLog("Error: Codec not found for first stream");
        cleanupFFmpeg();
        return false;
    }

    // Create the decoder context; avcodec_alloc_context3 can return null on OOM.
    m_CodecContextFirst = avcodec_alloc_context3(codec);
    if (!m_CodecContextFirst)
    {
        writeLog("Error: Could not allocate codec context for first stream");
        cleanupFFmpeg();
        return false;
    }
    if (avcodec_parameters_to_context(m_CodecContextFirst, codecpar) < 0)
    {
        writeLog("Error: Could not copy codec parameters for first stream");
        cleanupFFmpeg();
        return false;
    }

    // Open the decoder.
    if (avcodec_open2(m_CodecContextFirst, codec, nullptr) < 0)
    {
        writeLog("Error: Could not open codec for first stream");
        cleanupFFmpeg();
        return false;
    }

    m_WidthFirst = m_CodecContextFirst->width;
    m_HeightFirst = m_CodecContextFirst->height;
    
    // Capture the frame rate (keep the 25.0 default if the file reports none).
    AVRational fps = m_FormatContextFirst->streams[m_VideoStreamIndexFirst]->avg_frame_rate;
    if (fps.num > 0 && fps.den > 0)
    {
        m_FrameRateFirst = (double)fps.num / fps.den;
    }

    writeLog("First video stream info: " + std::to_string(m_WidthFirst) + "x" + std::to_string(m_HeightFirst) + 
             " @ " + std::to_string(m_FrameRateFirst) + " fps");

    // Allocate the decode and conversion frames.
    m_FrameFirst = av_frame_alloc();
    m_FrameRGBFirst = av_frame_alloc();
    if (!m_FrameFirst || !m_FrameRGBFirst)
    {
        writeLog("Error: Could not allocate frames for first stream");
        cleanupFFmpeg();
        return false;
    }

    // Allocate the tightly-packed (align = 1) RGBA buffer. Check both the
    // size computation (negative means error) and the allocation itself.
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, m_WidthFirst, m_HeightFirst, 1);
    if (numBytes < 0)
    {
        writeLog("Error: Invalid RGBA buffer size for first stream");
        cleanupFFmpeg();
        return false;
    }
    m_BufferFirst = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
    if (!m_BufferFirst)
    {
        writeLog("Error: Could not allocate RGBA buffer for first stream");
        cleanupFFmpeg();
        return false;
    }
    if (av_image_fill_arrays(m_FrameRGBFirst->data, m_FrameRGBFirst->linesize, m_BufferFirst, AV_PIX_FMT_RGBA, m_WidthFirst, m_HeightFirst, 1) < 0)
    {
        writeLog("Error: Could not fill image arrays for first stream");
        cleanupFFmpeg();
        return false;
    }

    // Create the SwsContext that converts from the codec's native pixel
    // format to RGBA at the source resolution.
    m_SwsContextFirst = sws_getContext(
        m_WidthFirst, m_HeightFirst, m_CodecContextFirst->pix_fmt,
        m_WidthFirst, m_HeightFirst, AV_PIX_FMT_RGBA,
        SWS_BILINEAR, nullptr, nullptr, nullptr);

    if (!m_SwsContextFirst)
    {
        writeLog("Error: Could not create SwsContext for first stream");
        cleanupFFmpeg();
        return false;
    }

    m_FFmpegInitializedFirst = true;
    writeLog("FFmpeg initialized successfully for first stream");
    return true;
}

// Open m_SourceFilePath and set up demuxing, decoding, and RGBA conversion
// for the SECOND video stream in the container (fails if the file has only
// one video stream). Mirrors initializeFFmpegFirst() with its own contexts.
bool CloudWangGenPlugin::initializeFFmpegSecond()
{
    // Already initialized, or no file chosen yet: report current state.
    if (m_FFmpegInitializedSecond || m_SourceFilePath.empty())
    {
        return m_FFmpegInitializedSecond;
    }

    writeLog("Initializing FFmpeg for second stream: " + m_SourceFilePath);

    // Open the input file. On failure avformat_open_input frees the
    // context it was given and nulls the pointer, so cleanup stays safe.
    m_FormatContextSecond = avformat_alloc_context();
    if (avformat_open_input(&m_FormatContextSecond, m_SourceFilePath.c_str(), nullptr, nullptr) < 0)
    {
        writeLog("Error: Could not open file " + m_SourceFilePath);
        cleanupFFmpeg();
        return false;
    }

    // Probe stream information.
    if (avformat_find_stream_info(m_FormatContextSecond, nullptr) < 0)
    {
        writeLog("Error: Could not find stream info");
        cleanupFFmpeg();
        return false;
    }

    // Pick the SECOND video stream in the container.
    int videoStreamCount = 0;
    for (unsigned int i = 0; i < m_FormatContextSecond->nb_streams; i++)
    {
        if (m_FormatContextSecond->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videoStreamCount++;
            if (videoStreamCount == 2)
            {
                m_VideoStreamIndexSecond = i;
                break;
            }
        }
    }

    if (m_VideoStreamIndexSecond < 0)
    {
        writeLog("Error: Could not find second video stream");
        cleanupFFmpeg();
        return false;
    }

    writeLog("Found second video stream at index: " + std::to_string(m_VideoStreamIndexSecond));

    // Locate a decoder for the stream's codec.
    AVCodecParameters* codecpar = m_FormatContextSecond->streams[m_VideoStreamIndexSecond]->codecpar;
    const AVCodec* codec = avcodec_find_decoder(codecpar->codec_id);
    if (!codec)
    {
        writeLog("Error: Codec not found for second stream");
        cleanupFFmpeg();
        return false;
    }

    // Create the decoder context; avcodec_alloc_context3 can return null on OOM.
    m_CodecContextSecond = avcodec_alloc_context3(codec);
    if (!m_CodecContextSecond)
    {
        writeLog("Error: Could not allocate codec context for second stream");
        cleanupFFmpeg();
        return false;
    }
    if (avcodec_parameters_to_context(m_CodecContextSecond, codecpar) < 0)
    {
        writeLog("Error: Could not copy codec parameters for second stream");
        cleanupFFmpeg();
        return false;
    }

    // Open the decoder.
    if (avcodec_open2(m_CodecContextSecond, codec, nullptr) < 0)
    {
        writeLog("Error: Could not open codec for second stream");
        cleanupFFmpeg();
        return false;
    }

    m_WidthSecond = m_CodecContextSecond->width;
    m_HeightSecond = m_CodecContextSecond->height;
    
    // Capture the frame rate (keep the 25.0 default if the file reports none).
    AVRational fps = m_FormatContextSecond->streams[m_VideoStreamIndexSecond]->avg_frame_rate;
    if (fps.num > 0 && fps.den > 0)
    {
        m_FrameRateSecond = (double)fps.num / fps.den;
    }

    writeLog("Second video stream info: " + std::to_string(m_WidthSecond) + "x" + std::to_string(m_HeightSecond) + 
             " @ " + std::to_string(m_FrameRateSecond) + " fps");

    // Allocate the decode and conversion frames.
    m_FrameSecond = av_frame_alloc();
    m_FrameRGBSecond = av_frame_alloc();
    if (!m_FrameSecond || !m_FrameRGBSecond)
    {
        writeLog("Error: Could not allocate frames for second stream");
        cleanupFFmpeg();
        return false;
    }

    // Allocate the tightly-packed (align = 1) RGBA buffer. Check both the
    // size computation (negative means error) and the allocation itself.
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, m_WidthSecond, m_HeightSecond, 1);
    if (numBytes < 0)
    {
        writeLog("Error: Invalid RGBA buffer size for second stream");
        cleanupFFmpeg();
        return false;
    }
    m_BufferSecond = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
    if (!m_BufferSecond)
    {
        writeLog("Error: Could not allocate RGBA buffer for second stream");
        cleanupFFmpeg();
        return false;
    }
    if (av_image_fill_arrays(m_FrameRGBSecond->data, m_FrameRGBSecond->linesize, m_BufferSecond, AV_PIX_FMT_RGBA, m_WidthSecond, m_HeightSecond, 1) < 0)
    {
        writeLog("Error: Could not fill image arrays for second stream");
        cleanupFFmpeg();
        return false;
    }

    // Create the SwsContext that converts from the codec's native pixel
    // format to RGBA at the source resolution.
    m_SwsContextSecond = sws_getContext(
        m_WidthSecond, m_HeightSecond, m_CodecContextSecond->pix_fmt,
        m_WidthSecond, m_HeightSecond, AV_PIX_FMT_RGBA,
        SWS_BILINEAR, nullptr, nullptr, nullptr);

    if (!m_SwsContextSecond)
    {
        writeLog("Error: Could not create SwsContext for second stream");
        cleanupFFmpeg();
        return false;
    }

    m_FFmpegInitializedSecond = true;
    writeLog("FFmpeg initialized successfully for second stream");
    return true;
}

void CloudWangGenPlugin::cleanupFFmpeg()
{
    // 清理第一路视频流资源
    if (m_SwsContextFirst)
    {
        sws_freeContext(m_SwsContextFirst);
        m_SwsContextFirst = nullptr;
    }

    if (m_BufferFirst)
    {
        av_free(m_BufferFirst);
        m_BufferFirst = nullptr;
    }

    if (m_FrameRGBFirst)
    {
        av_frame_free(&m_FrameRGBFirst);
    }

    if (m_FrameFirst)
    {
        av_frame_free(&m_FrameFirst);
    }

    if (m_CodecContextFirst)
    {
        avcodec_free_context(&m_CodecContextFirst);
    }

    if (m_FormatContextFirst)
    {
        avformat_close_input(&m_FormatContextFirst);
    }

    // 清理第二路视频流资源
    if (m_SwsContextSecond)
    {
        sws_freeContext(m_SwsContextSecond);
        m_SwsContextSecond = nullptr;
    }

    if (m_BufferSecond)
    {
        av_free(m_BufferSecond);
        m_BufferSecond = nullptr;
    }

    if (m_FrameRGBSecond)
    {
        av_frame_free(&m_FrameRGBSecond);
    }

    if (m_FrameSecond)
    {
        av_frame_free(&m_FrameSecond);
    }

    if (m_CodecContextSecond)
    {
        avcodec_free_context(&m_CodecContextSecond);
    }

    if (m_FormatContextSecond)
    {
        avformat_close_input(&m_FormatContextSecond);
    }

    m_FFmpegInitializedFirst = false;
    m_FFmpegInitializedSecond = false;
    m_VideoStreamIndexFirst = -1;
    m_VideoStreamIndexSecond = -1;
    
    // 清空缓存
    m_FrameCacheFirst.clear();
    m_FrameCacheWidthFirst.clear();
    m_FrameCacheHeightFirst.clear();
    m_FrameCacheSecond.clear();
    m_FrameCacheWidthSecond.clear();
    m_FrameCacheHeightSecond.clear();
}

bool CloudWangGenPlugin::decodeFrameAtTimeFirst(double time)
{
    if (!m_FFmpegInitializedFirst)
    {
        if (!initializeFFmpegFirst())
        {
            return false;
        }
    }

    // 计算目标帧号（time 是 DaVinci 的时间，单位是帧）
    int64_t targetFrame = (int64_t)time;
    
    // 检查缓存
    if (m_FrameCacheFirst.find(targetFrame) != m_FrameCacheFirst.end())
    {
        // 缓存命中，不需要重新解码
        m_CachedTimeFirst = targetFrame;
        // 只在调试时输出日志
        // writeLog("decodeFrameAtTimeFirst: cache hit for frame " + std::to_string(targetFrame));
        return true;
    }
    
    // 缓存未命中，需要解码
    // writeLog("decodeFrameAtTimeFirst: cache miss for frame " + std::to_string(targetFrame) + ", decoding...");

    // 计算时间戳（转换为流的时间基准）
    AVRational timeBase = m_FormatContextFirst->streams[m_VideoStreamIndexFirst]->time_base;
    AVRational frameRate = m_FormatContextFirst->streams[m_VideoStreamIndexFirst]->avg_frame_rate;
    // 计算帧间隔（1/frameRate）
    AVRational frameInterval = { frameRate.den, frameRate.num };
    int64_t timestamp = av_rescale_q(targetFrame, frameInterval, timeBase);

    // 定位到目标帧
    if (av_seek_frame(m_FormatContextFirst, m_VideoStreamIndexFirst, timestamp, AVSEEK_FLAG_BACKWARD) < 0)
    {
        writeLog("Error: Could not seek to frame at time " + std::to_string(time) + " for first stream");
        return false;
    }

    // 清空解码器缓冲区
    avcodec_flush_buffers(m_CodecContextFirst);

    AVPacket* packet = av_packet_alloc();
    bool frameFound = false;

    // 读取并解码帧
    while (av_read_frame(m_FormatContextFirst, packet) >= 0)
    {
        if (packet->stream_index == m_VideoStreamIndexFirst)
        {
            // 发送包到解码器
            if (avcodec_send_packet(m_CodecContextFirst, packet) == 0)
            {
                // 接收解码后的帧
                if (avcodec_receive_frame(m_CodecContextFirst, m_FrameFirst) == 0)
                {
                    // 转换格式
                    sws_scale(m_SwsContextFirst,
                              (const uint8_t* const*)m_FrameFirst->data, m_FrameFirst->linesize,
                              0, m_HeightFirst,
                              m_FrameRGBFirst->data, m_FrameRGBFirst->linesize);

                    // 将 RGBA 数据复制到缓存（Float RGBA，使用原始尺寸）
                    size_t bufferSize = m_WidthFirst * m_HeightFirst * 4;
                    std::vector<float> frameData(bufferSize);

                    // 转换 uint8_t [0-255] 到 float [0.0-1.0]
                    for (int y = 0; y < m_HeightFirst; y++)
                    {
                        for (int x = 0; x < m_WidthFirst; x++)
                        {
                            int srcIdx = (y * m_WidthFirst + x) * 4;
                            int dstIdx = (y * m_WidthFirst + x) * 4;

                            frameData[dstIdx + 0] = m_FrameRGBFirst->data[0][srcIdx + 0] / 255.0f; // R
                            frameData[dstIdx + 1] = m_FrameRGBFirst->data[0][srcIdx + 1] / 255.0f; // G
                            frameData[dstIdx + 2] = m_FrameRGBFirst->data[0][srcIdx + 2] / 255.0f; // B
                            frameData[dstIdx + 3] = m_FrameRGBFirst->data[0][srcIdx + 3] / 255.0f; // A
                        }
                    }

                    // 存储到缓存
                    m_FrameCacheFirst[targetFrame] = std::move(frameData);
                    m_FrameCacheWidthFirst[targetFrame] = m_WidthFirst;
                    m_FrameCacheHeightFirst[targetFrame] = m_HeightFirst;
                    m_CachedTimeFirst = targetFrame;

                    frameFound = true;
                    break;
                }
            }
        }
        av_packet_unref(packet);
    }

    av_packet_free(&packet);
    return frameFound;
}

bool CloudWangGenPlugin::decodeFrameAtTimeSecond(double time)
{
    if (!m_FFmpegInitializedSecond)
    {
        if (!initializeFFmpegSecond())
        {
            return false;
        }
    }

    // 计算目标帧号（time 是 DaVinci 的时间，单位是帧）
    int64_t targetFrame = (int64_t)time;
    
    // 检查缓存
    if (m_FrameCacheSecond.find(targetFrame) != m_FrameCacheSecond.end())
    {
        // 缓存命中，不需要重新解码
        m_CachedTimeSecond = targetFrame;
        // 只在调试时输出日志
        // writeLog("decodeFrameAtTimeSecond: cache hit for frame " + std::to_string(targetFrame));
        return true;
    }
    
    // 缓存未命中，需要解码
    // writeLog("decodeFrameAtTimeSecond: cache miss for frame " + std::to_string(targetFrame) + ", decoding...");

    // 计算时间戳（转换为流的时间基准）
    AVRational timeBase = m_FormatContextSecond->streams[m_VideoStreamIndexSecond]->time_base;
    AVRational frameRate = m_FormatContextSecond->streams[m_VideoStreamIndexSecond]->avg_frame_rate;
    // 计算帧间隔（1/frameRate）
    AVRational frameInterval = { frameRate.den, frameRate.num };
    int64_t timestamp = av_rescale_q(targetFrame, frameInterval, timeBase);

    // 定位到目标帧
    if (av_seek_frame(m_FormatContextSecond, m_VideoStreamIndexSecond, timestamp, AVSEEK_FLAG_BACKWARD) < 0)
    {
        writeLog("Error: Could not seek to frame at time " + std::to_string(time) + " for second stream");
        return false;
    }

    // 清空解码器缓冲区
    avcodec_flush_buffers(m_CodecContextSecond);

    AVPacket* packet = av_packet_alloc();
    bool frameFound = false;

    // 读取并解码帧
    while (av_read_frame(m_FormatContextSecond, packet) >= 0)
    {
        if (packet->stream_index == m_VideoStreamIndexSecond)
        {
            // 发送包到解码器
            if (avcodec_send_packet(m_CodecContextSecond, packet) == 0)
            {
                // 接收解码后的帧
                if (avcodec_receive_frame(m_CodecContextSecond, m_FrameSecond) == 0)
                {
                    // 转换格式
                    sws_scale(m_SwsContextSecond,
                              (const uint8_t* const*)m_FrameSecond->data, m_FrameSecond->linesize,
                              0, m_HeightSecond,
                              m_FrameRGBSecond->data, m_FrameRGBSecond->linesize);

                    // 将 RGBA 数据复制到缓存（Float RGBA，使用原始尺寸）
                    size_t bufferSize = m_WidthSecond * m_HeightSecond * 4;
                    std::vector<float> frameData(bufferSize);

                    // 转换 uint8_t [0-255] 到 float [0.0-1.0]
                    for (int y = 0; y < m_HeightSecond; y++)
                    {
                        for (int x = 0; x < m_WidthSecond; x++)
                        {
                            int srcIdx = (y * m_WidthSecond + x) * 4;
                            int dstIdx = (y * m_WidthSecond + x) * 4;

                            frameData[dstIdx + 0] = m_FrameRGBSecond->data[0][srcIdx + 0] / 255.0f; // R
                            frameData[dstIdx + 1] = m_FrameRGBSecond->data[0][srcIdx + 1] / 255.0f; // G
                            frameData[dstIdx + 2] = m_FrameRGBSecond->data[0][srcIdx + 2] / 255.0f; // B
                            frameData[dstIdx + 3] = m_FrameRGBSecond->data[0][srcIdx + 3] / 255.0f; // A
                        }
                    }

                    // 存储到缓存
                    m_FrameCacheSecond[targetFrame] = std::move(frameData);
                    m_FrameCacheWidthSecond[targetFrame] = m_WidthSecond;
                    m_FrameCacheHeightSecond[targetFrame] = m_HeightSecond;
                    m_CachedTimeSecond = targetFrame;

                    frameFound = true;
                    break;
                }
            }
        }
        av_packet_unref(packet);
    }

    av_packet_free(&packet);
    return frameFound;
}

// Copy the cached first-stream frame at the given time (in frames) into
// frameData and report its dimensions. Returns false on a cache miss.
bool CloudWangGenPlugin::getCachedFrameFirst(double time, std::vector<float>& frameData, int& width, int& height)
{
    int64_t targetFrame = (int64_t)time;
    
    auto it = m_FrameCacheFirst.find(targetFrame);
    if (it == m_FrameCacheFirst.end())
    {
        return false;
    }

    frameData = it->second;  // deliberate copy: caller owns the data

    // Use find() instead of operator[], which would default-insert a
    // bogus 0 entry if the dimension maps ever fell out of sync.
    auto itW = m_FrameCacheWidthFirst.find(targetFrame);
    auto itH = m_FrameCacheHeightFirst.find(targetFrame);
    width = (itW != m_FrameCacheWidthFirst.end()) ? itW->second : 0;
    height = (itH != m_FrameCacheHeightFirst.end()) ? itH->second : 0;
    return true;
}

// Copy the cached second-stream frame at the given time (in frames) into
// frameData and report its dimensions. Returns false on a cache miss.
bool CloudWangGenPlugin::getCachedFrameSecond(double time, std::vector<float>& frameData, int& width, int& height)
{
    int64_t targetFrame = (int64_t)time;
    
    auto it = m_FrameCacheSecond.find(targetFrame);
    if (it == m_FrameCacheSecond.end())
    {
        return false;
    }

    frameData = it->second;  // deliberate copy: caller owns the data

    // Use find() instead of operator[], which would default-insert a
    // bogus 0 entry if the dimension maps ever fell out of sync.
    auto itW = m_FrameCacheWidthSecond.find(targetFrame);
    auto itH = m_FrameCacheHeightSecond.find(targetFrame);
    width = (itW != m_FrameCacheWidthSecond.end()) ? itW->second : 0;
    height = (itH != m_FrameCacheHeightSecond.end()) ? itH->second : 0;
    return true;
}

// Render one output frame: decode (or fetch from cache) both video streams
// at p_Args.time and composite them; fill solid red when no source is
// loaded or decoding fails, so the user always sees a deterministic image.
void CloudWangGenPlugin::render(const OFX::RenderArguments& p_Args)
{
    // Fetch the output image for this frame.
    std::unique_ptr<OFX::Image> dst(m_DstClip->fetchImage(p_Args.time));
    if (!dst)
    {
        writeLog("render: failed to fetch output image");
        OFX::throwSuiteStatusException(kOfxStatErrMemory);
        return;
    }
    
    // Only Float RGBA is supported — check the actual image, not the clip descriptor.
    if ((dst->getPixelDepth() != OFX::eBitDepthFloat) || 
        (dst->getPixelComponents() != OFX::ePixelComponentRGBA))
    {
        writeLog("render: unsupported pixel format - depth=" + 
                 std::to_string((int)dst->getPixelDepth()) + 
                 ", components=" + std::to_string((int)dst->getPixelComponents()));
        OFX::throwSuiteStatusException(kOfxStatErrUnsupported);
        return;
    }

    // Fallback fill: paint the whole image opaque red. Shared by both
    // failure paths (previously duplicated verbatim).
    // NOTE(review): assumes rows are packed (rowBytes == width*4*sizeof(float));
    // confirm against the host, or switch to getPixelAddress().
    auto fillRed = [](OFX::Image* image)
    {
        const OfxRectI& bounds = image->getBounds();
        const int width = bounds.x2 - bounds.x1;
        const int height = bounds.y2 - bounds.y1;
        float* dstPixels = static_cast<float*>(image->getPixelData());

        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                int idx = (y * width + x) * 4;
                dstPixels[idx + 0] = 1.0f; // R
                dstPixels[idx + 1] = 0.0f; // G
                dstPixels[idx + 2] = 0.0f; // B
                dstPixels[idx + 3] = 1.0f; // A
            }
        }
    };

    // No file selected, or neither stream initialized: show the fallback.
    if (m_SourceFilePath.empty() || (!m_FFmpegInitializedFirst && !m_FFmpegInitializedSecond))
    {
        fillRed(dst.get());
        return;
    }

    // Look up both streams in the cache, decoding only on a miss.
    int64_t targetFrame = (int64_t)p_Args.time;
    auto itFirst = m_FrameCacheFirst.find(targetFrame);
    auto itSecond = m_FrameCacheSecond.find(targetFrame);
    
    if (itFirst == m_FrameCacheFirst.end())
    {
        decodeFrameAtTimeFirst(p_Args.time);
        itFirst = m_FrameCacheFirst.find(targetFrame);
    }
    
    if (itSecond == m_FrameCacheSecond.end())
    {
        decodeFrameAtTimeSecond(p_Args.time);
        itSecond = m_FrameCacheSecond.find(targetFrame);
    }
    
    if (itFirst != m_FrameCacheFirst.end() && itSecond != m_FrameCacheSecond.end())
    {
        // Both frames available: composite them. Pass references straight
        // into the cache to avoid copying pixel data.
        int firstWidth = m_FrameCacheWidthFirst[targetFrame];
        int firstHeight = m_FrameCacheHeightFirst[targetFrame];
        int secondWidth = m_FrameCacheWidthSecond[targetFrame];
        int secondHeight = m_FrameCacheHeightSecond[targetFrame];
        
        compositeStreams(itFirst->second, firstWidth, firstHeight,
                        itSecond->second, secondWidth, secondHeight,
                        dst.get(), p_Args.time);
    }
    else
    {
        // Decoding failed for at least one stream: show the fallback.
        fillRed(dst.get());
    }
}

void CloudWangGenPlugin::compositeStreams(const std::vector<float>& firstFrameData, int firstWidth, int firstHeight,
                                          const std::vector<float>& secondFrameData, int secondWidth, int secondHeight,
                                          OFX::Image* dstImage, double time)
{
    if (!dstImage || firstFrameData.empty() || secondFrameData.empty())
    {
        return;
    }

    const OfxRectI& dstBounds = dstImage->getBounds();
    const int dstWidth = dstBounds.x2 - dstBounds.x1;
    const int dstHeight = dstBounds.y2 - dstBounds.y1;
    
    void* dstPixelData = dstImage->getPixelData();
    if (!dstPixelData)
    {
        return;
    }
    float* dstPixels = static_cast<float*>(dstPixelData);

    // 左右分屏：左半部分显示第一路视频流，右半部分显示第二路视频流
    const int halfWidth = dstWidth / 2;
    
    // 减少日志输出以提高性能
    // writeLog("compositeStreams: dstWidth=" + std::to_string(dstWidth) + ", dstHeight=" + std::to_string(dstHeight));
    // writeLog("compositeStreams: firstWidth=" + std::to_string(firstWidth) + ", firstHeight=" + std::to_string(firstHeight));
    // writeLog("compositeStreams: secondWidth=" + std::to_string(secondWidth) + ", secondHeight=" + std::to_string(secondHeight));
    // writeLog("compositeStreams: halfWidth=" + std::to_string(halfWidth));

    // 复制第一路视频流到左半部分
    for (int y = 0; y < dstHeight; y++)
    {
        // 计算第一路视频流中的对应位置（垂直方向）
        int srcY = (y * firstHeight) / dstHeight;
        if (srcY < 0) srcY = 0;
        if (srcY >= firstHeight) srcY = firstHeight - 1;

        for (int x = 0; x < halfWidth; x++)
        {
            // 计算第一路视频流中的对应位置（水平方向）
            int srcX = (x * firstWidth) / halfWidth;
            if (srcX < 0) srcX = 0;
            if (srcX >= firstWidth) srcX = firstWidth - 1;

            int srcIdx = (srcY * firstWidth + srcX) * 4;
            int dstIdx = (y * dstWidth + x) * 4;

            // 直接复制像素（边界检查已在上面完成）
            dstPixels[dstIdx + 0] = firstFrameData[srcIdx + 0]; // R
            dstPixels[dstIdx + 1] = firstFrameData[srcIdx + 1]; // G
            dstPixels[dstIdx + 2] = firstFrameData[srcIdx + 2]; // B
            dstPixels[dstIdx + 3] = firstFrameData[srcIdx + 3]; // A
        }
    }

    // 复制第二路视频流到右半部分
    for (int y = 0; y < dstHeight; y++)
    {
        // 计算第二路视频流中的对应位置（垂直方向）
        int srcY = (y * secondHeight) / dstHeight;
        if (srcY < 0) srcY = 0;
        if (srcY >= secondHeight) srcY = secondHeight - 1;

        for (int x = halfWidth; x < dstWidth; x++)
        {
            // 计算第二路视频流中的对应位置（水平方向，相对于右半部分）
            int srcX = ((x - halfWidth) * secondWidth) / halfWidth;
            if (srcX < 0) srcX = 0;
            if (srcX >= secondWidth) srcX = secondWidth - 1;

            int srcIdx = (srcY * secondWidth + srcX) * 4;
            int dstIdx = (y * dstWidth + x) * 4;

            // 直接复制像素（边界检查已在上面完成）
            dstPixels[dstIdx + 0] = secondFrameData[srcIdx + 0]; // R
            dstPixels[dstIdx + 1] = secondFrameData[srcIdx + 1]; // G
            dstPixels[dstIdx + 2] = secondFrameData[srcIdx + 2]; // B
            dstPixels[dstIdx + 3] = secondFrameData[srcIdx + 3]; // A
        }
    }
}

bool CloudWangGenPlugin::isIdentity(const OFX::IsIdentityArguments& p_Args, OFX::Clip*& p_IdentityClip, double& p_IdentityTime)
{
    // 这个插件总是生成新内容，不是 identity
    return false;
}

bool CloudWangGenPlugin::getRegionOfDefinition(const OFX::RegionOfDefinitionArguments& p_Args, OfxRectD& p_RoD)
{
    // 减少日志输出以提高性能
    // writeLog("getRegionOfDefinition: called at time=" + std::to_string(p_Args.time));
    // writeLog("getRegionOfDefinition: m_FFmpegInitializedFirst=" + std::to_string(m_FFmpegInitializedFirst) + 
    //          ", m_FFmpegInitializedSecond=" + std::to_string(m_FFmpegInitializedSecond));
    // writeLog("getRegionOfDefinition: m_WidthFirst=" + std::to_string(m_WidthFirst) + ", m_HeightFirst=" + std::to_string(m_HeightFirst));
    // writeLog("getRegionOfDefinition: m_WidthSecond=" + std::to_string(m_WidthSecond) + ", m_HeightSecond=" + std::to_string(m_HeightSecond));
    
    // Generator 类型没有源片段，使用 FFmpeg 获取的原始视频流尺寸作为参考
    // 使用第一路视频流的尺寸作为参考
    if (m_FFmpegInitializedFirst && m_WidthFirst > 0 && m_HeightFirst > 0)
    {
        // 输出应该是两个图像拼在一起（左右分屏）
        // 输出宽度 = 2 * 第一路视频流宽度，输出高度 = 第一路视频流高度
        // 例如：如果第一路视频流是 N * N，输出应该是 2N * N
        p_RoD.x1 = 0.0;
        p_RoD.y1 = 0.0;
        p_RoD.x2 = m_WidthFirst * 2.0; // 宽度翻倍
        p_RoD.y2 = m_HeightFirst; // 高度不变
        
        // writeLog("getRegionOfDefinition: firstWidth=" + std::to_string(m_WidthFirst) + ", firstHeight=" + std::to_string(m_HeightFirst));
        // writeLog("getRegionOfDefinition: outputRoD x1=" + std::to_string(p_RoD.x1) + ", y1=" + std::to_string(p_RoD.y1) + 
        //          ", x2=" + std::to_string(p_RoD.x2) + ", y2=" + std::to_string(p_RoD.y2));
        // writeLog("getRegionOfDefinition: outputWidth=" + std::to_string(p_RoD.x2 - p_RoD.x1) + ", outputHeight=" + std::to_string(p_RoD.y2 - p_RoD.y1));
        // writeLog("getRegionOfDefinition: returning true");
        
        return true;
    }
    else if (m_FFmpegInitializedSecond && m_WidthSecond > 0 && m_HeightSecond > 0)
    {
        // 如果第一路视频流未初始化，使用第二路视频流的尺寸
        p_RoD.x1 = 0.0;
        p_RoD.y1 = 0.0;
        p_RoD.x2 = m_WidthSecond * 2.0; // 宽度翻倍
        p_RoD.y2 = m_HeightSecond; // 高度不变
        
        // writeLog("getRegionOfDefinition: secondWidth=" + std::to_string(m_WidthSecond) + ", secondHeight=" + std::to_string(m_HeightSecond));
        // writeLog("getRegionOfDefinition: outputRoD x1=" + std::to_string(p_RoD.x1) + ", y1=" + std::to_string(p_RoD.y1) + 
        //          ", x2=" + std::to_string(p_RoD.x2) + ", y2=" + std::to_string(p_RoD.y2));
        // writeLog("getRegionOfDefinition: outputWidth=" + std::to_string(p_RoD.x2 - p_RoD.x1) + ", outputHeight=" + std::to_string(p_RoD.y2 - p_RoD.y1));
        // writeLog("getRegionOfDefinition: returning true");
        
        return true;
    }

    // 如果无法确定，返回默认值（例如 1920x1080，宽度翻倍为 3840x1080）
    // 这样即使 FFmpeg 未初始化，也能返回一个合理的尺寸
    // writeLog("getRegionOfDefinition: FFmpeg not initialized, using default size 3840x1080");
    p_RoD.x1 = 0.0;
    p_RoD.y1 = 0.0;
    p_RoD.x2 = 3840.0; // 默认宽度（2 * 1920）
    p_RoD.y2 = 1080.0; // 默认高度
    // writeLog("getRegionOfDefinition: outputRoD x1=" + std::to_string(p_RoD.x1) + ", y1=" + std::to_string(p_RoD.y1) + 
    //          ", x2=" + std::to_string(p_RoD.x2) + ", y2=" + std::to_string(p_RoD.y2));
    // writeLog("getRegionOfDefinition: returning true (default size)");
    return true;
}

// Tell the host how the output clip is produced: 32-bit float pixels with
// RGBA components (the format compositeStreams writes).
void CloudWangGenPlugin::getClipPreferences(OFX::ClipPreferencesSetter& p_ClipPreferences)
{
    // Output pixel depth: float.
    p_ClipPreferences.setClipBitDepth(*m_DstClip, OFX::eBitDepthFloat);

    // Output components: RGBA.
    p_ClipPreferences.setClipComponents(*m_DstClip, OFX::ePixelComponentRGBA);

    writeLog("getClipPreferences: set output clip to RGBA Float");
}

////////////////////////////////////////////////////////////////////////////////

using namespace OFX;

// Factory constructor: registers the plugin identifier and version with the
// OFX support library's factory helper base class.
CloudWangGenPluginFactory::CloudWangGenPluginFactory()
    : OFX::PluginFactoryHelper<CloudWangGenPluginFactory>(kPluginIdentifier, kPluginVersionMajor, kPluginVersionMinor)
{
}

// Describe the plugin to the host: labels, contexts, pixel depths, and
// capability flags. Called once at plugin-discovery time.
void CloudWangGenPluginFactory::describe(OFX::ImageEffectDescriptor& p_Desc)
{
    // Human-readable labels shown in the host UI.
    p_Desc.setLabels(kPluginName, kPluginName, kPluginName);
    p_Desc.setPluginGrouping(kPluginGrouping);
    p_Desc.setPluginDescription(kPluginDescription);

    // This is a generator (no source clip required); also usable in the
    // general context.
    p_Desc.addSupportedContext(eContextGenerator);
    p_Desc.addSupportedContext(eContextGeneral);

    // Only 32-bit float processing is supported (see getClipPreferences).
    p_Desc.addSupportedBitDepth(eBitDepthFloat);

    // Capability flags.
    p_Desc.setSingleInstance(false);
    p_Desc.setHostFrameThreading(false);
    p_Desc.setSupportsMultiResolution(kSupportsMultiResolution);
    p_Desc.setSupportsTiles(kSupportsTiles);
    p_Desc.setTemporalClipAccess(false);
    p_Desc.setRenderTwiceAlways(false);
    p_Desc.setSupportsMultipleClipPARs(kSupportsMultipleClipPARs);

    // false: the output DOES depend on pixel location (the split-screen layout
    // varies across the image), so the effect is spatially aware.
    p_Desc.setNoSpatialAwareness(false);
}

// Describe clips and parameters for a specific context. A generator needs no
// source clip, so only the output clip plus the UI parameters are defined.
void CloudWangGenPluginFactory::describeInContext(OFX::ImageEffectDescriptor& p_Desc, OFX::ContextEnum /*p_Context*/)
{
    // Mandated output clip; advertise RGBA and Alpha components.
    ClipDescriptor* outputClip = p_Desc.defineClip(kOfxImageEffectOutputClipName);
    outputClip->addSupportedComponent(ePixelComponentRGBA);
    outputClip->addSupportedComponent(ePixelComponentAlpha);
    outputClip->setSupportsTiles(kSupportsTiles);

    // Page that hosts the UI controls below.
    PageParamDescriptor* controlsPage = p_Desc.definePageParam("Controls");

    // Text field holding the path of the video file to decode.
    StringParamDescriptor* pathParam = p_Desc.defineStringParam("resourcePath");
    pathParam->setLabels("Resource Path", "Resource Path", "Resource Path");
    pathParam->setScriptName("resourcePath");
    pathParam->setHint("Path to video file (or click Browse button to select)");
    pathParam->setDefault("");
    pathParam->setStringType(eStringTypeSingleLine); // single-line text entry
    controlsPage->addChild(*pathParam);

    // Push button that opens a native file-selection dialog.
    PushButtonParamDescriptor* browseBtn = p_Desc.definePushButtonParam("browseFile");
    browseBtn->setLabels("Browse...", "Browse...", "Browse...");
    browseBtn->setHint("Click to open file selection dialog");
    controlsPage->addChild(*browseBtn);
}

// Instantiate the effect for the given handle. Ownership of the returned
// instance is taken by the OFX support library, which deletes it on unload.
ImageEffect* CloudWangGenPluginFactory::createInstance(OfxImageEffectHandle p_Handle, ContextEnum /*p_Context*/)
{
    return new CloudWangGenPlugin(p_Handle);
}

// Entry point called by the OFX support library to collect the factories this
// binary exposes. The factory must outlive the host session, hence the static.
void OFX::Plugin::getPluginIDs(PluginFactoryArray& p_FactoryArray)
{
    static CloudWangGenPluginFactory s_Factory;
    p_FactoryArray.push_back(&s_Factory);
}

