#include <jni.h>

#include <dirent.h>
#include <sys/stat.h>

#include <atomic>
#include <cerrno>
#include <chrono>
#include <condition_variable>
#include <cstring>
#include <ctime>
#include <fstream>
#include <iostream>
#include <mutex>
#include <numeric>
#include <queue>
#include <sstream>
#include <string>
#include <thread>

#include <Eigen/Dense>
#include "opencv2/opencv.hpp"

#include "log.h"

// Template images and ROIs for the "move" (tracked) and "calib" (reference) targets.
static cv::Mat moveTemplate, calibTemplate;
static cv::Rect2f moveROI, calibROI;
// True while a recording session is active (toggled by start/stopRecording).
static std::atomic_bool isRecording;
// Base storage directory and the directory of the current recording session.
static std::string appPath, currentRecordPath;

// Video encoding parameters handed to the encoder thread.
struct VideoConfig {
    cv::Size frameSize;                                    // frame dimensions expected by the writer
    double fps = 30.0;                                     // target playback rate
    std::string videoPath;                                 // output file path (*.avi)
    int fourcc = cv::VideoWriter::fourcc('M','J','P','G'); // default codec
};

// Frame queue shared between the camera callback and the encoder thread,
// guarded by queueMutex / queueCV.
static int frameWidth, frameHeight;     // size of the most recent camera frame (set in showMono)
std::queue<cv::Mat> frameQueue;
std::mutex queueMutex;
std::condition_variable queueCV;
std::atomic_bool encodingActive(false); // tells the encoder thread to keep running
std::thread encodingThread;

// Recording time base and frame pacing state.
std::chrono::time_point<std::chrono::high_resolution_clock> recordStartTime;
std::chrono::milliseconds frameInterval{33}; // 30fps≈33ms/frame
std::chrono::milliseconds nextFrameTime{0};  // elapsed time at which the next frame is due


// Forward declarations.
void encodingWorker(VideoConfig config);
void drawRGBonGLSurface(JNIEnv *env, cv::Mat& rgbBitmap);
bool createDirectory(const std::string& path);
void processRecording(cv::Mat& currentFrame);

cv::Mat mtxGet(const cv::Mat& mtx, int size, cv::Point max_loc);
double quadraticFit(const Eigen::Vector2d& xy, double a, double b, double c, double d, double e, double f);
cv::Point3d templateMatch(const cv::Mat& img, const cv::Mat& templ, cv::Point loc);

// Cached global references to Java-side objects and the callback methods
// resolved on them (populated by setMethods, released by cleanupRef).
jobject chartObject = nullptr;
jobject processObject = nullptr;
jobject previewObject = nullptr;
jobject renderObject = nullptr;

jmethodID initChartMethod;
jmethodID updateChartMethod;
jmethodID updateInformationMethod;
jmethodID requestRenderMethod;
jmethodID updateFrameMethod;

// 编码线程函数
// Encoder thread entry point: drains frameQueue into a cv::VideoWriter until
// encodingActive is cleared AND the queue is empty, then releases the writer.
// All queue access happens under queueMutex.
void encodingWorker(VideoConfig config) {
    cv::VideoWriter writer;
    bool initialized = false;

    try {
        if (!writer.open(config.videoPath, config.fourcc, config.fps, config.frameSize, true)) {
            // Fall back to MJPG only when the requested codec was something
            // else; the previous code retried the *identical* fourcc (the
            // default is already MJPG), so the retry could never succeed.
            const int mjpg = cv::VideoWriter::fourcc('M', 'J', 'P', 'G');
            if (config.fourcc == mjpg ||
                !writer.open(config.videoPath, mjpg, config.fps, config.frameSize, true)) {
                LOGE("Failed to initialize video writer!");
                return;
            }
        }
        initialized = true;
        LOGI("Video writer initialized");

        while (true) {
            cv::Mat frame;
            {
                std::unique_lock<std::mutex> lock(queueMutex);
                // Wake on new frames or shutdown; the 100 ms timeout guards
                // against a missed notification.
                queueCV.wait_for(lock, std::chrono::milliseconds(100),
                                 [] { return !frameQueue.empty() || !encodingActive; });

                // Queue state is only inspected while holding the lock (the
                // previous loop condition read frameQueue.empty() unlocked,
                // which is a data race).
                if (frameQueue.empty()) {
                    if (!encodingActive) break; // drained and stopped -> done
                    continue;                   // timeout / spurious wakeup
                }
                frame = std::move(frameQueue.front());
                frameQueue.pop();
            }

            if (!frame.empty()) {
                writer.write(frame);
                LOGV("Wrote frame to video");
            }
        }
    } catch (const std::exception& e) {
        LOGE("Encoding thread error: %s", e.what());
    }

    if (initialized) {
        writer.release();
        LOGI("Video writer released");
    }
}


// Library load hook: enables OpenCV optimizations and prepares the app's
// storage directory. Returns the JNI version we require, or JNI_ERR.
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
    JNIEnv* env;
    if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
        return JNI_ERR; // required JNI version not supported
    }
    // Initialize OpenCV runtime tuning.
    cv::setUseOptimized(true);
    cv::setNumThreads(cv::getNumberOfCPUs());

    // getenv() may return nullptr; constructing std::string from a null
    // pointer is undefined behaviour, so fall back to the conventional
    // Android external-storage root.
    const char* externalPath = getenv("EXTERNAL_STORAGE");
    appPath = std::string(externalPath ? externalPath : "/sdcard") + "/Documents/MISS/";

    createDirectory(appPath);

    return JNI_VERSION_1_6; // supported JNI version
}

// Pushes an RGB cv::Mat to the Java renderer via updateFrame([BII).
// No-op when the Java side has not been wired up yet (setMethods not called).
// NOTE(review): assumes rgbBitmap's pixel data is continuous; callers here
// pass freshly cvtColor'd Mats, but a ROI view would copy garbage — confirm.
void drawRGBonGLSurface(JNIEnv *env, cv::Mat& rgbBitmap){

    if (renderObject == nullptr || updateFrameMethod == nullptr) {
        return; // Java callbacks not registered
    }

    const int totalWidth = rgbBitmap.cols;
    const int totalHeight = rgbBitmap.rows;
    const jsize bufferSize = static_cast<jsize>(rgbBitmap.total() * rgbBitmap.elemSize());

    jbyteArray byteArray = env->NewByteArray(bufferSize);
    if (byteArray == nullptr) {
        // Allocation failed (an OutOfMemoryError is pending on the Java
        // side); previously we would have crashed writing into a null array.
        return;
    }
    env->SetByteArrayRegion(byteArray, 0, bufferSize, (jbyte*)rgbBitmap.data);

    // Invoke the Java-side updateFrame(byte[], int, int).
    env->CallVoidMethod(renderObject, updateFrameMethod, byteArray, totalWidth, totalHeight);

    env->DeleteLocalRef(byteArray);
}

// Creates every component of a '/'-separated path (mkdir -p style).
// Returns false if any component fails for a reason other than "already exists".
bool createDirectory(const std::string& path) {
    size_t pos = 0;
    do {
        pos = path.find_first_of('/', pos + 1);
        // Mode 0755: directories need the execute bit for group/other to be
        // traversable; the previous 0744 made created subdirectories
        // unreachable for any other uid (e.g. the media scanner).
        if(mkdir(path.substr(0, pos).c_str(), 0755) == -1) {
            if(errno != EEXIST) return false; // real failure, not "exists"
        }
    } while(pos != std::string::npos);
    return true;
}

extern "C"
JNIEXPORT void JNICALL
Java_com_shm_miss_MainActivity_startRecording(JNIEnv *env, jclass clazz) {
    // exchange() makes this idempotent: only the first caller starts a session.
    if(isRecording.exchange(true)){
        return;
    }

    // Session directory named by UTC timestamp, e.g. 20240101_120000/.
    // NOTE(review): gmtime() gives UTC; switch to localtime() if folder
    // names should match the device's wall clock — confirm intent.
    time_t now = time(nullptr);
    tm *gmtm = gmtime(&now);
    char buffer[20];
    strftime(buffer, 20, "%Y%m%d_%H%M%S", gmtm);

    currentRecordPath = appPath + std::string(buffer) + "/";

    // Create the session directory; roll back the recording flag on any
    // failure so a later startRecording() can retry (previously the flag
    // stayed set, permanently blocking new sessions).
    if(!createDirectory(currentRecordPath)) {
        LOGE("failed to create record directory %s", currentRecordPath.c_str());
        isRecording = false;
        return;
    }

    // Persist both ROIs (move on line 1, calib on line 2) for offline processing.
    std::ofstream roiFile;
    std::string roiFilePath = currentRecordPath + "roi.txt";
    roiFile.open(roiFilePath, std::ios::out | std::ios::app);
    if(!roiFile.is_open()) {
        LOGE("roi file open fail!");
        isRecording = false;
        return;
    }

    roiFile << moveROI.x << " " << moveROI.y << " " << moveROI.width << " " << moveROI.height << "\n"
            << calibROI.x << " " << calibROI.y << " " << calibROI.width << " " << calibROI.height;

    roiFile.flush(); // commit to disk immediately
    roiFile.close();

    // Configure the encoder with the most recent preview frame size.
    VideoConfig config;
    config.frameSize = cv::Size(frameWidth, frameHeight);
    config.videoPath = currentRecordPath + "record.avi";

    // Launch the encoder thread.
    encodingActive = true;
    encodingThread = std::thread(encodingWorker, config);

    // Reset the frame-pacing clock.
    recordStartTime = std::chrono::high_resolution_clock::now();
    nextFrameTime = std::chrono::milliseconds(0);
}


extern "C"
JNIEXPORT void JNICALL
Java_com_shm_miss_MainActivity_stopRecording(JNIEnv *env, jclass clazz) {
    // Only act if a recording session was actually running.
    if(!isRecording.exchange(false)) {
        return;
    }

    // Tell the encoder to finish, wake it, and wait until it has drained
    // the queue and released the video writer.
    encodingActive = false;
    queueCV.notify_all();
    if(encodingThread.joinable()) {
        encodingThread.join();
    }

    // Discard any leftover frames.
    std::lock_guard<std::mutex> guard(queueMutex);
    while(!frameQueue.empty()) {
        frameQueue.pop();
    }
}


// Enqueues the current grayscale frame for encoding, throttled to ~30 fps.
// No-op when no recording session is active.
void processRecording(cv::Mat& currentFrame){
    if(isRecording) {
        auto now = std::chrono::high_resolution_clock::now();
        auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(
                now - recordStartTime);

        // Only emit a frame when the next slot in the 33 ms grid is due.
        if(elapsed >= nextFrameTime) {
            cv::Mat colorFrame;
            cv::cvtColor(currentFrame, colorFrame, cv::COLOR_GRAY2BGR);

            {
                std::lock_guard<std::mutex> lock(queueMutex);
                // Cap the backlog at ~2 s of video (60 frames) to bound memory.
                if(frameQueue.size() < 60) {
                    // colorFrame is a fresh allocation owned by this scope,
                    // so move it in directly; the previous clone() performed
                    // a redundant deep copy of every queued frame.
                    frameQueue.push(std::move(colorFrame));
                    queueCV.notify_one();
                } else {
                    LOGW("Frame queue full, dropping frame");
                }
            }

            // Advance the schedule; if we fell behind, re-anchor to "now"
            // instead of trying to catch up with a burst.
            nextFrameTime += frameInterval;
            if(elapsed > nextFrameTime) {
                nextFrameTime = elapsed + frameInterval;
            }
        }
    }

}

extern "C"
JNIEXPORT void JNICALL
Java_com_shm_singlecamera_Camera_ImageProcess_showMono(
       JNIEnv *env, jobject thiz, jobject y_data,
       jint src_stride_y, jobject u_data,
       jint src_stride_u, jobject v_data,
       jint src_stride_v, jint stride,
       jint src_width, jint src_height,
       jobject camera_render) {

    // Wrap the camera's Y plane as a grayscale Mat (zero-copy view into the
    // direct byte buffer supplied by the Java side).
    auto *yPlane = (uint8_t *) env->GetDirectBufferAddress(y_data);
    cv::Mat grayFrame(src_height, src_width, CV_8UC1, yPlane, src_stride_y);

    // RGB copy for the GL preview surface.
    cv::Mat previewRGB;
    cv::cvtColor(grayFrame, previewRGB, cv::COLOR_GRAY2RGB);

    // Remember the frame geometry for the video writer configuration.
    frameWidth = src_width;
    frameHeight = src_height;

    // Feed the recorder (no-op unless a session is active).
    processRecording(grayFrame);

    // Hand the frame to the Java renderer and trigger a redraw.
    drawRGBonGLSurface(env, previewRGB);
    if(previewObject != nullptr && requestRenderMethod != nullptr)
        env->CallVoidMethod(previewObject, requestRenderMethod);

    // Refresh the on-screen statistics.
    if(processObject != nullptr && updateInformationMethod != nullptr)
        env->CallVoidMethod(processObject, updateInformationMethod);
}



extern "C"
JNIEXPORT void JNICALL
Java_com_shm_singlecamera_UI_AssistView_setROI1(JNIEnv *env, jobject thiz, jfloat x, jfloat y,
                                                jfloat width, jfloat height) {
    // Store the "move" target's search region as selected in the Java UI.
    moveROI = cv::Rect2f(x, y, width, height);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_shm_singlecamera_UI_AssistView_setROI2(JNIEnv *env, jobject thiz, jfloat x, jfloat y,
                                                jfloat width, jfloat height) {
    // Store the calibration target's search region as selected in the Java UI.
    calibROI = cv::Rect2f(x, y, width, height);
}



// --- Helper: extract a square sub-matrix ---
// Returns a copy of the (size x size) window of mtx centered at max_loc,
// clipped to the matrix bounds. 'size' is forced odd. Returns an empty Mat
// when the clipped window is degenerate (e.g. max_loc outside the matrix).
cv::Mat mtxGet(const cv::Mat& mtx, int size, cv::Point max_loc) {
    if (size % 2 == 0) {
        ++size; // the window must be odd so it has a well-defined center
    }
    const int half = (size - 1) / 2;

    // Clamp the window to the matrix dimensions.
    const int top    = std::max(0, max_loc.y - half);
    const int bottom = std::min(mtx.rows, max_loc.y + half + 1);
    const int left   = std::max(0, max_loc.x - half);
    const int right  = std::min(mtx.cols, max_loc.x + half + 1);

    // Degenerate window -> nothing to extract.
    if (top >= bottom || left >= right) {
        return cv::Mat();
    }

    // Clone so the caller owns an independent copy.
    return mtx(cv::Rect(left, top, right - left, bottom - top)).clone();
}

// --- Quadratic surface evaluation ---
// Evaluates z = a + b*x + c*y + d*x^2 + e*xy + f*y^2 at the point xy.
double quadraticFit(const Eigen::Vector2d& xy, double a, double b, double c, double d, double e, double f) {
    const double x = xy(0);
    const double y = xy(1);
    // Kept in the canonical expanded form (term order matters for exact
    // floating-point reproducibility).
    return a + b * x + c * y + d * x * x + e * x * y + f * y * y;
}

// --- Negated quadratic surface ---
// Useful for generic minimizers (minimizing -z == maximizing z). Not used by
// the analytical solver below; kept for reference.
// Throws std::runtime_error unless exactly 6 coefficients are supplied.
double negQuadraticFit(const Eigen::Vector2d& xy, const Eigen::VectorXd& args) {
    if (args.size() != 6) {
        throw std::runtime_error("negQuadraticFit requires 6 arguments.");
    }
    return -1.0 * quadraticFit(xy, args(0), args(1), args(2), args(3), args(4), args(5));
}


// --- Sub-pixel template matching with analytical peak refinement ---
// Coarse step: normalized cross-correlation (TM_CCOEFF_NORMED) inside a padded
// search window around 'loc'. Refinement step: fit a 2D quadratic surface to a
// 5x5 neighborhood of the correlation peak and solve for its maximum
// analytically. Returns (x, y, score) relative to 'img'; on any fallback path
// it degrades gracefully to the integer peak or the coarse location.
cv::Point3d templateMatch(const cv::Mat& img, const cv::Mat& templ, cv::Point loc) {
    if (img.empty() || templ.empty()) {
        throw std::runtime_error("Input image or template is empty in templateMatch.");
    }
    int h = templ.rows;
    int w = templ.cols;

    // --- Define search region within the input image (img) ---
    // Search around the coarse location 'loc' provided.
    // The padding (20 px) allows for refinement even if the coarse loc is slightly off.
    int search_padding = 20;
    int start_row = std::max(0, loc.y - search_padding);
    int end_row = std::min(img.rows, loc.y + h + search_padding);
    int start_col = std::max(0, loc.x - search_padding);
    int end_col = std::min(img.cols, loc.x + w + search_padding);

    // Ensure the search region is valid and large enough for the template
    if (start_row >= end_row || start_col >= end_col ||
        (end_row - start_row) < h || (end_col - start_col) < w) {
        // Fallback: return the coarse location if the search ROI is invalid/too small
        std::cerr << "Warning: Search region invalid or too small in templateMatch. Returning coarse location." << std::endl;
        return {static_cast<double>(loc.x), static_cast<double>(loc.y), 0.0}; // Return coarse loc with 0 score
    }

    cv::Mat search_img = img(cv::Range(start_row, end_row), cv::Range(start_col, end_col)).clone();

    // --- Perform template matching within the search region ---
    cv::Mat res;
    // Ensure search_img is large enough for the template
    if (search_img.cols < templ.cols || search_img.rows < templ.rows) {
        std::cerr << "Warning: search_img smaller than template in templateMatch. Returning coarse location." << std::endl;
        return {static_cast<double>(loc.x), static_cast<double>(loc.y), 0.0};
    }

    cv::matchTemplate(search_img, templ, res, cv::TM_CCOEFF_NORMED);

    // --- Find the maximum location in the result (relative to search_img) ---
    double min_val, max_val;
    cv::Point min_loc, max_loc; // max_loc is relative to the 'res' matrix origin
    cv::minMaxLoc(res, &min_val, &max_val, &min_loc, &max_loc);

    // --- Extract neighborhood for sub-pixel fitting ---
    int fit_window_size = 5; // Size of the window for quadratic fitting (must be odd)
    cv::Mat fit_mtx_cv = mtxGet(res, fit_window_size, max_loc);

    if (fit_mtx_cv.empty() || fit_mtx_cv.rows < 3 || fit_mtx_cv.cols < 3) { // Need at least 3x3 for quadratic fit
        std::cerr << "Warning: Fit matrix is empty or too small in templateMatch. Returning integer peak." << std::endl;
        // Fallback: return integer peak location relative to the original 'img'
        return {static_cast<double>(max_loc.x + start_col), static_cast<double>(max_loc.y + start_row), max_val};
    }

    // --- Prepare data for Eigen least-squares fitting ---
    int fit_rows = fit_mtx_cv.rows;
    int fit_cols = fit_mtx_cv.cols;
    int num_points = fit_rows * fit_cols;

    Eigen::MatrixXd A(num_points, 6); // Design matrix
    Eigen::VectorXd b(num_points);    // Observation vector (correlation values)
    int k = 0;
    // Coordinates relative to the center of the fit_mtx_cv window
    double center_x = static_cast<double>(fit_cols - 1) / 2.0;
    double center_y = static_cast<double>(fit_rows - 1) / 2.0;

    for (int r = 0; r < fit_rows; ++r) {
        for (int c = 0; c < fit_cols; ++c) {
            double x = static_cast<double>(c) - center_x;
            double y = static_cast<double>(r) - center_y;
            A(k, 0) = 1.0;       // Constant term (a)
            A(k, 1) = x;         // x term (b)
            A(k, 2) = y;         // y term (c)
            A(k, 3) = x * x;     // x^2 term (d)
            A(k, 4) = x * y;     // xy term (e)
            A(k, 5) = y * y;     // y^2 term (f)
            b(k) = static_cast<double>(fit_mtx_cv.at<float>(r, c)); // correlation value (TM_CCOEFF_NORMED output is float)
            k++;
        }
    }

    // --- Perform least squares fitting (A * popt = b) ---
    Eigen::VectorXd popt = A.colPivHouseholderQr().solve(b);
    // popt contains coefficients: [a, b, c, d, e, f]

    // --- Analytical Peak Finding ---
    double a = popt(0);
    double b_coeff = popt(1); // Renamed from 'b' in equation
    double c_coeff = popt(2); // Renamed from 'c' in equation
    double d_coeff = popt(3); // Renamed from 'd' in equation
    double e_coeff = popt(4); // Renamed from 'e' in equation
    double f_coeff = popt(5); // Renamed from 'f' in equation

    // Solve the 2x2 system for the peak location offset (peak_x_offset, peak_y_offset)
    // relative to the center of the fit_mtx_cv window.
    // H * [peak_offset] = -g
    // [ 2d  e ] [peak_x_offset] = [-b]
    // [  e 2f ] [peak_y_offset] = [-c]
    Eigen::Matrix2d hessian;
    hessian << 2 * d_coeff, e_coeff,
            e_coeff,     2 * f_coeff;

    Eigen::Vector2d gradient_neg;
    gradient_neg << -b_coeff, -c_coeff;

    double det = hessian.determinant();
    double peak_x_offset = 0.0; // Default to center if matrix is singular or not a max
    double peak_y_offset = 0.0;
    double peak_val = a; // Default peak value is the constant term if calculation fails

    // Check determinant is non-zero and it's likely a maximum (requires negative definite hessian)
    // Simplified check: determinant > epsilon and diagonal elements are negative
    if (std::abs(det) > 1e-9 && d_coeff < 0 && f_coeff < 0) {
        try {
            Eigen::Vector2d peak_offset = hessian.inverse() * gradient_neg;
            peak_x_offset = peak_offset(0);
            peak_y_offset = peak_offset(1);

            Eigen::Vector2d peak_xy;
            peak_xy << peak_x_offset, peak_y_offset;
            peak_val = quadraticFit(peak_xy, a, b_coeff, c_coeff, d_coeff, e_coeff, f_coeff);
        } catch (const std::exception& e) {
            // NOTE(review): Eigen's dense inverse() does not normally throw;
            // this handler is defensive.
            std::cerr << "Warning: Error solving for peak offset: " << e.what() << ". Using integer peak." << std::endl;
            peak_x_offset = 0.0;
            peak_y_offset = 0.0;
            peak_val = fit_mtx_cv.at<float>(static_cast<int>(center_y), static_cast<int>(center_x)); // Value at center pixel
        }
    } else {
        // Handle singular or non-maximum case (e.g., flat surface, saddle point)
        peak_x_offset = 0.0; // Center of the fit window
        peak_y_offset = 0.0;
        peak_val = fit_mtx_cv.at<float>(static_cast<int>(center_y), static_cast<int>(center_x)); // Value at center pixel
    }

    // --- Calculate final coordinates relative to the original input 'img' ---
    // Start with integer peak in 'res' matrix (max_loc.x, max_loc.y)
    // Add the subpixel offset (peak_x_offset, peak_y_offset) calculated relative to the center of the fit window
    // Add the origin of the search_img relative to the input 'img' (start_col, start_row)
    double final_x = static_cast<double>(max_loc.x) + peak_x_offset + static_cast<double>(start_col);
    double final_y = static_cast<double>(max_loc.y) + peak_y_offset + static_cast<double>(start_row);

    return {
            final_x,
            final_y,
            // Use the calculated sub-pixel peak value, rounded to 2 decimals
            std::round(peak_val * 100.0) / 100.0
    };
}



/**
 * Offline computation loop (resized coarse search + full-resolution refinement).
 * @param move      "move" template at original resolution
 * @param calib     "calib" template at original resolution
 * @param rectMove  ROI of the move template in the full video frame
 * @param rectCalib ROI of the calib template in the full video frame
 * @param video     opened video stream
 * @param result    output CSV stream
 * @param K         scale factor — NOTE(review): currently unused in this function
 * @param K_strain  strain conversion factor — NOTE(review): currently unused,
 *                  so the "strain" value written below is a raw pixel delta
 */
void process(JNIEnv *env,
             cv::Mat& move, cv::Mat& calib,
             cv::Rect2f& rectMove, cv::Rect2f& rectCalib,
             cv::VideoCapture& video, std::ofstream& result,
             const float K, const float K_strain) {

    // Coarse matching runs on half-resolution images for speed.
    double times = 0.5;
    cv::Mat move_resized, calib_resized;
    cv::resize(move, move_resized, cv::Size(), times, times, cv::INTER_LINEAR);
    cv::resize(calib, calib_resized, cv::Size(), times, times, cv::INTER_LINEAR);

    cv::Mat frame;
    bool firstFrame = true;
    double L_first = 0.;   // baseline delta captured on the first frame
    int i = 0;             // frame counter; calib is re-located every 5th frame
    cv::Point3d movePos, calibPos;

    while (video.read(frame)) {
        cv::Mat grayFrame;
        cv::cvtColor(frame, grayFrame, cv::COLOR_BGR2GRAY);

        cv::Mat roiMove = grayFrame(rectMove);
        cv::Mat roiCalib = grayFrame(rectCalib);
        cv::Mat roiMove_resized, roiCalib_resized;
        cv::resize(roiMove, roiMove_resized, cv::Size(), times, times, cv::INTER_LINEAR);
        cv::resize(roiCalib, roiCalib_resized, cv::Size(), times, times, cv::INTER_LINEAR);

        // Coarse match at half resolution, then scale the hit back to full
        // resolution coordinates.
        cv::Mat res1;
        cv::matchTemplate(roiMove_resized, move_resized, res1, cv::TM_CCOEFF_NORMED);
        cv::Point maxLocMove;
        cv::minMaxLoc(res1, nullptr, nullptr, nullptr, &maxLocMove);
        maxLocMove.x = static_cast<int>(maxLocMove.x / times);
        maxLocMove.y = static_cast<int>(maxLocMove.y / times);

        // Sub-pixel refinement at full resolution.
        movePos = templateMatch(roiMove, move, maxLocMove);

        // The calibration target is treated as quasi-static: re-locate it
        // only every 5 frames (i == 0 guarantees it is set before first use).
        if (i % 5 == 0) {
            cv::Mat res2;
            cv::matchTemplate(roiCalib_resized, calib_resized, res2, cv::TM_CCOEFF_NORMED);
            cv::Point maxLocCalib;
            cv::minMaxLoc(res2, nullptr, nullptr, nullptr, &maxLocCalib);
            maxLocCalib.x = static_cast<int>(maxLocCalib.x / times);
            maxLocCalib.y = static_cast<int>(maxLocCalib.y / times);

            calibPos = templateMatch(roiCalib, calib, maxLocCalib);
        }

        double deltaX = movePos.x - calibPos.x;

        if (firstFrame) {
            L_first = deltaX;
            firstFrame = false;

            result << "timestamp(ms),delta(pixel)\n";
        }

        // NOTE(review): despite the name, this is a pixel displacement
        // relative to the first frame; K / K_strain are never applied.
        double strain = deltaX - L_first;

        auto timestamp = static_cast<int64_t>(video.get(cv::CAP_PROP_POS_MSEC));

        result << timestamp << "," << strain << "\n";
        result.flush();

        // Draw the matched template rectangles on the preview frame.
        // (cv::Point's int constructor truncates the sub-pixel coordinates.)
        // Move rectangle position in the full frame:
        cv::Point moveGlobalTopLeft(
                static_cast<int>(rectMove.x) + movePos.x,
                static_cast<int>(rectMove.y) + movePos.y
        );

        cv::Point moveGlobalBottomRight(
                moveGlobalTopLeft.x + move.cols,
                moveGlobalTopLeft.y + move.rows
        );

        // Calib rectangle position in the full frame:
        cv::Point calibGlobalTopLeft(
                static_cast<int>(rectCalib.x) + calibPos.x,
                static_cast<int>(rectCalib.y) + calibPos.y
        );
        cv::Point calibGlobalBottomRight(
                calibGlobalTopLeft.x + calib.cols,
                calibGlobalTopLeft.y + calib.rows
        );

        // Draw the move rectangle. Scalar(255, 255, 0) in BGR order is cyan
        // (the original comment claimed yellow).
        cv::rectangle(
                frame,
                moveGlobalTopLeft,
                moveGlobalBottomRight,
                cv::Scalar(255, 255, 0),
                5
        );

        // Draw the calib rectangle. Scalar(255, 0, 0) in BGR order is blue
        // (the original comment claimed red).
        cv::rectangle(
                frame,
                calibGlobalTopLeft,
                calibGlobalBottomRight,
                cv::Scalar(255, 0, 0),
                5
        );

        // NOTE(review): frame is BGR here, while the live preview path sends
        // RGB to drawRGBonGLSurface — confirm expected channel order on the
        // Java renderer side.
        drawRGBonGLSurface(env, frame);
        if(previewObject != nullptr && requestRenderMethod != nullptr)
            env->CallVoidMethod(previewObject, requestRenderMethod);

        // Append the new data point to the Java line chart.
        if(chartObject != nullptr && updateChartMethod != nullptr)
            env->CallVoidMethod(chartObject, updateChartMethod, strain);

        // Refresh timing/statistics shown in the UI.
        if(processObject != nullptr && updateInformationMethod != nullptr)
            env->CallVoidMethod(processObject, updateInformationMethod);

        i++;
    }
}

// Single-project computation: opens the session's record.avi, loads templates
// and ROIs, and streams the measurement results to result.csv.
// projectName must be the session directory path ending with '/'.
void calcSingleProject(JNIEnv *env, std::string& projectName) {
    if (projectName.empty()) {
        LOGE("No recording path available.");
        return;
    }

    if (moveTemplate.empty() || calibTemplate.empty()) {
        LOGE("Failed to load template images.");
        return;
    }

    // Open the recorded video. Locals below (video, streams) clean up via
    // their destructors on every early-return path.
    cv::VideoCapture video(projectName + "record.avi");
    if (!video.isOpened()) {
        LOGE("Failed to open video file.");
        return;
    }

    // Prefer per-project template overrides; fall back to the globally
    // loaded templates when either override image is missing.
    cv::Mat template1 = cv::imread(projectName + "move.png", cv::IMREAD_GRAYSCALE);
    cv::Mat template2 = cv::imread(projectName + "calibrate.png", cv::IMREAD_GRAYSCALE);
    if (template1.empty() || template2.empty()) {
        template1 = moveTemplate;
        template2 = calibTemplate;
    }

    // Calibration constants (example values — tune to the actual setup).
    const float K = 1.5f / static_cast<float>(template1.cols); // physical(mm) / pixel
    const float gauge_length = 270.0f;
    const float K_strain = 1000000.0f / gauge_length;

    // Open the result file (append mode, created on demand).
    std::ofstream result(projectName + "result.csv", std::ios::out | std::ios::app);
    if (!result.is_open()) {
        LOGE("Failed to open result file.");
        return;
    }

    // roi.txt layout: line 1 = move ROI (x y w h), line 2 = calib ROI (x y w h).
    std::ifstream inFile(projectName + "roi.txt");
    if (!inFile.is_open()) {
        LOGE("failed to open roi file");
        return;
    }

    std::vector<float> configData;
    std::string line;
    while (std::getline(inFile, line)) {
        std::istringstream iss(line);
        float value;
        while (iss >> value) {
            configData.push_back(value);
        }
    }
    inFile.close();

    // Guard against a truncated or corrupt roi.txt: indexing [0..7] below
    // would otherwise be undefined behaviour.
    if (configData.size() < 8) {
        LOGE("roi file malformed: expected 8 values, got %zu", configData.size());
        return;
    }

    cv::Rect2f roiMove(configData[0], configData[1], configData[2], configData[3]);
    cv::Rect2f roiCalib(configData[4], configData[5], configData[6], configData[7]);

    // Run the measurement pipeline.
    process(env, template1, template2, roiMove, roiCalib, video, result, K, K_strain);

    video.release();
    result.close();
}

// Helper: list the names of immediate subdirectories of 'path'.
// Generalized to accept paths with or without a trailing '/' (the original
// silently produced broken paths like "basedir1" without the separator).
// Returns an empty vector when the directory cannot be opened.
std::vector<std::string> getSubDirectoryNames(const std::string& path) {
    std::vector<std::string> dirs;
    DIR* dir = opendir(path.c_str());
    if (!dir) return dirs; // directory missing or unreadable

    const bool needsSlash = path.empty() || path.back() != '/';

    struct dirent* entry;
    while ((entry = readdir(dir))) {
        // Skip the self/parent pseudo-entries.
        if (strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) {
            continue;
        }

        std::string fullPath = path;
        if (needsSlash) fullPath += '/';
        fullPath += entry->d_name;

        // Keep only entries that stat() confirms are directories.
        struct stat statInfo{};
        if (stat(fullPath.c_str(), &statInfo) == 0 && S_ISDIR(statInfo.st_mode)) {
            dirs.emplace_back(entry->d_name);
        }
    }
    closedir(dir);
    return dirs;
}


extern "C"
JNIEXPORT jobjectArray JNICALL
Java_com_shm_miss_MainActivity_getRecordedFolders(JNIEnv *env, jobject thiz) {
    // Enumerate recording-session folders under the app's storage root.
    const std::vector<std::string> folders = getSubDirectoryNames(appPath);

    // Marshal them into a Java String[].
    jclass stringClass = env->FindClass("java/lang/String");
    jobjectArray result = env->NewObjectArray(folders.size(), stringClass, nullptr);

    jsize idx = 0;
    for (const auto& folder : folders) {
        jstring str = env->NewStringUTF(folder.c_str());
        env->SetObjectArrayElement(result, idx++, str);
        env->DeleteLocalRef(str); // keep the local-reference table small
    }

    return result;
}


extern "C"
JNIEXPORT void JNICALL
Java_com_shm_miss_MainActivity_calcProjects(JNIEnv *env,
                        jclass clazz,
                        jobjectArray projects_name) {

    // Copy the Java String[] into native strings.
    std::vector<std::string> projects;
    const jsize count = env->GetArrayLength(projects_name);

    for (jsize i = 0; i < count; ++i) {
        auto jstr = (jstring) env->GetObjectArrayElement(projects_name, i);
        if (!jstr) {
            continue;
        }
        const char* chars = env->GetStringUTFChars(jstr, nullptr);
        if (chars) {
            projects.emplace_back(chars);
            env->ReleaseStringUTFChars(jstr, chars);
        }
        env->DeleteLocalRef(jstr);
    }

    // Process each selected project folder in turn.
    for (auto& project : projects) {
        LOGE("calcing %s", project.c_str());
        auto projectName = appPath + project.append("/");

        // Reset the Java line chart before streaming new results into it.
        if (chartObject != nullptr && initChartMethod != nullptr)
            env->CallVoidMethod(chartObject, initChartMethod);

        calcSingleProject(env, projectName);
    }
}

extern "C"
JNIEXPORT void JNICALL
Java_com_shm_miss_MainActivity_passMovePath(JNIEnv *env, jobject thiz, jstring path) {
    // Copy the Java path into a native string, then release the JNI chars.
    const char* utf = env->GetStringUTFChars(path, nullptr);
    std::string imagePath(utf);
    env->ReleaseStringUTFChars(path, utf);

    // Load the "move" template as grayscale.
    moveTemplate = cv::imread(imagePath, cv::IMREAD_GRAYSCALE);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_shm_miss_MainActivity_passCalibPath(JNIEnv *env, jobject thiz, jstring path) {
    // Copy the Java path into a native string, then release the JNI chars.
    const char* utf = env->GetStringUTFChars(path, nullptr);
    std::string imagePath(utf);
    env->ReleaseStringUTFChars(path, utf);

    // Load the calibration template as grayscale.
    calibTemplate = cv::imread(imagePath, cv::IMREAD_GRAYSCALE);
}


// Caches global references to the Java callback objects and resolves the
// method IDs invoked from the native processing paths.
extern "C"
JNIEXPORT void JNICALL
Java_com_shm_miss_MainActivity_setMethods(JNIEnv *env,
                                          jobject thiz,
                                          jobject chart_object,
                                          jobject  image_process,
                                          jobject preview_object,
                                          jobject render_object) {
    // Release any previously cached global references so repeated calls
    // (e.g. after an Activity restart) don't leak them.
    if (chartObject) env->DeleteGlobalRef(chartObject);
    if (processObject) env->DeleteGlobalRef(processObject);
    if (previewObject) env->DeleteGlobalRef(previewObject);
    if (renderObject) env->DeleteGlobalRef(renderObject);

    // Create fresh global references that outlive this JNI call.
    chartObject = env->NewGlobalRef(chart_object);
    processObject = env->NewGlobalRef(image_process);
    previewObject = env->NewGlobalRef(preview_object);
    renderObject = env->NewGlobalRef(render_object);

    // Resolve the callback method IDs once up front.
    jclass chartClazz = env->GetObjectClass(chartObject);
    initChartMethod = env->GetMethodID(chartClazz, "initLineChart", "()V");
    updateChartMethod = env->GetMethodID(chartClazz, "setLineChartData", "(D)V");

    jclass processClazz = env->GetObjectClass(processObject);
    updateInformationMethod = env->GetMethodID(processClazz, "updateInformation", "()V");

    jclass previewClazz = env->GetObjectClass(previewObject);
    requestRenderMethod = env->GetMethodID(previewClazz, "requestRender", "()V");

    jclass customRenderClass = env->GetObjectClass(renderObject);
    updateFrameMethod = env->GetMethodID(customRenderClass, "updateFrame", "([BII)V");
}

extern "C"
JNIEXPORT void JNICALL
Java_com_shm_miss_MainActivity_cleanupRef(JNIEnv *env, jobject thiz) {
    // Drop all cached global references and null the slots so the rest of
    // the native code sees them as unavailable.
    if (chartObject) env->DeleteGlobalRef(chartObject);
    chartObject = nullptr;

    if (processObject) env->DeleteGlobalRef(processObject);
    processObject = nullptr;

    if (previewObject) env->DeleteGlobalRef(previewObject);
    previewObject = nullptr;

    if (renderObject) env->DeleteGlobalRef(renderObject);
    renderObject = nullptr;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_shm_singlecamera_UI_AssistView_setMoveTemplate(JNIEnv *env, jobject thiz, jfloat x,
                              jfloat y, jfloat width, jfloat height) {
    // Store the rectangle selected for the "move" template in the UI.
    moveROI = cv::Rect2f(x, y, width, height);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_shm_singlecamera_UI_AssistView_setCalibTemplate(JNIEnv *env, jobject thiz, jfloat x,
                                                         jfloat y, jfloat width, jfloat height) {
    // Store the rectangle selected for the calibration template in the UI.
    calibROI = cv::Rect2f(x, y, width, height);
}