#include <opencv2/opencv.hpp>
#include <iostream>
#include <filesystem>
#include <iomanip>
#include <sstream>
#include <vector>
#include <algorithm>
#include <map>
#include <cmath>
#include <cstdlib>
#include <thread>
#include <chrono>
#include <memory>
#include <fstream>
#include <termios.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/stat.h>
#include <sys/types.h>

// Namespace shorthands.
namespace fs = std::filesystem;
using namespace cv;
using namespace std;

// Global state shared between main() and the mouse callback.
Mat disparity, Qglobal;           // raw disparity map and reprojection matrix Q
Mat disp8_for_show;               // 8-bit scaled disparity for display
Mat left_rect, right_rect;        // rectified left/right frames
Mat frameL, frameR;               // raw captured left/right frames
Size cameraResolution(320, 240);  // lowered resolution to improve frame rate

// Video capture objects for the left/right UDP streams.
VideoCapture capL, capR;

// 摄像头配置结构体
// Per-camera configuration and probe result.
struct CameraConfig
{
    string udp_url;   // UDP stream URL, e.g. "udp://127.0.0.1:8554"
    string name;      // human-readable camera label used in log output
    bool detected;    // set after the stream has been probed successfully
};

// 为避免与OpenCV的utils命名空间冲突，改为my_utils
namespace my_utils
{
    // 从视差计算深度
    float disparityToDepth(float disparity, double fx, double baseline)
    {
        if (disparity <= 0) return 0;
        return (float)(fx * baseline / disparity);
    }

    // 加载立体标定参数
    void loadStereoParams(const string &filename, 
                         Mat &M1, Mat &D1, Mat &M2, Mat &D2,
                         Mat &R, Mat &T, Mat &R1, Mat &R2, 
                         Mat &P1, Mat &P2, Mat &Q)
    {
        FileStorage fs(filename, FileStorage::READ);
        if (!fs.isOpened())
            throw runtime_error("Cannot open stereo parameters file: " + filename);
        
        fs["M1"] >> M1;
        fs["D1"] >> D1;
        fs["M2"] >> M2;
        fs["D2"] >> D2;
        fs["R"] >> R;
        fs["T"] >> T;
        fs["R1"] >> R1;
        fs["R2"] >> R2;
        fs["P1"] >> P1;
        fs["P2"] >> P2;
        fs["Q"] >> Q;
        
        fs.release();
    }

    // 保存16位视差图
    bool saveDisparityAs16BitPNG(const Mat &disparity, const string &filename)
    {
        if (disparity.empty()) return false;
        
        Mat disp16;
        if (disparity.type() == CV_16S)
            disparity.convertTo(disp16, CV_16U, 1);
        else
            disparity.convertTo(disp16, CV_16U);
            
        return imwrite(filename, disp16);
    }

    // 保存点云为PLY文件
    bool savePointCloudPLY(const Mat &points3d, const Mat &color, const string &filename)
    {
        if (points3d.empty() || color.empty()) return false;
        
        ofstream plyFile(filename);
        if (!plyFile.is_open()) return false;
        
        int validPoints = 0;
        vector<Vec3f> vertices;
        vector<Vec3b> colors;
        
        for (int y = 0; y < points3d.rows; y++)
        {
            for (int x = 0; x < points3d.cols; x++)
            {
                Vec3f point = points3d.at<Vec3f>(y, x);
                if (!isinf(point[2]) && point[2] > 0 && point[2] < 10.0)
                {
                    vertices.push_back(point);
                    colors.push_back(color.at<Vec3b>(y, x));
                    validPoints++;
                }
            }
        }
        
        plyFile << "ply\n";
        plyFile << "format ascii 1.0\n";
        plyFile << "element vertex " << validPoints << "\n";
        plyFile << "property float x\n";
        plyFile << "property float y\n";
        plyFile << "property float z\n";
        plyFile << "property uchar red\n";
        plyFile << "property uchar green\n";
        plyFile << "property uchar blue\n";
        plyFile << "end_header\n";
        
        for (size_t i = 0; i < vertices.size(); i++)
        {
            plyFile << vertices[i][0] << " " << vertices[i][1] << " " << vertices[i][2] << " ";
            plyFile << (int)colors[i][2] << " " << (int)colors[i][1] << " " << (int)colors[i][0] << "\n";
        }
        
        plyFile.close();
        return true;
    }

    // 创建目录函数
    bool createDirectory(const string &path)
    {
        try
        {
            if (!fs::exists(path))
            {
                return fs::create_directory(path);
            }
            return true;
        }
        catch (const fs::filesystem_error &e)
        {
            cerr << "Error creating directory: " << e.what() << endl;
            return false;
        }
    }

    // 启动rpicam-vid UDP流
    bool startRpicamStream(const string& camera_name, int port, int width = 320, int height = 240, int fps = 30)
    {
        cout << "启动 " << camera_name << " 的rpicam-vid UDP流..." << endl;
        
        string command = "rpicam-vid -t 0 --width " + to_string(width) + 
                        " --height " + to_string(height) + 
                        " --framerate " + to_string(fps) +
                        " --codec mjpeg -o udp://127.0.0.1:" + to_string(port) + 
                        " > /dev/null 2>&1 &";
        
        cout << "执行命令: " << command << endl;
        
        int result = system(command.c_str());
        if (result != 0) {
            cerr << "启动 " << camera_name << " 的rpicam-vid失败" << endl;
            return false;
        }
        
        // 等待流启动
        this_thread::sleep_for(chrono::seconds(2));
        cout << camera_name << " rpicam-vid UDP流启动完成，端口: " << port << endl;
        return true;
    }

    // 停止所有rpicam-vid进程
    void stopRpicamStreams()
    {
        cout << "停止所有rpicam-vid进程..." << endl;
        system("pkill -f rpicam-vid");
        this_thread::sleep_for(chrono::seconds(1));
    }

    // 使用UDP流初始化摄像头
    bool initCameraUDP(VideoCapture &cap, const string &udp_url, const string &cameraName)
    {
        cout << "初始化 " << cameraName << " 摄像头 (UDP流)..." << endl;
        cout << "UDP URL: " << udp_url << endl;
        
        // 使用FFMPEG后端打开UDP流
        cap.open(udp_url, CAP_FFMPEG);
        if (!cap.isOpened()) {
            cerr << "无法使用FFMPEG打开UDP流 " << cameraName << endl;
            return false;
        }
        
        // 设置摄像头参数
        cap.set(CAP_PROP_FRAME_WIDTH, 320);
        cap.set(CAP_PROP_FRAME_HEIGHT, 240);
        cap.set(CAP_PROP_FPS, 30);
        
        // 验证摄像头
        Mat testFrame;
        bool success = false;
        for (int i = 0; i < 10; i++) {
            if (cap.read(testFrame) && !testFrame.empty()) {
                success = true;
                cout << cameraName << " UDP流初始化成功: " 
                     << testFrame.cols << "x" << testFrame.rows << endl;
                break;
            }
            this_thread::sleep_for(chrono::milliseconds(50));
        }
        
        if (!success) {
            cerr << cameraName << " UDP流无法读取帧" << endl;
            cap.release();
            return false;
        }
        
        return true;
    }

    // 检测UDP摄像头流
    bool testUDPCamera(const string &udp_url, const string &cameraName)
    {
        cout << "测试 " << cameraName << " UDP流..." << endl;
        
        VideoCapture testCap(udp_url, CAP_FFMPEG);
        if (!testCap.isOpened()) {
            cout << "✗ " << cameraName << " UDP流无法打开" << endl;
            return false;
        }
        
        Mat testFrame;
        bool success = false;
        for (int i = 0; i < 5; i++) {
            if (testCap.read(testFrame) && !testFrame.empty()) {
                success = true;
                break;
            }
            this_thread::sleep_for(chrono::milliseconds(50));
        }
        
        testCap.release();
        
        if (success) {
            cout << "✓ " << cameraName << " UDP流检测成功" << endl;
        } else {
            cout << "✗ " << cameraName << " UDP流无法读取帧" << endl;
        }
        
        return success;
    }

    // 优化的视频流捕获函数
    bool captureVideoFrameOptimized(VideoCapture &cap, Mat &outputImage, const string &cameraName)
    {
        if (!cap.isOpened()) {
            return false;
        }
        
        // 清除缓冲区中的旧帧
        for (int i = 0; i < 2; i++) {
            cap.grab();
        }
        
        bool success = cap.read(outputImage);
        if (!success || outputImage.empty()) {
            cerr << "从 " << cameraName << " UDP流读取帧失败" << endl;
            return false;
        }
        
        return true;
    }
}

// 鼠标回调函数
void onMouse(int event, int x, int y, int, void *)
{
    if (event != EVENT_LBUTTONDOWN)
        return;
        
    if (disparity.empty() || Qglobal.empty())
    {
        cout << "[警告] 视差图或Q矩阵未就绪" << endl;
        return;
    }
    
    if (x < 0 || y < 0 || x >= disparity.cols || y >= disparity.rows)
    {
        cout << "[警告] 点击位置超出图像范围" << endl;
        return;
    }
    
    if (disparity.type() != CV_16S && disparity.type() != CV_32F)
    {
        cout << "[警告] 不支持的视差图类型: " << disparity.type() << endl;
        return;
    }
    
    float d;
    if (disparity.type() == CV_16S)
        d = disparity.at<short>(y, x) / 16.0f;
    else
        d = disparity.at<float>(y, x);
        
    if (d <= 0)
    {
        cout << "[信息] 无效视差值 (" << x << "," << y << ")" << endl;
        return;
    }
    
    double fx = Qglobal.at<double>(2, 3);
    double baseline = -1.0 / Qglobal.at<double>(3, 2);
    float depth = my_utils::disparityToDepth(d, fx, baseline);
    
    cout << "[深度信息] 像素(" << x << "," << y << ") 视差=" << d << " 深度=" << depth << " 米" << endl;
}

// 创建带背景的文字显示函数
void putTextWithBackground(Mat &image, const string &text, Point position, 
                          double fontScale, int thickness, Scalar textColor, Scalar bgColor)
{
    int baseline = 0;
    Size textSize = getTextSize(text, FONT_HERSHEY_SIMPLEX, fontScale, thickness, &baseline);
    
    int margin = 5;
    rectangle(image, 
              Point(position.x - margin, position.y - textSize.height - margin),
              Point(position.x + textSize.width + margin, position.y + baseline + margin),
              bgColor, FILLED);
    
    putText(image, text, position, FONT_HERSHEY_SIMPLEX, fontScale, textColor, thickness);
}

// 调整窗口大小以适应屏幕
void resizeWindowToFit(const string &winname, const Mat &image, int maxWidth = 800, int maxHeight = 600)
{
    int width = image.cols;
    int height = image.rows;
    
    if (width > maxWidth || height > maxHeight)
    {
        double scale = min((double)maxWidth / width, (double)maxHeight / height);
        width = static_cast<int>(width * scale);
        height = static_cast<int>(height * scale);
    }
    
    resizeWindow(winname, width, height);
}

// 非阻塞键盘输入检测
int getKeyNonBlocking()
{
    struct termios oldt, newt;
    int ch;
    int oldf;

    tcgetattr(STDIN_FILENO, &oldt);
    newt = oldt;
    newt.c_lflag &= ~(ICANON | ECHO);
    tcsetattr(STDIN_FILENO, TCSANOW, &newt);

    oldf = fcntl(STDIN_FILENO, F_GETFL, 0);
    fcntl(STDIN_FILENO, F_SETFL, oldf | O_NONBLOCK);

    ch = getchar();

    tcsetattr(STDIN_FILENO, TCSANOW, &oldt);
    fcntl(STDIN_FILENO, F_SETFL, oldf);

    return ch;
}

// 安全地创建轨迹条
bool createTrackbarSafe(const string& trackbarName, const string& windowName, 
                       int* value, int count, void (*onChange)(int, void*) = 0, void* userdata = 0)
{
    try {
        createTrackbar(trackbarName, windowName, value, count, onChange, userdata);
        return true;
    }
    catch (const cv::Exception& e) {
        cerr << "[警告] 创建轨迹条失败 '" << trackbarName << "': " << e.what() << endl;
        return false;
    }
}

// Program entry point: starts two rpicam-vid UDP streams, rectifies the
// frame pairs with stereo calibration data, computes SGBM disparity in a
// live loop, and offers interactive saving of disparity maps / point clouds.
int main(int argc, char **argv)
{
    // Force X11 (xcb) instead of Wayland for OpenCV HighGUI.
    setenv("QT_QPA_PLATFORM", "xcb", 1);
    setenv("DISPLAY", ":0", 1);
    
    // Set up a temporary XDG runtime directory (some GUI backends require one).
    uid_t uid = geteuid();
    string tmp_runtime = "/tmp/xdg_runtime_" + to_string((unsigned long)uid);
    if (mkdir(tmp_runtime.c_str(), 0700) != 0 && errno != EEXIST)
    {
        cerr << "[警告] 无法创建临时 XDG_RUNTIME_DIR: " << tmp_runtime << endl;
    }
    else
    {
        chmod(tmp_runtime.c_str(), 0700);
    }
    setenv("XDG_RUNTIME_DIR", tmp_runtime.c_str(), 1);

    cout << "==============================================" << endl;
    cout << "       立体视觉深度与视差实时展示程序" << endl;
    cout << "    (rpicam-vid UDP流版本)" << endl;
    cout << "==============================================" << endl;
    
    // First kill any stale rpicam-vid processes from earlier runs.
    my_utils::stopRpicamStreams();
    
    // UDP stream configuration.
    cout << endl << "[UDP流配置]" << endl;
    
    // Default UDP stream addresses and ports.
    int left_port = 8554;
    int right_port = 8555;
    string udp_left = "udp://127.0.0.1:" + to_string(left_port);
    string udp_right = "udp://127.0.0.1:" + to_string(right_port);
    
    // Override the ports from the command line when both are provided.
    if (argc >= 3)
    {
        left_port = atoi(argv[1]);
        right_port = atoi(argv[2]);
        udp_left = "udp://127.0.0.1:" + to_string(left_port);
        udp_right = "udp://127.0.0.1:" + to_string(right_port);
        cout << "使用自定义UDP端口:" << endl;
        cout << "  左相机: " << udp_left << endl;
        cout << "  右相机: " << udp_right << endl;
    }
    else
    {
        cout << "使用默认UDP端口:" << endl;
        cout << "  左相机: " << udp_left << endl;
        cout << "  右相机: " << udp_right << endl;
    }

    // Launch the rpicam-vid UDP streams.
    cout << endl << "[启动rpicam-vid UDP流]" << endl;
    bool left_stream_started = false;
    bool right_stream_started = false;
    
    // Try to start the left camera stream.
    cout << "启动左相机UDP流..." << endl;
    left_stream_started = my_utils::startRpicamStream("左相机", left_port, 320, 240, 30);
    
    // Try to start the right camera stream.
    cout << "启动右相机UDP流..." << endl;
    right_stream_started = my_utils::startRpicamStream("右相机", right_port, 320, 240, 30);
    
    if (!left_stream_started && !right_stream_started) {
        cerr << "[错误] 无法启动任何rpicam-vid UDP流！" << endl;
        return -1;
    }

    // Camera configuration for both streams.
    cout << endl << "[UDP流检测与初始化]" << endl;
    vector<CameraConfig> cameras = {
        {udp_left, "左相机", false},
        {udp_right, "右相机", false}};

    // Wait for the streams to stabilize before probing them.
    cout << "等待UDP流稳定..." << endl;
    this_thread::sleep_for(chrono::seconds(3));

    // Probe each UDP stream.
    cout << "检测UDP流配置..." << endl;
    for (auto &camera : cameras)
    {
        camera.detected = my_utils::testUDPCamera(camera.udp_url, camera.name);
    }

    if (!cameras[0].detected && !cameras[1].detected)
    {
        cerr << "[错误] 未检测到任何UDP流！" << endl;
        cerr << "请检查:" << endl;
        cerr << "1. rpicam-vid是否正确安装" << endl;
        cerr << "2. 摄像头是否连接正常" << endl;
        cerr << "3. 端口是否被占用" << endl;
        my_utils::stopRpicamStreams();
        return -1;
    }

    // Report probe results.
    cout << endl << "[UDP流检测结果]" << endl;
    for (const auto &camera : cameras)
    {
        if (camera.detected)
        {
            cout << "✓ " << camera.name << " UDP流检测成功" << endl;
        }
        else
        {
            cout << "✗ " << camera.name << " UDP流未检测到" << endl;
        }
    }

    // Load stereo calibration parameters.
    cout << endl << "[加载标定参数]" << endl;
    Mat M1, D1, M2, D2, R, T, R1, R2, P1, P2, Q;
    
    try
    {
        my_utils::loadStereoParams("data/stereo_params.yml", M1, D1, M2, D2, R, T, R1, R2, P1, P2, Q);
        Qglobal = Q;
        cout << "标定参数加载成功" << endl;
        
        // NOTE(review): this baseline comes out negative for a standard
        // stereoRectify Q (Q(3,2) = -1/Tx with Tx < 0) — verify the printed
        // value against the physical rig.
        double baseline = -1.0 / Q.at<double>(3, 2);
        double fx = Q.at<double>(2, 3);
        cout << "相机基线: " << baseline << " 米" << endl;
        cout << "焦距: " << fx << " 像素" << endl;
    }
    catch (const exception &e)
    {
        cerr << "[错误] " << e.what() << endl;
        cerr << "请先运行立体标定程序生成 data/stereo_params.yml 文件" << endl;
        cerr << "或者使用默认参数继续运行..." << endl;
        
        // Fall back to a synthetic calibration (f=700px, 10cm baseline)
        // so the program can still run for demonstration purposes.
        M1 = (Mat_<double>(3,3) << 700, 0, 320, 0, 700, 240, 0, 0, 1);
        D1 = Mat::zeros(1,5,CV_64F);
        M2 = M1.clone();
        D2 = D1.clone();
        R = Mat::eye(3,3,CV_64F);
        T = (Mat_<double>(3,1) << -0.1, 0, 0);
        R1 = Mat::eye(3,3,CV_64F);
        R2 = Mat::eye(3,3,CV_64F);
        P1 = (Mat_<double>(3,4) << 700,0,320,0, 0,700,240,0, 0,0,1,0);
        P2 = (Mat_<double>(3,4) << 700,0,320,35, 0,700,240,0, 0,0,1,0);
        Q = (Mat_<double>(4,4) << 1,0,0,-320, 0,1,0,-240, 0,0,0,700, 0,0,1.0/0.1,0);
        Qglobal = Q;
    }

    // Derive the working resolution from the calibration.
    // (Assumes the principal point sits at the image center — TODO confirm.)
    cout << endl << "[设置摄像头分辨率]" << endl;
    int calibrated_width = (int)(P1.at<double>(0, 2) * 2);
    int calibrated_height = (int)(P1.at<double>(1, 2) * 2);
    
    if (calibrated_width > 0 && calibrated_height > 0)
    {
        // If the calibrated resolution is too high, use a scaled-down version.
        if (calibrated_width > 640) {
            double scale = 320.0 / calibrated_width;
            cameraResolution = Size(320, static_cast<int>(calibrated_height * scale));
            cout << "标定分辨率较高，使用优化分辨率: " << cameraResolution.width << "x" << cameraResolution.height << endl;
        } else {
            cameraResolution = Size(calibrated_width, calibrated_height);
            cout << "使用标定分辨率: " << cameraResolution.width << "x" << cameraResolution.height << endl;
        }
    }
    else
    {
        cameraResolution = Size(320, 240);  // fall back to the optimized default
        cout << "使用优化分辨率: " << cameraResolution.width << "x" << cameraResolution.height << endl;
    }

    // Precompute the stereo rectification remap tables.
    Mat mapL1, mapL2, mapR1, mapR2;
    initUndistortRectifyMap(M1, D1, R1, P1, cameraResolution, CV_16SC2, mapL1, mapL2);
    initUndistortRectifyMap(M2, D2, R2, P2, cameraResolution, CV_16SC2, mapR1, mapR2);
    cout << "立体校正映射初始化完成" << endl;

    // Create the SGBM stereo matcher (tuned for speed on the Pi).
    cout << endl << "[初始化立体匹配器]" << endl;
    int numDisparitiesMul = 3;  // lowered from 6 to 3 to reduce computation
    int blockSize = 5;          // lowered from 8 to 5
    int uniquenessRatio = 10;
    int speckleWindowSize = 50;  // reduced speckle filtering
    int speckleRange = 16;
    int disp12MaxDiff = 1;

    Ptr<StereoSGBM> sgbm = StereoSGBM::create(0, numDisparitiesMul * 16, blockSize);
    sgbm->setP1(8 * 1 * blockSize * blockSize);
    sgbm->setP2(32 * 1 * blockSize * blockSize);
    sgbm->setMode(StereoSGBM::MODE_SGBM);
    sgbm->setUniquenessRatio(uniquenessRatio);
    sgbm->setSpeckleWindowSize(speckleWindowSize);
    sgbm->setSpeckleRange(speckleRange);
    sgbm->setDisp12MaxDiff(disp12MaxDiff);
    
    cout << "SGBM参数初始化完成 (优化配置)" << endl;

    // Open the cameras from their UDP streams.
    cout << endl << "[初始化UDP视频流]" << endl;
    bool leftCamReady = false;
    bool rightCamReady = false;
    
    if (cameras[0].detected) {
        leftCamReady = my_utils::initCameraUDP(capL, cameras[0].udp_url, cameras[0].name);
    }
    if (cameras[1].detected) {
        rightCamReady = my_utils::initCameraUDP(capR, cameras[1].udp_url, cameras[1].name);
    }
    
    if (!leftCamReady && !rightCamReady) {
        cerr << "[错误] 无法初始化任何UDP流" << endl;
        my_utils::stopRpicamStreams();
        return -1;
    }

    // Create the display windows; fall back to headless mode on failure.
    cout << endl << "[创建显示窗口]" << endl;
    bool guiAvailable = true;
    
    try {
        namedWindow("视差图", WINDOW_NORMAL);
        resizeWindow("视差图", 320, 240);
        
        // Give HighGUI a moment to realize the window.
        waitKey(100);
        
        // Create the per-camera windows.
        if (leftCamReady) {
            namedWindow("左相机-原始", WINDOW_NORMAL);
            namedWindow("左相机-校正后", WINDOW_NORMAL);
        }
        if (rightCamReady) {
            namedWindow("右相机-原始", WINDOW_NORMAL);
            namedWindow("右相机-校正后", WINDOW_NORMAL);
        }
        
        // Check whether the window actually became visible.
        double prop = getWindowProperty("视差图", WND_PROP_VISIBLE);
        if (prop < 0) {
            cerr << "[警告] GUI 窗口未能正确初始化，将使用无GUI模式运行" << endl;
            guiAvailable = false;
        }
    }
    catch (const cv::Exception& e) {
        cerr << "[警告] 创建窗口失败: " << e.what() << endl;
        cerr << "将使用无GUI模式运行" << endl;
        guiAvailable = false;
    }

    // Trackbar-backed copies of the SGBM tuning parameters.
    int trackbar_numDisparities = numDisparitiesMul;
    int trackbar_blockSize = blockSize;
    int trackbar_uniquenessRatio = uniquenessRatio;
    int trackbar_speckleWindowSize = speckleWindowSize;
    int trackbar_speckleRange = speckleRange;

    // Only create trackbars and the mouse callback when the GUI works.
    if (guiAvailable)
    {
        cout << "GUI可用，创建轨迹条和鼠标回调..." << endl;
        
        // Create the trackbars without letting HighGUI exceptions escape.
        bool trackbar1 = createTrackbarSafe("视差范围(x16)", "视差图", &trackbar_numDisparities, 8);
        bool trackbar2 = createTrackbarSafe("块大小", "视差图", &trackbar_blockSize, 15);
        bool trackbar3 = createTrackbarSafe("唯一性比率", "视差图", &trackbar_uniquenessRatio, 50);
        bool trackbar4 = createTrackbarSafe("斑点窗口", "视差图", &trackbar_speckleWindowSize, 100);
        bool trackbar5 = createTrackbarSafe("斑点范围", "视差图", &trackbar_speckleRange, 50);

        // Attach the depth-readout mouse callback.
        try {
            setMouseCallback("视差图", onMouse);
            cout << "鼠标回调设置成功" << endl;
        }
        catch (const cv::Exception& e) {
            cerr << "[警告] 设置鼠标回调失败: " << e.what() << endl;
        }
    }
    else
    {
        cout << "GUI不可用，使用默认参数运行" << endl;
    }
    
    cout << "显示窗口初始化完成" << endl;

    // Ensure the output directory exists.
    if (!fs::exists("data/depth"))
    {
        fs::create_directories("data/depth");
        cout << "创建数据保存目录: data/depth/" << endl;
    }

    // Print the interactive usage instructions.
    cout << endl << "==============================================" << endl;
    cout << "              操作说明" << endl;
    cout << "==============================================" << endl;
    if (guiAvailable)
    {
        cout << "轨迹条控制:" << endl;
        cout << "  - 视差范围: 调整立体匹配的视差搜索范围" << endl;
        cout << "  - 块大小:   调整匹配块的大小（奇数）" << endl;
        cout << "  - 唯一性比率: 确保匹配的唯一性" << endl;
        cout << "  - 斑点窗口:  过滤小斑点的窗口大小" << endl;
        cout << "  - 斑点范围:  斑点过滤的视差范围" << endl;
        cout << endl;
        cout << "鼠标操作:" << endl;
        cout << "  点击视差图任意位置显示该点深度信息" << endl;
    }
    cout << "键盘快捷键:" << endl;
    cout << "  's' - 保存当前视差图和点云" << endl;
    cout << "  'd' - 切换调试信息显示" << endl;
    cout << "  'p' - 暂停/继续视频流" << endl;
    cout << "  'r' - 重新启动摄像头流" << endl;
    cout << "  ESC - 退出程序" << endl;
    cout << "==============================================" << endl;

    int frameIdx = 0;
    bool debugMode = false;
    bool pauseMode = false;
    bool firstFrame = true;
    // Exponentially smoothed frame-rate estimates.
    double fps = 0;
    double capture_fps = 0;
    double process_fps = 0;
    auto lastTime = chrono::high_resolution_clock::now();
    auto lastCaptureTime = chrono::high_resolution_clock::now();

    // Pre-fill frameL/frameR so the display path never sees empty mats.
    frameL = Mat::zeros(cameraResolution, CV_8UC3);
    frameR = Mat::zeros(cameraResolution, CV_8UC3);

    cout << "开始实时深度计算..." << endl;
    cout << "目标帧率: 15-30 FPS" << endl;
    cout << "处理分辨率: " << cameraResolution.width << "x" << cameraResolution.height << endl;

    // Main capture / rectify / match / display loop.
    while (true)
    {
        auto currentTime = chrono::high_resolution_clock::now();
        double elapsed = chrono::duration<double>(currentTime - lastTime).count();
        
        if (!pauseMode)
        {
            auto processStartTime = chrono::high_resolution_clock::now();
            
            bool successL = true, successR = true;
            
            // Low-latency capture (drops buffered frames first).
            if (leftCamReady) {
                successL = my_utils::captureVideoFrameOptimized(capL, frameL, cameras[0].name);
            }
            
            if (rightCamReady) {
                successR = my_utils::captureVideoFrameOptimized(capR, frameR, cameras[1].name);
            }
            
            if (!successL || !successR || frameL.empty() || frameR.empty())
            {
                cerr << "[警告] 捕获图像失败，跳过本帧" << endl;
                this_thread::sleep_for(chrono::milliseconds(10));
                continue;
            }

            // Scale the captured frames if they differ from the working resolution.
            if (frameL.cols != cameraResolution.width || frameL.rows != cameraResolution.height)
            {
                resize(frameL, frameL, cameraResolution);
                resize(frameR, frameR, cameraResolution);
            }

            // Update the smoothed capture frame rate.
            auto captureEndTime = chrono::high_resolution_clock::now();
            double captureElapsed = chrono::duration<double>(captureEndTime - lastCaptureTime).count();
            capture_fps = 0.9 * capture_fps + 0.1 * (1.0 / captureElapsed);
            lastCaptureTime = captureEndTime;

            // Stereo rectification.
            remap(frameL, left_rect, mapL1, mapL2, INTER_LINEAR);
            remap(frameR, right_rect, mapR1, mapR2, INTER_LINEAR);

            // Convert to grayscale for stereo matching.
            Mat grayL, grayR;
            cvtColor(left_rect, grayL, COLOR_BGR2GRAY);
            cvtColor(right_rect, grayR, COLOR_BGR2GRAY);

            // Use trackbar values when the GUI is up, otherwise the defaults.
            int numDispMulTB = guiAvailable ? trackbar_numDisparities : numDisparitiesMul;
            int blockSizeTB = guiAvailable ? trackbar_blockSize : blockSize;
            int uniquenessRatioTB = guiAvailable ? trackbar_uniquenessRatio : uniquenessRatio;
            int speckleWindowSizeTB = guiAvailable ? trackbar_speckleWindowSize : speckleWindowSize;
            int speckleRangeTB = guiAvailable ? trackbar_speckleRange : speckleRange;

            // SGBM requires an odd block size >= 3.
            if (blockSizeTB % 2 == 0) blockSizeTB += 1;
            if (blockSizeTB < 3) blockSizeTB = 3;
            
            int numDisp = numDispMulTB * 16;

            // Push the (possibly updated) parameters into the matcher.
            sgbm->setNumDisparities(numDisp);
            sgbm->setBlockSize(blockSizeTB);
            sgbm->setP1(8 * 1 * blockSizeTB * blockSizeTB);
            sgbm->setP2(32 * 1 * blockSizeTB * blockSizeTB);
            sgbm->setUniquenessRatio(uniquenessRatioTB);
            sgbm->setSpeckleWindowSize(speckleWindowSizeTB);
            sgbm->setSpeckleRange(speckleRangeTB);

            // Compute the disparity map (CV_16S fixed-point, x16).
            sgbm->compute(grayL, grayR, disparity);

            // Scale to 8 bits for display (max raw value is numDisp * 16).
            disparity.convertTo(disp8_for_show, CV_8U, 255.0 / (numDisp * 16.0));
            
            // Update the smoothed processing and total frame rates.
            auto processEndTime = chrono::high_resolution_clock::now();
            double processElapsed = chrono::duration<double>(processEndTime - processStartTime).count();
            process_fps = 0.9 * process_fps + 0.1 * (1.0 / processElapsed);
            
            fps = 0.9 * fps + 0.1 * (1.0 / elapsed);
            
            // Fit the windows to the first real frame.
            if (firstFrame && guiAvailable)
            {
                if (leftCamReady) {
                    resizeWindowToFit("左相机-原始", frameL);
                    resizeWindowToFit("左相机-校正后", left_rect);
                }
                if (rightCamReady) {
                    resizeWindowToFit("右相机-原始", frameR);
                    resizeWindowToFit("右相机-校正后", right_rect);
                }
                resizeWindowToFit("视差图", disp8_for_show);
                firstFrame = false;
            }
        }

        lastTime = currentTime;

        // Render the frames only when the GUI is available.
        if (guiAvailable)
        {
            // Overlay texts shared by all windows.
            string totalFpsText = "总FPS: " + to_string((int)fps);
            string captureFpsText = "捕获FPS: " + to_string((int)capture_fps);
            string processFpsText = "处理FPS: " + to_string((int)process_fps);
            
            if (leftCamReady) {
                Mat left_display = frameL.clone();
                Mat left_rect_display = left_rect.clone();
                
                if (debugMode) {
                    putTextWithBackground(left_display, "左相机-原始", Point(10, 30), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                    putTextWithBackground(left_display, totalFpsText, Point(10, 60), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                    putTextWithBackground(left_rect_display, "左相机-校正后", Point(10, 30), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                    putTextWithBackground(left_rect_display, processFpsText, Point(10, 60), 0.6, 2, Scalar(255,165,0), Scalar(0,0,0));
                } else {
                    putTextWithBackground(left_display, totalFpsText, Point(10, 30), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                    putTextWithBackground(left_rect_display, totalFpsText, Point(10, 30), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                }
                
                imshow("左相机-原始", left_display);
                imshow("左相机-校正后", left_rect_display);
            }
            
            if (rightCamReady) {
                Mat right_display = frameR.clone();
                Mat right_rect_display = right_rect.clone();
                
                if (debugMode) {
                    putTextWithBackground(right_display, "右相机-原始", Point(10, 30), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                    putTextWithBackground(right_display, totalFpsText, Point(10, 60), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                    putTextWithBackground(right_rect_display, "右相机-校正后", Point(10, 30), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                    putTextWithBackground(right_rect_display, processFpsText, Point(10, 60), 0.6, 2, Scalar(255,165,0), Scalar(0,0,0));
                } else {
                    putTextWithBackground(right_display, totalFpsText, Point(10, 30), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                    putTextWithBackground(right_rect_display, totalFpsText, Point(10, 30), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                }
                
                imshow("右相机-原始", right_display);
                imshow("右相机-校正后", right_rect_display);
            }
            
            // Render the disparity window.
            Mat disparity_display = disp8_for_show.clone();
            if (debugMode) {
                // NOTE(review): this shows the startup numDisparitiesMul, not
                // the current trackbar value (numDispMulTB) — confirm intent.
                string dispInfo = "视差图 - 范围: " + to_string(numDisparitiesMul * 16);
                putTextWithBackground(disparity_display, dispInfo, Point(10, 30), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                putTextWithBackground(disparity_display, totalFpsText, Point(10, 60), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
                putTextWithBackground(disparity_display, processFpsText, Point(10, 90), 0.6, 2, Scalar(255,165,0), Scalar(0,0,0));
                if (pauseMode) {
                    putTextWithBackground(disparity_display, "已暂停", Point(10, 120), 0.6, 2, Scalar(0,0,255), Scalar(0,0,0));
                }
            } else {
                putTextWithBackground(disparity_display, totalFpsText, Point(10, 30), 0.6, 2, Scalar(0,255,0), Scalar(0,0,0));
            }
            
            imshow("视差图", disparity_display);
        }
        else
        {
            // Headless mode: print a status line every 30 frames.
            if (frameIdx % 30 == 0)
            {
                cout << "总FPS: " << (int)fps << " | 捕获FPS: " << (int)capture_fps 
                     << " | 处理FPS: " << (int)process_fps << " | 帧: " << frameIdx;
                if (pauseMode) cout << " | 已暂停";
                if (debugMode) cout << " | 调试模式";
                cout << endl;
            }
        }

        // Keyboard handling: HighGUI key when the GUI is up, with the
        // terminal's non-blocking key taking precedence when pressed.
        int key = guiAvailable ? waitKey(1) : 0;
        int terminalKey = getKeyNonBlocking();
        if (terminalKey != EOF)
        {
            key = terminalKey;
        }

        if (key == 27) // ESC
        {
            cout << "程序退出" << endl;
            break;
        }
        else if (key == 's' || key == 'S')
        {
            // Save the current disparity map, then (on success) the point cloud.
            string dispPath = "data/depth/disparity_" + to_string(frameIdx) + ".png";
            bool ok = my_utils::saveDisparityAs16BitPNG(disparity, dispPath);
            cout << "[保存] 视差图: " << dispPath << " -> " << (ok ? "成功" : "失败") << endl;

            if (ok)
            {
                // NOTE(review): disparity is CV_16S fixed-point (x16);
                // reprojectImageTo3D expects real disparity values, so this
                // should divide by 16 first — verify depths in the saved cloud.
                Mat points3d;
                reprojectImageTo3D(disparity, points3d, Q, true);
                string plyPath = "data/depth/pointcloud_" + to_string(frameIdx) + ".ply";
                bool ok2 = my_utils::savePointCloudPLY(points3d, left_rect, plyPath);
                cout << "[保存] 点云: " << plyPath << " -> " << (ok2 ? "成功" : "失败") << endl;
                
                if (ok2)
                {
                    // NOTE(review): frameIdx is also incremented at the end of
                    // the loop, so saved file indices skip a number here.
                    frameIdx++;
                    cout << "[信息] 已保存第 " << frameIdx << " 组数据" << endl;
                }
            }
        }
        else if (key == 'd' || key == 'D')
        {
            debugMode = !debugMode;
            cout << "调试模式 " << (debugMode ? "开启" : "关闭") << endl;
        }
        else if (key == 'p' || key == 'P')
        {
            pauseMode = !pauseMode;
            cout << "视频流 " << (pauseMode ? "暂停" : "继续") << endl;
        }
        else if (key == 'r' || key == 'R')
        {
            // Full stream restart: release captures, kill rpicam-vid, relaunch.
            cout << "重新启动摄像头流..." << endl;
            capL.release();
            capR.release();
            my_utils::stopRpicamStreams();
            
            // Relaunch the rpicam streams.
            left_stream_started = my_utils::startRpicamStream("左相机", left_port, 320, 240, 30);
            right_stream_started = my_utils::startRpicamStream("右相机", right_port, 320, 240, 30);
            
            // Wait for the streams to stabilize.
            this_thread::sleep_for(chrono::seconds(3));
            
            // Re-open the captures.
            leftCamReady = false;
            rightCamReady = false;
            
            if (left_stream_started) {
                leftCamReady = my_utils::initCameraUDP(capL, udp_left, "左相机");
            }
            if (right_stream_started) {
                rightCamReady = my_utils::initCameraUDP(capR, udp_right, "右相机");
            }
        }

        frameIdx++;
    }

    // Release captures, kill the streams, and tear down the GUI.
    cout << "清理资源..." << endl;
    capL.release();
    capR.release();
    my_utils::stopRpicamStreams();

    if (guiAvailable)
    {
        destroyAllWindows();
    }

    cout << endl << "==============================================" << endl;
    cout << "程序执行完成" << endl;
    cout << "总共处理帧数: " << frameIdx << endl;
    cout << "数据保存位置: data/depth/" << endl;
    cout << "最终帧率 - 总FPS: " << (int)fps << " | 捕获FPS: " << (int)capture_fps 
         << " | 处理FPS: " << (int)process_fps << endl;
    cout << "==============================================" << endl;

    return 0;
}