#include "SparseReconstructor.h"


// Worker thread that performs sparse 3D reconstruction off the GUI thread.
// NOTE(review): m_stopFlag is written from stop() (caller thread) and read in
// run() (worker thread); confirm it is declared std::atomic<bool> in the header.
SparseRecontructor::SparseRecontructor(QObject *parent):QThread(parent),m_stopFlag(false)
{

}

// Request cancellation and block until run() returns, so the worker thread
// never outlives this object (QThread warns/crashes if destroyed while running).
SparseRecontructor::~SparseRecontructor()
{
	stop();
	wait();
}

// Stores the list of image file paths the next run() will reconstruct from.
// NOTE(review): not synchronized — call only while the worker is not running.
void SparseRecontructor::setImagesPath(const std::vector<std::string>& imagePaths)
{
	m_imagePaths = imagePaths;

}

// Asks the worker loop to exit at its next cancellation checkpoint; run()
// resets the flag when it starts.
void SparseRecontructor::stop()
{
	m_stopFlag = true;
}



// Worker entry point: load + normalize images, extract SIFT features, match
// consecutive pairs, triangulate a sparse colored point cloud, and emit it.
// Progress and errors are reported via signals; returning early aborts the job.
void SparseRecontructor::run()
{
	m_stopFlag = false;

	// Stage 1: load every input image and normalize it to 8-bit 3-channel BGR.
	emit progressUpdate(10, "开始读取图片");
	std::vector<cv::Mat> images;
	for (const std::string& path : m_imagePaths) {
		if (m_stopFlag) return; // allow cancellation during slow disk reads

		cv::Mat img = cv::imread(path);
		if (img.empty()) {
			emit errorOccurred(QString("读取图片失败:%1").arg(QString::fromStdString(path)));
			return;
		}

		// Force a common resolution so features match across differently sized
		// inputs. NOTE(review): this ignores aspect ratio — confirm the 800x600
		// distortion is acceptable for the capture setup.
		const cv::Size targetSize(800, 600);
		cv::resize(img, img, targetSize);

		// Normalize depth: rescale 16/32-bit samples into the 8-bit range.
		if (img.depth() != CV_8U) {
			img.convertTo(img, CV_8UC(img.channels()), 255.0 / (pow(2, 8 * img.elemSize1()) - 1));
		}

		// Normalize channels: everything becomes 3-channel BGR.
		cv::Mat img_3ch;
		if (img.channels() == 1) {
			cv::cvtColor(img, img_3ch, cv::COLOR_GRAY2BGR);
		}
		else if (img.channels() == 3) {
			img_3ch = img.clone();
		}
		else if (img.channels() == 4) {
			cv::cvtColor(img, img_3ch, cv::COLOR_BGRA2BGR);
		}
		else {
			// Fixed: the original message contained mojibake ("ͨ"), leftovers of
			// a badly encoded "通道"; rewritten as a readable message.
			emit errorOccurred(QString("图片 %1 通道数不支持: %2").arg(QString::fromStdString(path)).arg(img.channels()));
			return;
		}
		if (img_3ch.type() != CV_8UC3) {
			emit errorOccurred(QString("%1转换图片失败").arg(QString::fromStdString(path)));
			return;
		}
		images.push_back(img_3ch);
	}

	if (images.size() < 2) {
		emit errorOccurred("图片太少");
		return;
	}

	// Stage 2: SIFT keypoints + descriptors for every image.
	emit progressUpdate(30, "特征提取...");
	std::vector<std::vector<cv::KeyPoint>>keyPoints;
	std::vector<cv::Mat> descriptors;
	if (!extractFeatures(images, keyPoints, descriptors) || m_stopFlag) {
		if (!m_stopFlag)
            emit errorOccurred("特征提取失败");
		return;
	}

	// Stage 3: match descriptors between consecutive image pairs.
	emit progressUpdate(50, "特征匹配");
	std::vector<std::vector<cv::DMatch>> matches;
	if (!matchFeatures(descriptors, matches) || m_stopFlag) {
		if (!m_stopFlag) emit errorOccurred("特征匹配失败");
		return;
	}

	// Stage 4: triangulate matched points into a sparse colored cloud.
	emit progressUpdate(70, "三维重建...");
	pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = reconstruct3D(images, keyPoints, matches);
	if (m_stopFlag) return;

	if (!cloud || cloud->empty()) {
		emit errorOccurred("三维重建失败");
		return;
	}

	emit progressUpdate(100, "完成");

	// Hand the cloud across threads; the shared_ptr type must be registered
	// with qRegisterMetaType for the queued connection to work.
	emit reconstructFinished(cloud);
}

bool SparseRecontructor::extractFeatures(const std::vector<cv::Mat>& images, std::vector<std::vector<cv::KeyPoint>>& keypoints, std::vector<cv::Mat>& descriptors)
{
	cv::Ptr<cv::SIFT> sift = cv::SIFT::create();
	keypoints.resize(images.size());
	descriptors.resize(images.size());

	for (size_t i = 0; i < images.size(); ++i)
	{
		if (m_stopFlag) return false; 
		cv::Mat gray;
		cv::cvtColor(images[i], gray, cv::COLOR_BGR2GRAY);
		sift->detectAndCompute(gray, cv::noArray(), keypoints[i], descriptors[i]);
		if (keypoints[i].empty()) return false;
	}
	return true;
}

// FLANN-based KNN matching between consecutive images, filtered with Lowe's
// ratio test. Returns false on cancellation or when any pair yields fewer
// than 10 surviving matches. (Legacy variant; run() uses matchFeatures.)
bool SparseRecontructor::matchFeatures1(std::vector<cv::Mat>& descriptors, std::vector<std::vector<cv::DMatch>>& matches)
{
	// Fixed: with fewer than 2 descriptor sets, size()-1 underflowed size_t.
	if (descriptors.size() < 2) return false;

	cv::Ptr<cv::DescriptorMatcher> matcher = cv::DescriptorMatcher::create(cv::DescriptorMatcher::FLANNBASED);
	matches.resize(descriptors.size() - 1);

	for (size_t i = 0; i < descriptors.size() - 1; i++)
	{
		if (m_stopFlag) return false;
		std::vector<std::vector<cv::DMatch>> knn_matches;
		matcher->knnMatch(descriptors[i], descriptors[i + 1], knn_matches, 2);

		const float ratio_thresh = 0.7f;
		for (size_t j = 0; j < knn_matches.size(); j++)
		{
			// Fixed: guard against fewer than 2 neighbours before reading [1]
			// (matchFeatures already checks this; here it read out of bounds).
			if (knn_matches[j].size() >= 2 &&
				knn_matches[j][0].distance < ratio_thresh * knn_matches[j][1].distance) {
				matches[i].push_back(knn_matches[j][0]);
			}
		}

		if (matches[i].size() < 10) return false;
	}
	return true;
}

// Pairwise (i, i+1) matching with brute-force cross-checking; when too few
// matches survive the cross-check, supplements them with ratio-tested KNN
// matches. A weak pair is skipped (left empty) rather than failing the whole
// job; returns true when at least one pair produced a usable match set.
bool SparseRecontructor::matchFeatures(std::vector<cv::Mat>& descriptors, std::vector<std::vector<cv::DMatch>>& matches)
{
    // Fixed: with fewer than 2 descriptor sets, size()-1 underflowed size_t.
    if (descriptors.size() < 2) return false;

    // Brute-force matcher: slower than FLANN but more reliable on low-texture
    // scenes and large viewpoint changes.
    cv::Ptr<cv::DescriptorMatcher> matcher = cv::DescriptorMatcher::create(cv::DescriptorMatcher::BRUTEFORCE);

    matches.resize(descriptors.size() - 1);
    bool hasValidMatch = false; // at least one pair produced a usable set

    for (size_t i = 0; i < descriptors.size() - 1; i++)
    {
        if (m_stopFlag) return false;

        std::vector<cv::DMatch> forwardMatches, backwardMatches;
        matcher->match(descriptors[i], descriptors[i + 1], forwardMatches);
        matcher->match(descriptors[i + 1], descriptors[i], backwardMatches);

        // Cross-check in O(n): record, for each feature of image i+1, the
        // feature of image i it matched back to, then keep forward matches
        // whose reverse agrees. (The original compared every forward match
        // against every backward match — O(n^2) — with the same accepted set,
        // since match() yields exactly one match per query descriptor.)
        std::vector<int> backTo(static_cast<size_t>(descriptors[i + 1].rows), -1);
        for (const auto& bwdMatch : backwardMatches) {
            if (bwdMatch.queryIdx >= 0 && bwdMatch.queryIdx < static_cast<int>(backTo.size()))
                backTo[bwdMatch.queryIdx] = bwdMatch.trainIdx;
        }

        std::vector<cv::DMatch> validMatches;
        for (const auto& fwdMatch : forwardMatches) {
            if (fwdMatch.trainIdx >= 0 && fwdMatch.trainIdx < static_cast<int>(backTo.size()) &&
                backTo[fwdMatch.trainIdx] == fwdMatch.queryIdx) {
                validMatches.push_back(fwdMatch);
            }
        }

        // Too few survivors: fall back to KNN with a relaxed Lowe ratio (0.85)
        // to keep more candidates on low-texture imagery.
        const float ratio_thresh = 0.85f;
        if (validMatches.size() < 10) {
            std::vector<std::vector<cv::DMatch>> knn_matches;
            matcher->knnMatch(descriptors[i], descriptors[i + 1], knn_matches, 2);
            for (size_t j = 0; j < knn_matches.size(); j++) {
                if (knn_matches[j].size() >= 2 && knn_matches[j][0].distance < ratio_thresh * knn_matches[j][1].distance) {
                    validMatches.push_back(knn_matches[j][0]);
                }
            }
        }

        // Pairwise tolerance: log and skip a weak pair instead of aborting.
        if (validMatches.size() < 8) {
            qDebug() << "图像对" << i << "-" << i + 1 << "匹配点不足：" << validMatches.size() << "个";
            matches[i].clear();
            continue;
        }

        matches[i] = validMatches;
        hasValidMatch = true;
    }

    return hasValidMatch;
}

pcl::PointCloud<pcl::PointXYZRGB>::Ptr SparseRecontructor::reconstruct3D(
    const std::vector<cv::Mat>& images,
    const std::vector<std::vector<cv::KeyPoint>>& keypoints,
    const std::vector<std::vector<cv::DMatch>>& matches)
{
    qDebug() << "===== 进入 reconstruct3D 函数 =====";
    qDebug() << "图像总数：" << images.size() << "，关键点组数：" << keypoints.size() << "，匹配对数：" << matches.size();

    // 校验输入图像基本信息
    if (!images.empty()) {
        const cv::Mat& firstImg = images[0];
        qDebug() << "第一张图像：type=" << firstImg.type() << "（CV_8UC3=16），cols=" << firstImg.cols << "，rows=" << firstImg.rows
            << "，channels=" << firstImg.channels() << "，depth=" << firstImg.depth() << "，data指针=" << (void*)firstImg.data;
    }

    if (m_stopFlag) {
        qDebug() << "触发停止标志，返回nullptr";
        return nullptr;
    }

    if (images.size() < 2 || keypoints.size() != images.size() || matches.size() != images.size() - 1) {
        qDebug() << "输入参数不匹配：图像数<2 或 关键点/匹配对数量不匹配";
        return pcl::PointCloud<pcl::PointXYZRGB>::Ptr(new pcl::PointCloud<pcl::PointXYZRGB>());
    }

    double fx = 500.0;
    double fy = 500.0;
    double cx = images[0].cols / 2.0;
    double cy = images[0].rows / 2.0;
    cv::Mat K = (cv::Mat_<double>(3, 3) << fx, 0, cx, 0, fy, cy, 0, 0, 1);
    qDebug() << "相机内参 K 初始化完成";

    pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>());
    cloud->reserve(100000);
    qDebug() << "点云容器初始化完成，预留100000个点";

    for (size_t i = 0; i < matches.size(); i++) {
        qDebug() << "\n===== 处理第" << i << "组匹配对（图像" << i << "-" << i + 1 << "）=====";
        if (m_stopFlag) {
            qDebug() << "第" << i << "组匹配处理：触发停止标志，返回nullptr";
            return nullptr;
        }

        const auto& currentMatches = matches[i];
        qDebug() << "第" << i << "组匹配对数量：" << currentMatches.size();
        if (currentMatches.empty()) {
            qDebug() << "第" << i << "组匹配对为空，跳过";
            continue;
        }

        // 构建 points1/points2（带日志）
        std::vector<cv::Point2f> points1, points2;
        points1.reserve(currentMatches.size());
        points2.reserve(currentMatches.size());

        if (i >= keypoints.size() || (i + 1) >= keypoints.size()) {
            qDebug() << "第" << i << "组：keypoints索引越界（i=" << i << "，keypoints.size=" << keypoints.size() << "），跳过";
            continue;
        }
        if (keypoints[i].empty() || keypoints[i + 1].empty()) {
            qDebug() << "第" << i << "组：图像" << i << "关键点数量=" << keypoints[i].size()
                << "，图像" << i + 1 << "关键点数量=" << keypoints[i + 1].size() << "，跳过";
            continue;
        }

        int invalidMatchCount = 0;
        for (const cv::DMatch& match : currentMatches) {
            if (match.queryIdx < 0 || match.queryIdx >= static_cast<int>(keypoints[i].size()) ||
                match.trainIdx < 0 || match.trainIdx >= static_cast<int>(keypoints[i + 1].size())) {
                invalidMatchCount++;
                continue;
            }
            points1.push_back(keypoints[i][match.queryIdx].pt);
            points2.push_back(keypoints[i + 1][match.trainIdx].pt);
        }
        qDebug() << "第" << i << "组：有效匹配点数量=" << points1.size() << "，无效匹配点数量=" << invalidMatchCount;
        if (points1.empty() || points2.empty()) {
            qDebug() << "第" << i << "组：有效匹配点为空，跳过";
            continue;
        }

        // 计算基础矩阵（带日志）
        double ransacThreshold = 1.0;
        std::vector<uchar> inlierMask;
        qDebug() << "第" << i << "组：开始计算基础矩阵（输入点数量=" << points1.size() << "）";
        cv::Mat fundamentalMatrix = cv::findFundamentalMat(
            points1, points2, cv::FM_RANSAC, ransacThreshold, 0.99, inlierMask
        );
        int inlierCount = cv::countNonZero(inlierMask);
        qDebug() << "第" << i << "组：基础矩阵计算完成，内点数量=" << inlierCount;
        if (fundamentalMatrix.empty() || inlierCount < 8) {
            qDebug() << "第" << i << "组：基础矩阵为空或内点不足8个，跳过";
            continue;
        }

        // 提取内点（带日志）
        std::vector<cv::Point2f> inlierPoints1, inlierPoints2;
        for (size_t j = 0; j < inlierMask.size(); j++) {
            if (inlierMask[j]) {
                inlierPoints1.push_back(points1[j]);
                inlierPoints2.push_back(points2[j]);
            }
        }
        qDebug() << "第" << i << "组：提取内点完成，内点数量=" << inlierPoints1.size();
        if (inlierPoints1.size() < 8) {
            qDebug() << "第" << i << "组：内点数量不足8个，跳过";
            continue;
        }

        // 计算本质矩阵和位姿（带日志）
        qDebug() << "第" << i << "组：开始计算本质矩阵和位姿";
        cv::Mat essentialMatrix = K.t() * fundamentalMatrix * K;
        cv::Mat R, t;
        int recoverInlierCount = cv::recoverPose(essentialMatrix, inlierPoints1, inlierPoints2, K, R, t);
        qDebug() << "第" << i << "组：位姿恢复完成，内点数量=" << recoverInlierCount;
        if (recoverInlierCount < 8) {
            qDebug() << "第" << i << "组：位姿恢复内点不足8个，跳过";
            continue;
        }

        // 三角化（带日志）
        cv::Mat projection1 = (cv::Mat_<double>(3, 4) << 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
        cv::Mat RT;
        cv::hconcat(R, t, RT);
        cv::Mat projection2 = K * RT;
        qDebug() << "第" << i << "组：开始三角化（内点数量=" << inlierPoints1.size() << "）";
        cv::Mat points3D_hom;
        // cv::Mat(inlierPoints1).t() 会创建一个临时的转置矩阵，而 OpenCV 4.4.0 中对临时矩阵的at访问存在兼容性问题（尤其是转置后的数据布局变化），
        // 导致后续隐式访问时触发断言
       /* cv::triangulatePoints(
            projection1, projection2,
            cv::Mat(inlierPoints1).t(),
            cv::Mat(inlierPoints2).t(),
            points3D_hom
        );*/
        // 显式创建矩阵并转置，用clone()确保数据连续（避免临时矩阵问题）
        // 关键修复：先转 Mat 再克隆（兼容 OpenCV 4.4.0，解决 MatExpr 无 clone() 问题）
        cv::Mat pts1_mat(inlierPoints1);
        cv::Mat pts1_t = pts1_mat.t();  // MatExpr → Mat（通过赋值转换）
        pts1_t = pts1_t.clone();        // 克隆为连续矩阵

        cv::Mat pts2_mat(inlierPoints2);
        cv::Mat pts2_t = pts2_mat.t();  // 同理
        pts2_t = pts2_t.clone();

        // 用连续矩阵作为输入
        cv::triangulatePoints(
            projection1, projection2,
            pts1_t,  // 不再用临时转置矩阵
            pts2_t,
            points3D_hom
        );
        qDebug() << "第" << i << "组：三角化完成，3D点数量（齐次）=" << points3D_hom.cols;

        // 图像格式转换（带详细日志）
        cv::Mat img_8uc3;
        const cv::Mat& currentImg = images[i];
        int imgDepth = currentImg.depth();
        int imgChannels = currentImg.channels();
        qDebug() << "第" << i << "组：当前处理图像（索引" << i << "）- type=" << currentImg.type()
            << "，depth=" << imgDepth << "，channels=" << imgChannels
            << "，cols=" << currentImg.cols << "，rows=" << currentImg.rows
            << "，data指针=" << (void*)currentImg.data;

        // 第一步：处理数据类型（非8位→8位）
        cv::Mat img_8u;
        if (imgDepth != CV_8U) {
            qDebug() << "第" << i << "组：图像非8位深度，开始转换为8位";
            currentImg.convertTo(img_8u, CV_8UC(imgChannels), 255.0 / (pow(2, 8 * currentImg.elemSize1()) - 1));
            qDebug() << "第" << i << "组：8位转换完成 - type=" << img_8u.type() << "，channels=" << img_8u.channels() << "，data指针=" << (void*)img_8u.data;
        }
        else {
            img_8u = currentImg;
            qDebug() << "第" << i << "组：图像已是8位深度，直接使用 - type=" << img_8u.type();
        }

        // 第二步：处理通道数（确保3通道）
        if (img_8u.channels() == 1) {
            qDebug() << "第" << i << "组：单通道图像，转换为BGR";
            cv::cvtColor(img_8u, img_8uc3, cv::COLOR_GRAY2BGR);
        }
        else if (img_8u.channels() == 4) {
            qDebug() << "第" << i << "组：4通道图像，转换为BGR";
            cv::cvtColor(img_8u, img_8uc3, cv::COLOR_BGRA2BGR);
        }
        else {
            qDebug() << "第" << i << "组：" << img_8u.channels() << "通道图像，直接克隆";
            img_8uc3 = img_8u.clone();
        }
        qDebug() << "第" << i << "组：通道数处理完成 - img_8uc3.type=" << img_8uc3.type()
            << "（CV_8UC3=16），channels=" << img_8uc3.channels() << "，data指针=" << (void*)img_8uc3.data;

        // 终极修复2：强制确保类型是 CV_8UC3（兜底方案）
        if (img_8uc3.type() != CV_8UC3) {
            qDebug() << "第" << i << "组：img_8uc3类型不是CV_8UC3，强制构造";
            img_8uc3.create(currentImg.size(), CV_8UC3);
            if (!img_8u.empty()) {
                img_8u.convertTo(img_8uc3, CV_8UC3);
            }
            qDebug() << "第" << i << "组：强制构造后 - type=" << img_8uc3.type() << "，data指针=" << (void*)img_8uc3.data;
        }

        // 终极修复3：二次容错（带日志）
        if (img_8uc3.empty() || img_8uc3.type() != CV_8UC3) {
            qDebug() << "第" << i << "组：img_8uc3为空或类型错误，使用默认白色点云";
            int imgCols = currentImg.cols;
            int imgRows = currentImg.rows;
            for (int j = 0; j < points3D_hom.cols; ++j) {
                if (m_stopFlag) return nullptr;
                double w = points3D_hom.at<double>(3, j);
                if (w <= 1e-6) continue;

                pcl::PointXYZRGB point;
                point.x = points3D_hom.at<double>(0, j) / w;
                point.y = points3D_hom.at<double>(1, j) / w;
                point.z = points3D_hom.at<double>(2, j) / w;

                if (fabs(point.x) > 10.0 || fabs(point.y) > 10.0 || fabs(point.z) > 10.0) {
                    continue;
                }

                point.r = 255;
                point.g = 255;
                point.b = 255;
                cloud->push_back(point);
            }
            qDebug() << "第" << i << "组：默认白色点云添加完成，跳过后续处理";
            continue;
        }

        // 准备提取颜色（带日志）
        int imgCols = img_8uc3.cols;
        int imgRows = img_8uc3.rows;
        size_t step = img_8uc3.step;
        qDebug() << "第" << i << "组：开始提取颜色 - img_8uc3：cols=" << imgCols << "，rows=" << imgRows
            << "，step=" << step << "，type=" << img_8uc3.type() << "，data指针=" << (void*)img_8uc3.data;

        // 遍历3D点，提取颜色（带逐点日志+指针访问修复）
        for (int j = 0; j < points3D_hom.cols; ++j) {
            qDebug() << "第" << i << "组：处理第" << j << "个3D点（共" << points3D_hom.cols << "个）";
            if (m_stopFlag) {
                qDebug() << "第" << i << "组：处理第" << j << "个点时触发停止标志，返回nullptr";
                return nullptr;
            }

            // 关键修复1：指针访问 points3D_hom，避免 at<double> 类型不匹配
            float w_float = 0.0f;
            float x_float = 0.0f, y_float = 0.0f, z_float = 0.0f;
            if (points3D_hom.empty() || points3D_hom.rows != 4) {
                qDebug() << "第" << i << "组-第" << j << "个点：points3D_hom 格式错误，跳过";
                continue;
            }

            if (points3D_hom.type() == CV_32F) {
                const float* colPtr = points3D_hom.ptr<float>(0) + j * 4; // 齐次坐标：x(0), y(1), z(2), w(3)
                w_float = colPtr[3];
                x_float = colPtr[0];
                y_float = colPtr[1];
                z_float = colPtr[2];
            }
            else if (points3D_hom.type() == CV_64F) {
                const double* colPtr = points3D_hom.ptr<double>(0) + j * 4;
                w_float = static_cast<float>(colPtr[3]);
                x_float = static_cast<float>(colPtr[0]);
                y_float = static_cast<float>(colPtr[1]);
                z_float = static_cast<float>(colPtr[2]);
            }
            else {
                qDebug() << "第" << i << "组-第" << j << "个点：points3D_hom 类型不支持（type=" << points3D_hom.type() << "），跳过";
                continue;
            }

            double w = static_cast<double>(w_float);
            qDebug() << "第" << i << "组-第" << j << "个点：w=" << w;
            if (w <= 1e-6) {
                qDebug() << "第" << i << "组-第" << j << "个点：w<=1e-6，跳过";
                continue;
            }

            // 计算3D坐标（除以w）
            pcl::PointXYZRGB point;
            point.x = x_float / w_float;
            point.y = y_float / w_float;
            point.z = z_float / w_float;
            qDebug() << "第" << i << "组-第" << j << "个点：3D坐标（x=" << point.x << "，y=" << point.y << "，z=" << point.z << "）";

            if (fabs(point.x) > 10.0 || fabs(point.y) > 10.0 || fabs(point.z) > 10.0) {
                qDebug() << "第" << i << "组-第" << j << "个点：坐标超出范围，跳过";
                continue;
            }

            // 图像坐标夹紧（保持不变）
            cv::Point2f pt = inlierPoints1[j];
            int x = std::clamp(static_cast<int>(std::round(pt.x)), 0, imgCols - 1);
            int y = std::clamp(static_cast<int>(std::round(pt.y)), 0, imgRows - 1);
            qDebug() << "第" << i << "组-第" << j << "个点：图像坐标（原始pt.x=" << pt.x << "，pt.y=" << pt.y
                << "，夹紧后x=" << x << "，y=" << y << "）";

            // 颜色提取（保持指针访问，无 at 调用）
            if (!img_8uc3.data) {
                qDebug() << "第" << i << "组-第" << j << "个点：img_8uc3.data为空指针！";
                point.r = 255; point.g = 0; point.b = 0;
                cloud->push_back(point);
                continue;
            }
            size_t offset = y * step + x * 3;
            size_t totalBytes = img_8uc3.total() * img_8uc3.elemSize();
            if (offset + 2 >= totalBytes) {
                qDebug() << "第" << i << "组-第" << j << "个点：offset超出范围，跳过颜色提取";
                point.r = 255; point.g = 0; point.b = 0;
                cloud->push_back(point);
                continue;
            }

            uchar* data = img_8uc3.data;
            uchar b = data[offset];
            uchar g = data[offset + 1];
            uchar r = data[offset + 2];
            qDebug() << "第" << i << "组-第" << j << "个点：颜色（B=" << (int)b << "，G=" << (int)g << "，R=" << (int)r << "）";

            point.r = r;
            point.g = g;
            point.b = b;
            cloud->push_back(point);
            qDebug() << "第" << i << "组-第" << j << "个点：添加到点云完成";
        }
        qDebug() << "第" << i << "组：所有3D点处理完成，当前点云总数=" << cloud->size();
    }

    // 点云下采样（带日志）
    qDebug() << "\n===== 所有匹配对处理完成，开始下采样 =====";
    qDebug() << "下采样前点云数量：" << cloud->size();
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr filteredCloud(new pcl::PointCloud<pcl::PointXYZRGB>());
    pcl::VoxelGrid<pcl::PointXYZRGB> voxelFilter;
    voxelFilter.setInputCloud(cloud);
    voxelFilter.setLeafSize(0.01f, 0.01f, 0.01f);
    voxelFilter.filter(*filteredCloud);
    qDebug() << "下采样后点云数量：" << filteredCloud->size();

    qDebug() << "===== reconstruct3D 函数执行完成 =====";

    return filteredCloud;
}