#include "ImageProcessor.h"
#include "precisionidentify.h"
#include "opencv2/imgproc.hpp"
#include "source/configmanager.h"
#include "source/logger.h"
#include "cvalgedgedetection.h"
#include "errordialogmanager.h"
#include "preprocessor.h"
// Constructor: seeds the cached identification settings from the
// PreProcessor singleton and the persisted parameter table, then wires
// config reloads to valueTableLoad().
PrecisionIdentify::PrecisionIdentify(QObject *parent) : QObject(parent)
{
    auto &pre = PreProcessor::getInstance();
    identifiThresholdAuto_ = pre.identifiThresholdAuto();
    identifiThreshold_ = pre.identifiThreshold();
    markEnhancement_ = ParamValue.getParameter<int>("preprocessing", "markEnhancement");
    // NOTE(review): the smoothing value is persisted under the "contourAreaMin" key.
    markSmoothing_ = ParamValue.getParameter<int>("preprocessing", "contourAreaMin");
    overturn_ = ParamValue.getParameter<int>("preprocessing", "overturn");
    // Re-read cached parameters whenever the value table is (re)loaded.
    connect(&ConfigManager::getInstance(), &ConfigManager::valueTableLoad,
            this, &PrecisionIdentify::valueTableLoad);
}

// Destructor: nothing to release here — members are values or singletons
// owned elsewhere.
PrecisionIdentify::~PrecisionIdentify()
{
}

// Meyers-singleton accessor: the instance is created on first use and
// destroyed automatically at program exit.
PrecisionIdentify &PrecisionIdentify::getInstance()
{
    static PrecisionIdentify singleton;
    return singleton;
}

// Stores the requested matching method. (Note: handleIdentify() currently
// reads the method from PreProcessor, not from this member.)
void PrecisionIdentify::setMethon(Matching method)
{
    LogDebug << "precisionIdentify  setMethon :" << method;
    method_ = method;
}

// Toggles automatic (Otsu) threshold selection; propagates the new state to
// the PreProcessor and notifies QML on change.
void PrecisionIdentify::setidentifiThresholdAuto(bool identifiThresholdAuto)
{
    LogDebug << "setidentifiThresholdAuto set :" << identifiThresholdAuto;
    if (identifiThresholdAuto == identifiThresholdAuto_)
        return;
    identifiThresholdAuto_ = identifiThresholdAuto;
    emit identifiThresholdAutoChanged();
    PreProcessor::getInstance().setIdentifiThresholdAuto(identifiThresholdAuto);
}

// Sets a manual binarization threshold. Setting a manual value switches the
// auto-threshold mode off, persists the value and refreshes the preview.
void PrecisionIdentify::setidentifiThreshold(int identifiThreshold)
{
    LogTrack << "setidentifiThreshold : " << identifiThreshold;
    if (identifiThreshold == identifiThreshold_)
        return;
    // A manual threshold implies auto mode must be disabled.
    if (identifiThresholdAuto_ != 0) {
        identifiThresholdAuto_ = 0;
        emit identifiThresholdAutoChanged();
    }
    identifiThreshold_ = identifiThreshold;
    emit identifiThresholdhanged();
    PreProcessor::getInstance().setIdentifiThreshold(identifiThreshold_);
    testBinaryIdentifyImg();
}

// Sets the polarity/invert mode used by binarizeImage() (0 = auto-detect,
// 2 = force invert), persists it and refreshes the preview.
void PrecisionIdentify::setoverturn(float overturn)
{
    if (overturn_ == overturn)
        return;
    overturn_ = overturn;
    ParamValue.setParameter("preprocessing", "overturn", overturn_);
    emit overturnchanged();
    testBinaryIdentifyImg();
}

// Sets the minimum contour area used to discard noise blobs, persists it
// (legacy key "contourAreaMin") and refreshes the preview.
void PrecisionIdentify::setmarkSmoothing(int markSmoothing)
{
    LogDebug << "setmarkSmoothing : " << markSmoothing;
    if (markSmoothing == markSmoothing_)
        return;
    markSmoothing_ = markSmoothing;
    ParamValue.setParameter("preprocessing" , "contourAreaMin",markSmoothing_);
    emit markSmoothingdhanged();
    testBinaryIdentifyImg();
}

// Sets the enhancement strength (drives the Gaussian-blur kernel size in
// binarizeImage()), persists it and refreshes the preview.
void PrecisionIdentify::setmarkEnhancement(int markEnhancement)
{
    LogDebug << "setmarkEnhancement : " << markEnhancement;
    if (markEnhancement == markEnhancement_)
        return;
    markEnhancement_ = markEnhancement;
    ParamValue.setParameter("preprocessing" , "markEnhancement",markEnhancement_);
    emit markEnhancementchanged();
    testBinaryIdentifyImg();
}


// Slot: re-reads the cached preprocessing parameters after ConfigManager
// reloads the value table, then notifies QML bindings of every value.
void PrecisionIdentify::valueTableLoad()
{
    markEnhancement_ = ParamValue.getParameter("preprocessing" , "markEnhancement").toInt();
    // NOTE(review): markSmoothing_ is persisted under "contourAreaMin",
    // matching the constructor and setmarkSmoothing().
    markSmoothing_ =   ParamValue.getParameter("preprocessing" , "contourAreaMin").toInt();
    overturn_  =   ParamValue.getParameter("preprocessing" , "overturn").toInt();
    emit overturnchanged();
    emit markEnhancementchanged();
    emit markSmoothingdhanged();
}

// Re-runs the binarization preview when the UI is on the pre-process step.
// Requires a previously cached grayscale frame (identifiGrayTemp); otherwise
// a non-blocking error dialog is shown. The actual work runs asynchronously.
void PrecisionIdentify::testBinaryIdentifyImg()
{
    if (ImageProcessor::getInstance().selectedStep() != ImageProcessor::MeauPreProcess)
        return;

    if (identifiGrayTemp.empty()) {
        ErrorDialogManager::instance().showNonBlockingError("二值化无法预览", "请先设置标识");
        return;
    }

    // Pause live capture so the preview frame stays stable.
    ImageProcessor::getInstance().setcaptureImgMode(ImageProcessor::CapPause);
    LogDebug << "setidentifiThreshold Start to test";
    // display=true makes handleIdentify() push the result images to QML.
    // (Removed an unused local cv::Mat from the original lambda.)
    QtConcurrent::run([this]() {
        cv::Rect rect;
        handleIdentify(identifiGrayTemp.clone(), rect, true);
    });
}

// Returns true when the majority of the image border is foreground (white),
// i.e. the binary image should be inverted so the background reads as black.
bool PrecisionIdentify::needsInversion(cv::Mat &binaryImage)
{
    const int borderOn = cv::countNonZero(binaryImage.row(0))
                       + cv::countNonZero(binaryImage.row(binaryImage.rows - 1))
                       + cv::countNonZero(binaryImage.col(0))
                       + cv::countNonZero(binaryImage.col(binaryImage.cols - 1));
    const int borderTotal = binaryImage.cols * 2 + binaryImage.rows * 2 - 4;
    const int borderOff = borderTotal - borderOn;
    // Invert when white border pixels outnumber black ones.
    return borderOff < borderOn;
}

// Determines the dominant background value (0 or 255) by sampling the four
// border rows/columns of the binary image.
int PrecisionIdentify::findBackgroundColor(const cv::Mat &binaryImage)
{
    const int onPixels = cv::countNonZero(binaryImage.row(0))
                       + cv::countNonZero(binaryImage.row(binaryImage.rows - 1))
                       + cv::countNonZero(binaryImage.col(0))
                       + cv::countNonZero(binaryImage.col(binaryImage.cols - 1));
    const int borderTotal = binaryImage.cols * 2 + binaryImage.rows * 2 - 4;
    const int offPixels = borderTotal - onPixels;

    LogDebug << "Background color determination: countZero=" << offPixels << ", countOne=" << onPixels;

    return (offPixels > onPixels) ? 0 : 255;
}

// Returns the axis-aligned bounding box covering every external contour in
// the binary image, or an empty cv::Rect when no contour is found.
cv::Rect PrecisionIdentify::findMinBoundingRect(const cv::Mat& binaryImage) {
    std::vector<std::vector<cv::Point>> outlines;
    std::vector<cv::Vec4i> topology;
    cv::findContours(binaryImage, outlines, topology, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

    if (outlines.empty()) {
        LogWarning << "No contours found!";
        return cv::Rect();
    }

    // Merge every contour point, then take one bounding box around them all.
    std::vector<cv::Point> merged;
    for (const auto& outline : outlines)
        merged.insert(merged.end(), outline.begin(), outline.end());

    const cv::Rect box = cv::boundingRect(merged);
    LogTrack  << "Bounding rect: x=" << box.x << ", y=" << box.y
             << ", width=" << box.width << ", height=" << box.height;

    return box;
}


// Refines an approximate ROI by template-matching `searchImage` inside an
// expanded neighbourhood of `roi` on `frameGray`.
// - frameGray: full search frame (grayscale expected).
// - searchImage: template; converted to grayscale in place if 3-channel.
// - roi: in/out — approximate location in, best-match location out
//   (full-frame coordinates, template-sized).
// - scale: zoom factor; clamped to >= 0.1, drives the search margin.
// Returns true when matching was performed, false on invalid input.
bool PrecisionIdentify::handleIdentifyV2(const cv::Mat &frameGray, cv::Mat &searchImage, cv::Rect2d &roi, float scale)
{
    // Guard against empty inputs — matchTemplate would assert otherwise.
    if (frameGray.empty() || searchImage.empty()) {
        LogError << "handleIdentifyV2: empty input image";
        return false;
    }

    // Step 1: template matching needs single-channel images.
    if (searchImage.channels() == 3) {
        cv::cvtColor(searchImage, searchImage, cv::COLOR_BGR2GRAY);
    }

    if (scale < 0.1f)
        scale = 0.1f;

    // Search margin: the smallest even integer strictly greater than 2/scale.
    const float value = 2.0f / scale;
    int dippx = static_cast<int>(value);
    if (dippx % 2 != 0) {
        dippx++; // force even
    }
    if (dippx <= value) {
        dippx += 2; // force strictly greater than value
    }

    // Step 2: expand the ROI by the margin on every side, clipped to the frame.
    cv::Rect expandedRoi(roi.x - dippx, roi.y - dippx,
                         roi.width + 2 * dippx, roi.height + 2 * dippx);
    expandedRoi &= cv::Rect(0, 0, frameGray.cols, frameGray.rows);

    // BUG FIX: matchTemplate requires the search window to contain the
    // template; the original would crash when clipping shrank the window.
    if (expandedRoi.width < searchImage.cols || expandedRoi.height < searchImage.rows) {
        LogError << "handleIdentifyV2: search window smaller than template";
        return false;
    }

    // Steps 3-4: crop the window and run normalized cross-correlation.
    cv::Mat result;
    cv::matchTemplate(frameGray(expandedRoi), searchImage, result, cv::TM_CCOEFF_NORMED);

    // Step 5: locate the best match.
    double minVal, maxVal;
    cv::Point minLoc, maxLoc;
    cv::minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc);

    // Report the match in full-frame coordinates, sized like the template.
    roi.x = expandedRoi.x + maxLoc.x;
    roi.y = expandedRoi.y + maxLoc.y;
    roi.width = searchImage.cols;
    roi.height = searchImage.rows;

    // BUG FIX: the original unconditionally returned false even after a
    // successful match, contradicting its own "no valid match" comment.
    return true;
}


// Locates the identification mark in `frameGray`.
// - rect: out — bounding box of the mark; falls back to the full frame.
// - display: true = preview mode: the binarized/annotated images are pushed
//   to QML ("Box1"/"Main") instead of reporting success.
// Returns true when a usable mark region was found (or Template mode, which
// needs no binarization here); false otherwise and in display mode.
bool PrecisionIdentify::handleIdentify(const cv::Mat &frameGray, cv::Rect &rect, bool display)
{
    cv::Mat displayImage;
    // Cache the frame so later threshold tweaks can re-run the preview.
    if (!display)
        identifiGrayTemp = frameGray.clone();

    int method = PreProcessor::getInstance().precisionMatching();
    LogTrack << "Attemp to get precisionMatching ...:" << method;
    // BUG FIX: the original named this local `method_`, shadowing the member
    // written by setMethon(); renamed to make the shadowing impossible.
    const Matching matchMethod = static_cast<Matching>(method);

    rect = cv::Rect();
    if (frameGray.empty()) {
        LogError << "Fail to precisionIdentify handleIdentify because frameGray is null";
        return false;
    }
    // Default result: the whole frame.
    rect = cv::Rect(0, 0, frameGray.size().width, frameGray.size().height);

    cv::Mat binaryImg;
    switch (matchMethod) {
    case Template:
        // Template matching does not use the binarization pipeline.
        return true;
    case Binarization:
        binarizeImage(frameGray, binaryImg, displayImage);
        break;
    case gradient:
        binarizeImage(frameGray, binaryImg, displayImage);
        // Gradient mode expects dark background; invert if the border is white.
        if (needsInversion(binaryImg)) {
            cv::bitwise_not(binaryImg, binaryImg);
        }
        break;
    }

    cv::Rect boundingRect;
    if (!binaryImg.empty()) {
        CvAlgEdgeDetection::getInstance().counttiltAngle(binaryImg);
        boundingRect = findMinBoundingRect(binaryImg);
        // Ignore tiny boxes — below 20 px² is treated as noise.
        if (boundingRect.area() > 20) {
            rect = boundingRect;
            if (!display) {
                return true;
            }
        }
    }

    // Preview path: annotate and push to QML.
    // BUG FIX: guard on binaryImg — cv::cvtColor throws on an empty Mat
    // (reachable when the method value matches no switch case).
    if (display && !binaryImg.empty()) {
        cv::Mat resultImage = binaryImg.clone();
        cv::cvtColor(resultImage, resultImage, cv::COLOR_GRAY2RGB);
        if (!boundingRect.empty())
            cv::rectangle(resultImage, boundingRect, cv::Scalar(0, 255, 0), 4);
        LogDebug << "ImageProvider::getInstance().image2qml";
        ImageProvider::getInstance().image2qml(displayImage, "Box1");
        ImageProvider::getInstance().image2qml(resultImage, "Main");
    }
    return false;
}

// Convenience overload: binarize without keeping the intermediate
// (pre-threshold) display image.
void PrecisionIdentify::binarizeImage(const cv::Mat &grayImage, cv::Mat &binaryImage)
{
    cv::Mat discarded;
    binarizeImage(grayImage, binaryImage, discarded);
}

// Binarizes `grayImage` into `binaryImage` according to the steps enabled in
// the "binaryalgorithm" parameter map. `displayImage` receives the grayscale
// image after the optional enhancement steps, for preview purposes.
// Pipeline: optional Gaussian blur -> optional histogram equalization ->
// threshold (adaptive / Otsu auto / fixed) -> optional morphological open ->
// optional median blur -> polarity fixup -> optional small-contour removal.
void PrecisionIdentify::binarizeImage(const cv::Mat &grayImage, cv::Mat &binaryImage, cv::Mat &displayImage )
{
    QVariantMap enableMap = ParamValue.getParameter<QVariantMap>("preprocessing","binaryalgorithm");
    if (grayImage.empty()) {
        LogError << " binarizeImage Input image is empty!";
        return;
    }

    if (grayImage.channels() != 1) {
        LogError << "binarizeImage Input image is not a grayscale image!";
        return;
    }

    // BUG FIX: the original ran GaussianBlur/equalizeHist with grayImage as
    // both source and destination, mutating the caller's buffer despite the
    // const reference. Mutating steps now operate on a private copy.
    const bool wantBlur = enableMap["gaussian_blur"].toBool();
    const bool wantEqualize = enableMap["equalize_hist"].toBool();
    cv::Mat work = grayImage;
    if (wantBlur || wantEqualize)
        work = grayImage.clone();

    if (wantBlur) {
        int size = markEnhancement_ / 8;
        if (size % 2 == 0) {
            size += 1; // Gaussian kernel size must be odd
        }
        cv::GaussianBlur(work, work, cv::Size(size, size), 0);
    }

    if (wantEqualize) {
        cv::equalizeHist(work, work);
    }

    // Preview image = grayscale after the enhancement steps above.
    displayImage = work.clone();

    if (enableMap["adaptive_threshold"].toBool()) {
        int blockSize = ParamValue.getParameter<int>("preprocessing", "adaptiveThresholdT");
        if (blockSize % 2 == 0) {
            blockSize += 1; // adaptiveThreshold requires an odd block size
        }
        int C = ParamValue.getParameter<int>("preprocessing", "adaptiveThresholdC");
        cv::adaptiveThreshold(work, binaryImage, 255, cv::ADAPTIVE_THRESH_GAUSSIAN_C, cv::THRESH_BINARY, blockSize, C);
    } else if (identifiThresholdAuto_) {
        LogTrack << "Automatic threshold selection mode.";
        // Otsu picks the threshold; publish it so the UI slider follows.
        int identifiThresholdtemp = cv::threshold(work, binaryImage, 0, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
        if (identifiThresholdtemp != identifiThreshold_) {
            identifiThreshold_ = identifiThresholdtemp;
            emit identifiThresholdhanged();
        }
        LogTrack << "Otsu's method selected threshold: " << identifiThresholdtemp;
    } else {
        cv::threshold(work, binaryImage, identifiThreshold_, 255, cv::THRESH_BINARY);
    }

    if (enableMap["morphology"].toBool()) {
        int size = ParamValue.getParameter<int>("preprocessing", "morphologyEx");
        if (size % 2 == 0) {
            size += 1; // keep the structuring element odd-sized
        }
        cv::Mat element = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(size, size));
        cv::morphologyEx(binaryImage, binaryImage, cv::MORPH_OPEN, element);
    }

    // Median filtering to suppress salt-and-pepper noise.
    if (enableMap["median_blur"].toBool()) {
        int size = ParamValue.getParameter<int>("preprocessing", "medianBlur");
        if (size % 2 == 0) {
            size += 1; // medianBlur requires an odd aperture
        }
        cv::medianBlur(binaryImage, binaryImage, size);
    }

    // Polarity: 0 = auto-detect from the border, 2 = always invert,
    // any other value = keep the thresholded polarity.
    if (overturn_ == 0) {
        if (needsInversion(binaryImage)) {
            cv::bitwise_not(binaryImage, binaryImage);
        }
    } else if (overturn_ == 2) {
        cv::bitwise_not(binaryImage, binaryImage);
    }

    // Contour-based denoising: erase external contours whose area is below
    // markSmoothing_ (persisted as "contourAreaMin").
    if (enableMap["contour_analysis"].toBool()) {
        LogTrack << "markSmoothing_" << markSmoothing_;
        std::vector<std::vector<cv::Point>> contours;
        cv::findContours(binaryImage, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);
        for (size_t i = 0; i < contours.size(); ++i) {
            if (cv::contourArea(contours[i]) < markSmoothing_) {
                cv::drawContours(binaryImage, contours, static_cast<int>(i), cv::Scalar(0), cv::FILLED);
            }
        }
    }
}
