#include "ImageProcessor.h"
#include "afterdetect.h"
#include "cameraeth0.h"
#include "camerathread.h"
#include "detectcachemodel.h"
#include "errordialogmanager.h"
#include "gpiocontroller.h"
#include "imagerecord.h"
#include "modelstatusmanager.h"
#include "ngtable.h"
#include "processtask.h"
#include "source/configmanager.h"
#include <QDebug>
#include <QDateTime>
#include <QFuture>
#include <pthread.h>
#include <cmath> // 包含cmath以使用std::round
#include <QApplication>
#include "autorunprocess.h"
#include "pccore.h"
#include "pccoremodel.h"
#include <sched.h>
#include <pthread.h>

// Constructs the image-processing hub: wires the camera singleton, the
// auto-run worker thread and the QML-facing signals together.
// imageProvider must outlive this object; parent follows normal QObject
// ownership. state_ starts as Initializing; the trigger loop is started
// later from cameraStatusChanged() once the camera reports open.
ImageProcessor::ImageProcessor(ImageProvider *imageProvider, QObject *parent)
    : QObject(parent), camera_(CameraEth0::getInstance()), imageProvider_(imageProvider),
    currentAbortMode_(OnlyCaptured), captureImgMode_(RateTriggered), state_(Initializing)
    ,maxRecords_(5), processedFramesCount_(0),
    endStep_(Capture),
    selectedStep_(MeauBase), currentFrameRate_(0.0), CameeraSemaphore(0),
    setting(QString("master/config/")  + "link.ini", QSettings::IniFormat)
{
    // Persistent "link" fields are all backed by master/config/link.ini.
    m_linkName.init(&setting);
    m_linkPhone.init(&setting);
    m_linkadd.init(&setting);
    m_linkComny.init(&setting);
    m_totalLength.init(&setting);


    // Seed the per-step info/time maps and display state with empty values
    // so QML bindings have defined content before the first frame arrives.
    detectStepInfo_["Reprogress"] = "";
    detectStepInfo_["Detect"] = "";
    detectStepInfo_["Cv"] = "";
    m_NgResonDetail = "";
    m_detectIsOk = true;
    currentFrameRate_ = 0;
    displayimageInfo_["width"] = 0;
    displayimageInfo_["height"] = 0;
    displayimageInfo_["channels"] = 0;
    elapsedTimer_.start();
    LogInfo << "Initializing ImageProcessor";
    photographyDelay_ = ParamValue.getParameter<int>("base","photographyDelay");

    // Register custom types so they can travel through queued signal/slot
    // connections across threads.
    qRegisterMetaType<ImageProcessor::Status>("ImageProcessor::Status");
    qRegisterMetaType<StepProcess>("StepProcess");

    qRegisterMetaType<ImageRecord>("ImageRecord");
    threadPool_ = QThreadPool::globalInstance();

    // Signal connections
    connect(this, &ImageProcessor::identifiImgImgChange, &PreProcessor::getInstance(), &PreProcessor::identifiImgImgChangeSlots);
    connect(&CameraEth0::getInstance(), &CameraEth0::isOpenedChanged, this, &ImageProcessor::cameraStatusChanged);
    debounceTimer = new QTimer(this);

    connect(&ConfigManager::getInstance(), &ConfigManager::valueTableLoad, this, &ImageProcessor::valueTableLoad);

    // The auto-run worker lives in its own high-priority thread; all calls
    // into it below go through queued invocations.
    autoRunProcess.moveToThread(&autoRunProcessThread);
    autoRunProcessThread.setPriority(QThread::TimeCriticalPriority);
    autoRunProcessThread.start();

    QMetaObject::invokeMethod(&autoRunProcess,"setThreadPriority");


    // Re-emit the worker's signals under this object's name so QML only
    // needs to know about ImageProcessor.
    connect(&autoRunProcess, &AutoRunProcess::shortageGriggerSign,
            this, &ImageProcessor::shortageGriggerSign);
    connect(&autoRunProcess, &AutoRunProcess::inplaceSign,
            this, &ImageProcessor::inplaceSign);
    connect(&autoRunProcess, &AutoRunProcess::inplaceSlidingStepSign,
            this, &ImageProcessor::inplaceSlidingStepSign);
    connect(&autoRunProcess, &AutoRunProcess::arriveBatchGriggerSign,
            this, &ImageProcessor::arriveBatchGriggerSign);
    connect(&autoRunProcess, &AutoRunProcess::logosizeChangeSign,
            this, &ImageProcessor::logosizeChangeSign);
    connect(&autoRunProcess, &AutoRunProcess::sudentStopContinue,
            this, &ImageProcessor::sudentStopContinue);


    // camera_.startCapture();
}

// Blocks until both pools drain before the object is torn down, so no
// ProcessTask can fire a signal into a half-destroyed ImageProcessor.
ImageProcessor::~ImageProcessor()
{
    threadPool_->waitForDone();      // global pool shared with detection tasks
    threadTriggerLoop.waitForDone(); // private trigger-loop pool
}

// Resets the "today's total" counter shown in the UI back to zero.
void ImageProcessor::resetTodayallNum()
{
    updatetodayAllnum(0);
}

// Derives the trimming interval from the configured total length divided
// by the per-piece fabric length, plus the discharge-to-cutting offset.
// Does nothing while no fabric length has been configured.
void ImageProcessor::autoSettrimminginterval()
{
    if (m_fabriclengthcm == 0)
        return; // guard against division by zero

    set_trimminginterval(totalLength() / m_fabriclengthcm + discharge2cutting());
    qInfo() << "ImageProcessor::autoSettrimminginterva " << get_trimminginterval();
}

// Powers off the whole machine via a blocking shutdown(8) call.
// NOTE(review): relies on passwordless sudo for /sbin/shutdown — confirm
// the deployment's sudoers configuration.
void ImageProcessor::shutdownSystem()
{
    QProcess::execute("sudo /sbin/shutdown -h now");
}

// Requests a clean Qt application exit (event loop quits, destructors run).
void ImageProcessor::closeApplication()
{
    QApplication::quit();
}

// Stops the systemd service hosting this program (blocking call).
// NOTE(review): requires passwordless sudo for systemctl — verify sudoers.
void ImageProcessor::closeService()
{
    QProcess::execute("sudo systemctl stop masterQT");
}

// void ImageProcessor::setProcessingRate(int maxFramesPerSecond)
// {
//     LogDebug << "Setting processing rate to:" << maxFramesPerSecond;
//     maxFrameRate_ = maxFramesPerSecond;
//     frameInterval_ = 1000 / maxFrameRate_;
// }

// Records the requested abort mode and derives the pipeline stage at which
// processing should stop (endStep_). Unknown modes keep the previous stage.
void ImageProcessor::setAbortMode(ProcessingAbortMode mode)
{
    LogDebug << "set ProcessingAbortMode ：" << mode;
    currentAbortMode_ = mode;

    switch (mode)
    {
    case FullProcess:    endStep_ = total;      break;
    case OnlyCaptured:   endStep_ = Capture;    break;
    case OnlyPreProcess: endStep_ = Reprogress; break;
    case OnlyDetect:     endStep_ = Detect;     break;
    case OnlyCV:         endStep_ = Cv;         break;
    case OnlyPCcore:     endStep_ = PCcoreStep; break;
    default:             /* keep previous endStep_ */ break;
    }
}

// void ImageProcessor::setIdentifiROI(int x, int y, int width, int height)
// {
//     LogDebug << "Attempint setIdentifiROI";
//     if (monitorImg.empty())
//     {
//         ErrorDialogManager::instance().showNonBlockingError("设置标识错误", "请先设置基准....");
//         LogWarning << "Attempint setIdentifiROI Must setMonitorROI first";
//         return;
//     }
//     // if(selectedStep_ != total && selectedStep_ !=  Reprogress)
//     // {
//     //     ErrorDialogManager::instance().showNonBlockingError("设置标识错误", "请返回基本设置或者预处理设置");
//     //     LogError << "setMonitorROI Error Please Open Camera first";
//     //     return;
//     // }
//     std::lock_guard<std::mutex> lock(imageCardMutex_);
//     cv::Mat displayImg = imageProvider_->getMainImg();
//     if (imgCards_.empty() || displayImg.empty() )
//     {
//         ErrorDialogManager::instance().showNonBlockingError("设置标识错误", "请先获取图片....");
//         LogError << "setMonitorROI Error Please Open Camera first";
//         return;
//     }

//     ImageRecord &imgcard_last = imgCards_.last();
//     LogDebug << "setIdentifiROI get imgcard_last.getResultImage()" << imgcard_last.getResultImage().cols
//              << "|" << imgcard_last.getResultImage().rows;

//     cv::Mat identifiImg;
//     if (PreProcessor::getInstance().setIdentifiROI(x, y, width, height))
//     {
//         identifiImg = displayImg(cv::Rect2d(x, y, width, height)).clone();

//         imageProvider_->image2qml(identifiImg, "Box2");
//         emit identifiImgImgChange(identifiImg);
//     }
//     else
//     {
//         LogError << "setMonitorROI Error ";
//         return;
//     }
// }

// void ImageProcessor::setMonitorROI(int x, int y, int width, int height)
// {
//     LogDebug << "Attempint setMonitorROI";
//     std::lock_guard<std::mutex> lock(imageCardMutex_);
//     PreProcessor::getInstance().setMonitorROI(x, y, width, height);
//     cv::Mat displayImg = imageProvider_->getMainImg();
//     if (imgCards_.empty() || displayImg.empty())
//     {
//         ErrorDialogManager::instance().showNonBlockingError("设置基准错误", "请先获取图片....");
//         LogError << "setMonitorROI Error Please Open Camera first";
//         return;
//     }
//     ImageRecord &imgcard_last = imgCards_.last();
//     monitorImg = displayImg(cv::Rect2d(x, y, width, height)).clone();
//     imageProvider_->image2qml(monitorImg, "Box1");
//     setAbortMode(OnlyPreProcess);
//     setSelectedStep(Reprogress);
//     emit selectedStepChange();
//     emit monitorImgChange(monitorImg);
//     // }else
//     // {
//     //     ErrorDialogManager::instance().showNonBlockingError("设置基准错误", "请返回基本设置或者预处理设置");
//     //     LogError << "setMonitorROI Error Pleasec urrentAbortMode_ is not OnlyCaptured|OnlyCaptured";
//     //     return;
//     // }
// }

// Switches the capture trigger mode. A single-shot trigger (soft or IO)
// wakes the capture loop immediately — even when the mode is unchanged,
// which is how a repeated soft trigger fires again. Changing to
// RateTriggered also wakes the loop so it can resume free-running capture.
void ImageProcessor::setcaptureImgMode(CaptureImgMode mode)
{
    LogDebug << "Attempting setcaptureImgMode: " << mode;

    const bool oneShotTrigger = (mode == SingleSoftTrigger) || (mode == IOTriggered);
    if (oneShotTrigger)
        CameeraSemaphore.release(); // wake triggerLoop()'s acquire()

    if (mode == captureImgMode_)
        return;

    if (mode == RateTriggered)
        CameeraSemaphore.release(); // unblock the loop before switching modes
    captureImgMode_ = mode;
    emit captureImgModeChange();
}

// void* ImageProcessor::triggerLooptest(void* arg) {
//     // 将this指针从arg中取出
//     ImageProcessor* processor = static_cast<ImageProcessor*>(arg);
//     processor->runTriggerLoop();
//     return nullptr;
// }
// Starts the capture/trigger loop if it is not already running: flips the
// state to Running, launches a CameraThread and resets the frame-rate
// statistics window.
void ImageProcessor::startTriggerLoop()
{
    if (state_ != Running)
    {
        setStatus(Running);

        // Create and start the CameraThread; it is parented to this, so Qt
        // handles its deletion.
        CameraThread *cameraThread = new CameraThread(this);
        cameraThread->start();

        elapsedTimer_.start();
        frameTimestamps_.clear();
        currentFrameRate_ = 0;
        LogInfo << "Started trigger loop successfully.";
    }
    else
    {
        LogInfo << "Trigger loop is already running.";
    }
}

// Asks the trigger loop to exit and blocks until every queued task in the
// private trigger-loop pool has drained.
void ImageProcessor::stopTriggerLoop()
{
    LogInfo << "Stopping triggerloop thread ..."; // fixed typo: "Stoping"
    setStatus(Stop);                 // flips state_ so triggerLoop()'s while-condition fails
    threadTriggerLoop.waitForDone(); // wait for in-flight tasks to finish
    LogInfo << "Stop triggerloop thread successful"; // fixed typo: "succeessfull"
}

// Remembers which UI menu page is active and selects the matching
// processing abort mode for it.
void ImageProcessor::setSelectedStep(MeauName selectedStep)
{
    LogDebug << "setSelectedStep successful : " << selectedStep;
    selectedStep_ = selectedStep;

    switch (selectedStep)
    {
    case MeauCamera:
        setAbortMode(OnlyCaptured);
        break;
    case MeauAi:
        setAbortMode(OnlyDetect);
        break;
    case MeauCv:
        setAbortMode(OnlyCV);
        break;
    case MeauPCcore:
        // The PC-core page always starts from a clean model.
        PCcoreModel::instance().resetModel();
        setAbortMode(OnlyPCcore);
        break;
    case MeauAutoDetect:
    case MeauDetect:
        setAbortMode(FullProcess);
        break;
    case MeauBase:
    case MeauPreProcess:
    case MeauIO:
    case MeauSummary:
    case MeauAbout:
    default:
        // Every other page only needs the pre-processing preview.
        setAbortMode(OnlyPreProcess);
        break;
    }
}

// void ImageProcessor::setmaxFrameRate(int maxFrameRate)
// {
//     maxFrameRate_ = maxFrameRate;
//     frameInterval_ = 1000 / maxFrameRate_;
//     LogDebug << "ImageProcessor::setmaxFrameRat :" << maxFrameRate_;
//     emit maxFrameRateChange();
// }

// Publishes the human-readable detection detail text to QML.
// Deliberately emits even when the text is unchanged, so bindings refresh.
void ImageProcessor::setdetectDetail(const QString context)
{
    m_detectDetail = context;
    emit detectDetailChanged();

}

// Publishes the overall OK/NG verdict to QML. Always emits — callers such
// as displayOKNGReson rely on the notification even for repeated values.
void ImageProcessor::setdetectIsOk(bool detectIsOk)
{
    m_detectIsOk = detectIsOk;
    emit detectIsOkChanged();
}

// Publishes the NG reason description to QML. Always emits — callers may
// pass the already-assigned member and still expect the change signal.
void ImageProcessor::setNgResonDetail(const QString NgResonDetail)
{
    m_NgResonDetail = NgResonDetail;
    emit NgResonDetailChanged();
}

void ImageProcessor::displayOKNGReson(const NgCard &ngCard)
{
    std::vector<int> ngList = ngCard.getDefects();
    LogTrack << "displayOKNGReson get ngList" << ngCard.getDefects() << ngList.size();
    if (ngList.empty())
    {
        setNgResonDetail("");
        setdetectIsOk(true);
    }
    else
    {
        m_NgResonDetail = NGTable::getInstance().parseNGListToDescription(ngList);
        LogTrack << "parseNGListToDescription" << m_NgResonDetail;
        setNgResonDetail(m_NgResonDetail);
        setdetectIsOk(false);
    }
}

// Thread-safe setter for the in-flight processing-task counter.
// The compare and the write now both happen under multithreadMutex_, so a
// concurrent caller cannot interleave between test and set (the original
// compared outside the lock). The signal is emitted outside the lock to
// avoid re-entrancy deadlocks from directly-connected slots.
void ImageProcessor::setmultithreading(int multithreading)
{
    bool changed = false;
    {
        std::lock_guard<std::mutex> lock(multithreadMutex_);
        if (multithreading_ != multithreading)
        {
            multithreading_ = multithreading;
            changed = true;
        }
    }
    if (changed)
        emit multithreadingChange();
}

// Publishes the per-stage timing map (stage name -> elapsed time) to QML.
void ImageProcessor::displaydetectStepTime(QVariantMap detectStepTime)
{
    detectStepTime_ = detectStepTime;
    emit detectStepTimeChange();
}

// Publishes the per-stage info map (stage name -> description) to QML.
void ImageProcessor::displaydetectStepInfo(QVariantMap detectStepInfo)
{
    detectStepInfo_ = detectStepInfo;
    emit detectStepInfoChange();
}

// Publishes the end-to-end display latency (ms) to QML, notifying only
// when the value actually changes.
void ImageProcessor::displayDisplayDelay(int imgDisplayDelay)
{
    if (imgDisplayDelay == imgDisplayDelay_)
        return;
    imgDisplayDelay_ = imgDisplayDelay;
    emit imgDisplayDelayChange();
}

// Sets the capture delay and persists it to the "base" parameter table so
// it survives restarts. Only acts (and emits) on an actual change.
void ImageProcessor::setphotographyDelay(int photographyDelay)
{
    if (photographyDelay_ != photographyDelay)
    {
        LogDebug << "Attempting to setphotographyDelay:" << photographyDelay;
        photographyDelay_ = photographyDelay;
        ParamValue.setParameter("base","photographyDelay" , photographyDelay);
        emit photographyDelayChanged();
    }
}


// Toggles whether historical records are displayed. No change signal is
// emitted; the flag is read directly elsewhere.
void ImageProcessor::setdisplayHistory(bool displayHistory)
{
    displayHistory_ = displayHistory;
    LogDebug << "setdisplayHistory:" << displayHistory;
}

// Decrements the in-flight task counter.
// NOTE(review): reads multithreading_ without holding multithreadMutex_,
// so a concurrent increment can be lost — confirm callers are serialized.
void ImageProcessor::multithreadingsub()
{
    setmultithreading(multithreading_ - 1);
}
// Increments the in-flight task counter.
// NOTE(review): same unlocked read-modify-write as multithreadingsub().
void ImageProcessor::multithreadingAdd()
{
    setmultithreading(multithreading_ + 1);
}

// Slot invoked when a ProcessTask finishes one frame: stores the record,
// updates the frame-rate statistics and forwards the result image to QML.
void ImageProcessor::getFinishProcess(ImageRecord imgCard)
{
    {
        // Keep the latest finished record for other consumers.
        std::lock_guard<std::mutex> lock(imageCardMutex_);
        imgCardsLast = imgCard;
    }

    if (state_ == Running)
    {
        // Frame-rate bookkeeping over a sliding timestamp window.
        ImageProcessor::getInstance().displayDisplayDelay(imgCard.getElapsedTime());
        qint64 now = elapsedTimer_.elapsed();
        frameTimestamps_.push_back(now); // record this frame's timestamp
        if (frameTimestamps_.size() > maxIntervals_)
        {
            frameTimestamps_.pop_front();
        }
        updateFrameRate();
        if (!imgCard.getIsSkipDisplay() && captureImgMode_ != CapPause)
        {
            cv::Mat img = imgCard.getResultImage();

            sendImgInfo(img);
            LogTrack << "imgCard.getStepProcess():" << imgCard.getStepProcess();
            // Route the image to QML tagged with the pipeline stage that
            // produced it, so the UI can label the view accordingly.
            if (imgCard.getdisplayStep() == ImageRecord::DisplayCap)
                imageProvider_->image2qml(img, "Main", ImageProvider::Capture);
            else if (imgCard.getdisplayStep() == ImageRecord::DisplayPre)
                imageProvider_->image2qml(img, "Main", ImageProvider::Reprogress);
            else
                imageProvider_->image2qml(img, "Main");
        }
        // }
        // else
        // {
        //     LogWarning << "Failed to display image on UI because image processing failed.";
        //     emit processingWarning("处理过程错误，跳过这张图片");
        // }
    }
    else
    {
        // Not running: push an empty image so the UI clears (unless paused).
        cv::Mat img_enpty;
        if (ImageProcessor::getInstance().captureImgMode() != ImageProcessor::CapPause)
            imageProvider_->image2qml(img_enpty, "Main");
    }
}

void ImageProcessor::updateFrameRate()
{
    if (frameTimestamps_.size() < 2)
        return;

    qint64 totalInterval = 0;
    for (size_t i = 1; i < frameTimestamps_.size(); ++i)
    {
        qint64 interval = frameTimestamps_[i] - frameTimestamps_[i - 1];
        totalInterval += interval;
    }

    double averageInterval = static_cast<double>(totalInterval) / (frameTimestamps_.size() - 1);
    currentFrameRate_ = std::round(1000.0 / averageInterval * 100.0) / 100.0; // 计算帧率（每秒帧数）并保留两位小数
    LogTrack << QString("Current Frame Rate: %1 fps").arg(currentFrameRate_, 0, 'f', 2);
    emit currentFrameRateChanged();
}
// Runs the full detection pipeline for one frame, but only while the
// global pool still has a free thread; otherwise the frame is skipped.
void ImageProcessor::DetectFullProcess(ImageRecord &imageRecord)
{
    if (threadPool_->activeThreadCount() >= threadPool_->maxThreadCount())
        return;

    // Stack-allocated task (same pattern as triggerLoop()): run() executes
    // synchronously here, so new/delete was pure overhead and risked a
    // double delete if the task were ever handed to the pool with
    // autoDelete enabled. The destructor also auto-disconnects the signal.
    ProcessTask processTask(imageRecord, endStep_);
    connect(&processTask, &ProcessTask::finishProcess, this, &ImageProcessor::getFinishProcess);
    processTask.run();
}
// Main capture loop, executed while state_ == Running. Depending on the
// trigger mode it either free-runs at m_expectedFPS (RateTriggered),
// waits on CameeraSemaphore for a soft trigger, or handles IO-triggered
// frames asynchronously via the global thread pool.
void ImageProcessor::triggerLoop()
{
    QElapsedTimer loopTimer;
    StepProcess CurrentStep;
    while (state_ == Running)
    {
        ModelStatusManager::instance().waitForCameraRunning(); // block until the camera is running
        ImageRecord imageRecord;
        LogTrack << "------------ Start Once Capture end: " << endStep_ << "|" << captureImgMode_ << "--------------------------------";

        loopTimer.start();
        if (captureImgMode_ == RateTriggered)
        {
            // Free-running mode: capture immediately, process up to endStep_.
            imageRecord.setStepImage(Capture, camera_.runCaptureOnce(), ImageRecord::DisplayCap);
            // if(endStep_ ==  Capture || endStep_ == Reprogress)
            CurrentStep = endStep_;
            // else
            //    CurrentStep = Reprogress;
        }
        else
        {
            // Triggered modes: sleep until setcaptureImgMode() releases the
            // semaphore.
            CameeraSemaphore.acquire();
            LogTrack << "gogogo";
            if (captureImgMode_ == IOTriggered)
            {
                // IO-triggered frames always run the full pipeline and are
                // dispatched asynchronously to the global pool.
                CurrentStep = total;
                imageRecord.resetTimer();
                if(selectedStep() == MeauAutoDetect)
                {
                    imageRecord.setStepImage(Capture, camera_.runCaptureOnce(), ImageRecord::DisplayCap);
                    // Auto-detect mode tags the frame with the next image id.
                    imageRecord.setID(AfterDetect::getInstance().getImageId());
                }
                else
                {
                    imageRecord.setStepImage(Capture, camera_.runCaptureOnce(), ImageRecord::DisplayCap);
                }

                LogTrack << "IOTriggered GO1";
                if (multithreading() < m_Maxmultithreading)
                {
                    // Pool-owned task; results flow to AfterDetect.
                    ProcessTask *processTask = new ProcessTask(imageRecord, CurrentStep);
                    connect(processTask, &ProcessTask::finishProcess, &AfterDetect::getInstance(), &AfterDetect::getFinishProcess);
                    // processTask->run();
                    threadPool_->start(processTask);
                }
                else
                {
                    // Too many tasks in flight: report the frame as failed.
                    imageRecord.setIsSucceeded(false);
                    QMetaObject::invokeMethod(&AfterDetect::getInstance(), "getFinishProcess",Q_ARG(ImageRecord,imageRecord));
                }
                continue;
            }
            else
            {
                // Single soft trigger: capture one frame synchronously.
                CurrentStep = endStep_;
                imageRecord.resetTimer();
                imageRecord.setStepImage(Capture, camera_.runCaptureOnce(), ImageRecord::DisplayCap);
            }
        }
        LogTrack << "Start  ProcessTask *processTas";
        // Synchronous processing on this thread; results come back through
        // getFinishProcess, then the connection is dropped before the stack
        // task is destroyed.
        ProcessTask processTask(imageRecord, CurrentStep);
        connect(&processTask, &ProcessTask::finishProcess, this, &ImageProcessor::getFinishProcess);
        processTask.run();
        disconnect(&processTask, &ProcessTask::finishProcess, this, &ImageProcessor::getFinishProcess);
        // threadPool_->start(processTask);
        if (captureImgMode_ == RateTriggered)
        {
            // Pace the loop so it approximates m_expectedFPS.
            int elapsed = loopTimer.elapsed();
            int timeToWait =  1000.0/m_expectedFPS - elapsed;
            if (timeToWait > 0)
            {
                QThread::msleep(timeToWait);
            }
        }
    }
}

// Mirrors the displayed frame's geometry (width/height/channels) into
// displayimageInfo_ and notifies QML only when any field actually changed.
void ImageProcessor::sendImgInfo(cv::Mat &frame)
{
    bool changed = false;
    const auto updateField = [&](const char *key, int value) {
        if (displayimageInfo_[key].toInt() != value)
        {
            displayimageInfo_[key] = value;
            changed = true;
        }
    };

    updateField("width", frame.cols);
    updateField("height", frame.rows);
    updateField("channels", frame.channels());

    if (changed)
        emit displayimageInfoChanged();
}
// Reacts to the camera's isOpenedChanged signal: once the camera reports
// open and the processor is not already running, spin up the trigger loop.
// Camera close/error transitions are intentionally not handled here (the
// previous status-based handling was removed).
void ImageProcessor::cameraStatusChanged()
{
    if (!CameraEth0::getInstance().getIsOpened())
        return;
    if (state_ == Running)
        return;

    LogInfo << "ImageProcessor detected camera initialization finished. Now starting trigger loop.";
    startTriggerLoop();
}

// Slot: re-reads parameters after ConfigManager (re)loads the value table
// and notifies QML of the refreshed photography delay.
void ImageProcessor::valueTableLoad()
{
    photographyDelay_ = ParamValue.getParameter<int>("base","photographyDelay");
    emit photographyDelayChanged();
}

// IO1 trigger slot — currently a no-op; the previous debounced trigger
// logic is kept below for reference but disabled.
void ImageProcessor::onIO1SignalReceived()
{
    // if (!debounceTimer->isActive()) {
    //     qDebug() << "接收到IO1信号";
    //     //receivedIO1 = true;
    //     debounceTimer->start(50); // 防抖动，50毫秒内不接受新的IO1信号
    //     QTimer::singleShot(0, this, [this](){ setcaptureImgMode(IOTriggered);}); // 延迟后执行captureImgMode函数
    // }
}

// IO2 trigger slot — intentionally empty placeholder.
void ImageProcessor::onIO2SignalReceived()
{

}

// Thread-safe state transition. The compare and the write both happen
// under stateMutex_ so a concurrent caller cannot slip between test and
// set (the original compared outside the lock, same race as the old
// setmultithreading). statusChanged is emitted outside the lock.
void ImageProcessor::setStatus(Status newStatus)
{
    bool changed = false;
    {
        std::lock_guard<std::mutex> lock(stateMutex_);
        if (state_ != newStatus)
        {
            state_ = newStatus;
            changed = true;
        }
    }
    if (changed)
    {
        emit statusChanged(newStatus);
        LogDebug << "ImageProcessor status changed to:" << newStatus;
    }
}


// Drains any stale camera-trigger tokens so the automatic run starts from
// a clean semaphore, then asks the auto-run worker (in its own thread) to
// begin processing.
void ImageProcessor::startAutoOperation()
{
    // tryAcquire() loops until the semaphore is empty, discarding leftover
    // releases from earlier soft/IO triggers.
    while (CameeraSemaphore.tryAcquire())
    {
    }
    QMetaObject::invokeMethod(&autoRunProcess, "startProcess", Qt::QueuedConnection);
}

// Asynchronously asks the auto-run worker to resume a paused process.
void ImageProcessor::autoContinueProcess()
{
    LogInfo << "ImageProcessor::autoContinueProcess" ;
    QMetaObject::invokeMethod(&autoRunProcess, "continueProcess", Qt::QueuedConnection);

}

// Asynchronously asks the auto-run worker to start the PLC.
void ImageProcessor::startPLC()
{
    LogInfo << "ImageProcessor::startPLC" ;
    QMetaObject::invokeMethod(&autoRunProcess, "startPLC", Qt::QueuedConnection);

}

// Asynchronously asks the auto-run worker to stop the PLC.
void ImageProcessor::stopPLC()
{
    LogInfo << "ImageProcessor::stopPLC" ;
    QMetaObject::invokeMethod(&autoRunProcess, "stopPLC", Qt::QueuedConnection);

}

// Asynchronously asks the auto-run worker to perform a real-time PLC stop.
void ImageProcessor::realTimestopPLC()
{
    // Fixed log text: it previously said "ImageProcessor::stopPLC", making
    // this entry point indistinguishable from stopPLC() in the log.
    LogInfo << "ImageProcessor::realTimestopPLC" ;
    QMetaObject::invokeMethod(&autoRunProcess, "RealTimestopPLC", Qt::QueuedConnection);
}


// Asynchronously asks the auto-run worker to stop the automatic process.
// The GPIO disconnect/debounce teardown below is kept for reference but
// disabled, matching the disabled hookup in startAutoOperation().
void ImageProcessor::stopAutoOperation()
{
    QMetaObject::invokeMethod(&autoRunProcess, "stopProcess", Qt::QueuedConnection);
    //     disconnect(&GpioController::getInstance(), &GpioController::infrared1trigger,
    //             this, &ImageProcessor::onIO1SignalReceived);
    //     disconnect(&GpioController::getInstance(), &GpioController::infrared2trigger,
    //             this, &ImageProcessor::onIO2SignalReceived);
    //     if (debounceTimer->isActive()) {
    //         debounceTimer->stop();
    //     }
    // }
}

