#include "Observer.h"
#include <iostream>
#include <mutex>

// NOTE: runJudgeLoop() must only be called after the constructor has finished.
Observer::Observer()
    : status(MotionDetector::MotionStatus::Unknown), is_judging_motion(false), queue_is_full(false),
      center_coordinate(cv::Point3f()), logger(Logger("observer.log")), rotationDetector(RotationDetector(logger))
{
    // clang-format off

    // Initialize the center-coordinate Kalman filter:
    // 8 state dimensions (x, vx, y, vy, z, vz, yaw, w), 4 measured (x, y, z, yaw).
    center_coordinate_kf.init(8, 4);

    // (1, 1, 0, 0, 0, 0, 0, 0) * ( x )
    // (0, 1, 0, 0, 0, 0, 0, 0) * (vx )
    // (0, 0, 1, 1, 0, 0, 0, 0) * ( y )
    // (0, 0, 0, 1, 0, 0, 0, 0) * (vy )
    // (0, 0, 0, 0, 1, 1, 0, 0) * ( z )
    // (0, 0, 0, 0, 0, 1, 0, 0) * (vz )
    // (0, 0, 0, 0, 0, 0, 1, 1) * (yaw)
    // (0, 0, 0, 0, 0, 0, 0, 1) * ( w )

    // Transition matrix A: each position is advanced by its paired velocity
    // (dt is implicitly one frame).
    center_coordinate_kf.transitionMatrix = (cv::Mat_<float>(8, 8) <<
        1, 1, 0, 0, 0, 0, 0, 0,
        0, 1, 0, 0, 0, 0, 0, 0,
        0, 0, 1, 1, 0, 0, 0, 0,
        0, 0, 0, 1, 0, 0, 0, 0,
        0, 0, 0, 0, 1, 1, 0, 0,
        0, 0, 0, 0, 0, 1, 0, 0,
        0, 0, 0, 0, 0, 0, 1, 1,
        0, 0, 0, 0, 0, 0, 0, 1);

    // Measurement matrix H: only x, y, z and yaw are observed.
    center_coordinate_kf.measurementMatrix = (cv::Mat_<float>(4, 8) <<
        1, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 1, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 1, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 1, 0);

    center_coordinate_kf.processNoiseCov = cv::Mat::eye(8, 8, CV_32F) * 1e-3f;
    center_coordinate_kf.measurementNoiseCov = cv::Mat::eye(4, 4, CV_32F) * 1e-2f;
    center_coordinate_kf.errorCovPost = cv::Mat::eye(8, 8, CV_32F);
    center_coordinate_kf.statePost = cv::Mat::zeros(8, 1, CV_32F);
    // clang-format on

    cv::FileStorage fs("./../Configure/ObserverSetting.xml", cv::FileStorage::READ);
    if (fs.isOpened())
    {
        fs["max_pitch_yaw_history"] >> max_pitch_yaw_history;
        fs.release();
    }
    else
    {
        // The configuration is mandatory: there is no sane fallback for
        // max_pitch_yaw_history, so bail out instead of running with an
        // uninitialized threshold. (Previous message claimed defaults were
        // used, which was never true — abort() always followed.)
        logger.log("⚠️ Failed to open ObserverSetting.xml, aborting.");
        abort();
    }
#ifdef DEBUG
    logger.logFormatted("max_pitch_yaw_history : %d\n", max_pitch_yaw_history);
#endif
}

Observer::~Observer() {}

// Thread-safe snapshot of the most recently judged motion status.
MotionDetector::MotionStatus Observer::getStatus() const
{
    std::lock_guard<std::mutex> guard(status_mutex);
    return this->status;
}

// Append one yaw sample to the history queue.
// Returns false (sample dropped) while the judge thread is consuming the
// queue, or when the queue has reached max_pitch_yaw_history — in which
// case the judge loop is woken up to process the batch.
bool Observer::tryAddYaw(float yaw)
{
    // Cheap pre-check: don't touch the queue while judgement is running.
    if (is_judging_motion.load(std::memory_order_relaxed))
        return false;

    std::lock_guard<std::mutex> history_lock(pitch_yaw_mutex);
    if (pitch_yaw_history.size() < max_pitch_yaw_history)
    {
        pitch_yaw_history.push(yaw);
        return true;
    }

    // Queue saturated: signal the judge loop instead of accepting more data.
    {
        std::lock_guard<std::mutex> notify_lock(judge_mutex);
        queue_is_full = true;
    }
    judge_cv.notify_one();
    return false;
}

// Discard all buffered yaw samples.
// Returns false (no-op) while the judge thread owns the queue.
bool Observer::ClearYawQueue()
{
    if (is_judging_motion.load(std::memory_order_relaxed))
        return false;

    std::lock_guard<std::mutex> history_lock(pitch_yaw_mutex);
    // Move-assign a freshly constructed queue; the old contents are destroyed.
    pitch_yaw_history = std::queue<float>();
    return true;
}

// Consumer loop: sleeps until tryAddYaw() reports the yaw queue is full,
// drains the queue, runs the rotation detector, and publishes the result.
// Intended to run on its own thread, started after construction completes.
void Observer::runJudgeLoop()
{
    while (true)
    {
#ifdef DEBUG
        // Timing is debug-only; keep it inside the guard so release builds
        // don't carry an unused variable (the original computed wait_start
        // unconditionally but only read it under DEBUG).
        auto wait_start = std::chrono::steady_clock::now();
#endif
        std::unique_lock<std::mutex> lock(judge_mutex);
        judge_cv.wait(lock, [this] { return queue_is_full; });

#ifdef DEBUG
        auto wait_end = std::chrono::steady_clock::now();
        auto wait_duration = std::chrono::duration_cast<std::chrono::milliseconds>(wait_end - wait_start).count();
        logger.log("Yaw history full, entering motion judgement.");
        auto process_start = std::chrono::steady_clock::now();
#endif

        {
            // Tell producers (tryAddYaw / ClearYawQueue) to back off while
            // the queue is being consumed.
            is_judging_motion.store(true);
            std::lock_guard<std::mutex> data_lock(pitch_yaw_mutex);

            // Drain the queue into a contiguous vector for the detector.
            std::vector<float> yaw_vec;
            yaw_vec.reserve(pitch_yaw_history.size());
            while (!pitch_yaw_history.empty())
            {
                yaw_vec.push_back(pitch_yaw_history.front());
                pitch_yaw_history.pop();
            }

            MotionDetector::MotionStatus tmp_status = this->getStatus();
            if (rotationDetector.detect(yaw_vec, tmp_status))
            {
                std::lock_guard<std::mutex> lock(status_mutex);
                status = tmp_status;
            }
        }

#ifdef DEBUG
        // Read status through the locked accessor for consistency with
        // every other reader (status_mutex is free at this point).
        logger.logFormatted("All assertions are completed and the final status is %s",
                            MotionDetector::toString(this->getStatus()).c_str());
        auto process_end = std::chrono::steady_clock::now();
        auto process_duration =
            std::chrono::duration_cast<std::chrono::milliseconds>(process_end - process_start).count();
        logger.logFormatted("Total judgement round: wait = %lld ms, process = %lld ms\n", wait_duration,
                            process_duration);
#endif

        // judge_mutex is still held here, so these flag writes are properly
        // synchronized with producers that test/set them under the same lock.
        queue_is_full = false;
        is_judging_motion.store(false);
    }
}

// Thread-safe snapshot of the last filtered center coordinate.
cv::Point3f Observer::getCenterCoordinate() const
{
    std::lock_guard<std::mutex> guard(center_coordinate_mutex);
    return this->center_coordinate;
}

// Feed one (x, y, z, yaw) observation through the Kalman filter and publish
// the filtered position into center_coordinate.
// NOTE(review): the Kalman filter itself is not mutex-protected — this
// method presumably has a single caller thread; verify before adding callers.
void Observer::predictCenterCoordinate(const cv::Point3f &center, float yaw, float w)
{
    // NOTE(review): `w` (angular velocity) is accepted but not part of the
    // 4-dim measurement (only x, y, z, yaw are observed by H). Kept in the
    // signature for compatibility; silence the unused-parameter warning
    // until it is wired into the filter.
    (void)w;

    // Measurement vector (x, y, z, yaw).
    cv::Mat measurement = (cv::Mat_<float>(4, 1) << center.x, center.y, center.z, yaw);

    center_coordinate_kf.predict();
    center_coordinate_kf.correct(measurement);

    // State layout is (x, vx, y, vy, z, vz, yaw, w): positions sit at the
    // even indices 0, 2, 4.
    const cv::Mat &state = center_coordinate_kf.statePost;
    cv::Point3f predicted_center(state.at<float>(0), state.at<float>(2), state.at<float>(4));

    std::lock_guard<std::mutex> lock(center_coordinate_mutex);
    center_coordinate = predicted_center;
}

/*
cv::Mat residual = measured_yaw - prediction.row(0);
                cv::Mat residual_product = residual.t() * yaw_kf.measurementNoiseCov.inv() * residual;
                float mahalanobis_dist = std::sqrt(residual_product.at<float>(0, 0));
*/