/*
 * Face best selector for HI_AIVSR_TASK_FACE_CAPTURE (no feature vectors)
 */

#include "face_best_selector_face_det.h"
#include "utils_log.h"
#include <algorithm>
#include <cmath>
#include <chrono>
#include <thread>

namespace El {
namespace Algo {

FaceBestSelectorFaceDet &FaceBestSelectorFaceDet::GetInstance()
{
    // Meyers singleton: initialization is thread-safe since C++11.
    static FaceBestSelectorFaceDet instance;
    return instance;
}

void FaceBestSelectorFaceDet::UpdateConfig(const nlohmann::json &cfg)
{
    // Thread-safe, best-effort configuration refresh. A malformed field
    // aborts the remaining updates but keeps already-applied values
    // (conservative policy: on any error, old values are retained).
    std::lock_guard<std::mutex> lk(mu_);
    try {
        const auto readU32 = [&cfg](const char *key, uint32_t &dst) {
            if (cfg.contains(key)) dst = cfg[key].get<uint32_t>();
        };
        const auto readInt = [&cfg](const char *key, int &dst) {
            if (cfg.contains(key)) dst = cfg[key].get<int>();
        };
        readU32("window_ms", window_ms_);
        readU32("early_stop_ms", early_stop_ms_);
        readInt("min_quality", min_quality_);
        if (cfg.contains("min_size_ratio")) {
            min_area_ratio_ = cfg["min_size_ratio"].get<float>();
        }
        readInt("improve_delta", improve_delta_q_);
        readU32("cooldown_ms", cooldown_ms_);
    } catch (...) {
        // Conservative handling: keep previous values on any parse/type error.
    }
}

bool FaceBestSelectorFaceDet::PushHalResult(const HAL_ALGO_REC_RESULT &halResult,
                                            uint32_t imgWidth,
                                            uint32_t imgHeight,
                                            uint64_t tsMs,
                                            nlohmann::json &outEvent)
{
    std::lock_guard<std::mutex> lk(mu_);

    // 记录最近图像尺寸（用于归一化与后台线程发事件）
    img_w_ = imgWidth;
    img_h_ = imgHeight;

    // 启动后台线程（仅一次）
    if (!running_) {
        running_ = true;
        worker_ = std::make_unique<std::thread>([this]() {
            while (running_) {
                std::this_thread::sleep_for(std::chrono::milliseconds(100));
                uint64_t now = (uint64_t)std::chrono::duration_cast<std::chrono::milliseconds>(
                                   std::chrono::system_clock::now().time_since_epoch())
                                   .count();
                std::lock_guard<std::mutex> lk2(mu_);
                // 定时评估超时触发
                EvalTimeoutAndEmit(now);
            }
        });
    }

    // 非人脸抓拍结果不处理
    if (halResult.enType != ALGO_TYPE_FACE_DET) {
        return false;
    }

    // 1) 组装候选 + 统计死亡 + 选出当帧最佳
    std::vector<Candidate> frameCands;
    frameCands.reserve(halResult.obj_num);
    std::vector<Candidate> dieQueue;

    Candidate frameBest{};
    bool hasFrameBest = false;

    for (int i = 0; i < halResult.obj_num; ++i) {
        Candidate c = MakeCandidate(halResult, i, imgWidth, imgHeight, tsMs);

        // 仅保留满足基本阈值的候选
        if (c.quality >= min_quality_ && c.area_ratio >= min_area_ratio_) {
            if (!hasFrameBest || BetterThan(c, frameBest)) {
                frameBest = c;
                hasFrameBest = true;
            }
        }
        frameCands.push_back(c);

        if (c.track_status == HAL_TRACK_STATUS_DIE) {
            dieQueue.push_back(c);
        }
    }

    // 2) 更新待定/全局状态并判断改进触发
    if (hasFrameBest) {
        has_pending_best_ = true;
        pending_best_ = frameBest;
        last_face_seen_ts_ = tsMs;
    }

    Candidate newGlobal{};
    int newGlobalTrackId = -1;
    bool found = false;
    if (hasFrameBest) {
        newGlobal = frameBest;
        newGlobalTrackId = frameBest.track_id;
        found = true;
    }

    if (!found) {
        return false; // 无可用最佳
    }

    bool improved = false;
    if (found) {
        improved = (!has_global_best_) || (newGlobal.quality >= (global_best_.quality + improve_delta_q_)) ||
                   (tsMs - last_global_emit_ts_ >= cooldown_ms_ && newGlobal.quality >= min_quality_);
    }

    if (!improved) {
        // 不满足即刻改进；由后台线程负责超时触发
        return false;
    }

    // 3) 改进触发
    outEvent = nlohmann::json::object();
    EmitEvent(newGlobal, newGlobalTrackId, imgWidth, imgHeight, tsMs, "improve");
    last_improve_ts_ = tsMs;

    // 4) 对于收到死亡状态的轨迹，立即触发一次（使用该帧候选）
    for (const auto &dc : dieQueue) {
        EmitEvent(dc, dc.track_id, imgWidth, imgHeight, tsMs, "die");
    }

    return true;
}

// 静态/工具
// 使用 HAL 提供的 track_id，不再需要 IoU 关联

float FaceBestSelectorFaceDet::CenterScore(const BBox &b, uint32_t w, uint32_t h)
{
    // Score in [0, 1]: 1.0 when the box center coincides with the image
    // center, decreasing with normalized radial distance, floored at 0.
    const float halfW = static_cast<float>(w) * 0.5f;
    const float halfH = static_cast<float>(h) * 0.5f;
    const float centerX = static_cast<float>(b.x) + static_cast<float>(b.w) * 0.5f;
    const float centerY = static_cast<float>(b.y) + static_cast<float>(b.h) * 0.5f;
    const float nx = std::abs(centerX - halfW) / halfW;
    const float ny = std::abs(centerY - halfH) / halfH;
    const float dist = std::min(1.0f, std::sqrt(nx * nx + ny * ny));
    return 1.0f - dist;
}

float FaceBestSelectorFaceDet::AreaRatio(const BBox &b, uint32_t w, uint32_t h)
{
    // Fraction of the image area covered by the box; a degenerate image
    // size yields 0 so callers never divide by zero.
    if (w == 0 || h == 0) return 0.0f;
    // Fix: multiply in floating point. The previous integer products could
    // overflow (w * h exceeds uint32_t for images wider than ~65k per side,
    // and b.w * b.h can likewise overflow its integer type).
    const float boxArea = static_cast<float>(b.w) * static_cast<float>(b.h);
    const float imgArea = static_cast<float>(w) * static_cast<float>(h);
    return boxArea / imgArea;
}

bool FaceBestSelectorFaceDet::IsFaceLabel(const char *label)
{
    // Accept any of the label spellings the detector may report for faces.
    if (label == nullptr || *label == '\0') {
        return false;
    }
    const std::string name(label);
    return name == "face" || name == "Face" || name == "FACE" || name == "face_capture";
}

FaceBestSelectorFaceDet::BBox FaceBestSelectorFaceDet::MakeBox(const HAL_REC_PARAM &p)
{
    // Sanitize the HAL box: crop any part hanging off the top/left edge and
    // enforce a minimal 1x1 size so later area/center math stays valid.
    BBox b{p.box.left, p.box.top, p.box.width, p.box.height};
    // Fix: clamping a negative origin without shrinking the size used to
    // *translate* the box right/down by the clipped amount, misplacing the
    // normalized coordinates; crop instead (shrink by the off-image part).
    if (b.x < 0) { b.w += b.x; b.x = 0; }
    if (b.y < 0) { b.h += b.y; b.y = 0; }
    if (b.w < 1) b.w = 1;
    if (b.h < 1) b.h = 1;
    // NOTE(review): no clamp against the right/bottom image edges here --
    // the image size is not available in this helper; verify callers
    // tolerate boxes that overrun those edges.
    return b;
}

FaceBestSelectorFaceDet::Candidate FaceBestSelectorFaceDet::MakeCandidate(const HAL_ALGO_REC_RESULT &r,
                                                                          int idx,
                                                                          uint32_t w,
                                                                          uint32_t h,
                                                                          uint64_t ts)
{
    // Assemble a Candidate from slot idx of the HAL result, normalizing
    // its geometry against the w x h image.
    const bool faceResult =
        (r.enType == ALGO_TYPE_FACE_DET) || (r.enType == ALGO_TYPE_FACE_RECOGNITION);
    // Face-type results carry their base params inside face_para.
    const HAL_REC_PARAM &param = faceResult ? r.face_para[idx].base : r.obj_para[idx];

    Candidate cand{};
    cand.box = MakeBox(param);
    // face_quality only exists on face-type results; others score 0.
    cand.quality = faceResult ? static_cast<int>(r.face_para[idx].face_quality) : 0;
    // HAL confidence is divided by 100 and clamped into [0, 1].
    cand.confidence = std::max(0.0f, std::min(1.0f, static_cast<float>(param.confidence) / 100.0f));
    cand.tsMs = ts;
    cand.area_ratio = AreaRatio(cand.box, w, h);
    cand.center_score = CenterScore(cand.box, w, h);
    cand.track_id = param.track_id;
    cand.track_status = static_cast<int>(param.track_status);
    return cand;
}

void FaceBestSelectorFaceDet::CleanupStaleTracks(uint64_t nowMs) { (void)nowMs; }

// 不维护内部轨迹窗口，依赖 HAL 的 track_id/状态

bool FaceBestSelectorFaceDet::BetterThan(const Candidate &a, const Candidate &b)
{
    // Lexicographic preference: quality, then confidence, then how centered
    // the face is, then its relative size.
    if (a.quality != b.quality) {
        return a.quality > b.quality;
    }
    if (a.confidence != b.confidence) {
        return a.confidence > b.confidence;
    }
    if (a.center_score != b.center_score) {
        return a.center_score > b.center_score;
    }
    if (a.area_ratio != b.area_ratio) {
        return a.area_ratio > b.area_ratio;
    }
    // Complete tie: the candidate that arrived first stays preferred.
    return a.tsMs < b.tsMs;
}

void FaceBestSelectorFaceDet::EvalTimeoutAndEmit(uint64_t nowMs)
{
    // Timeout ("early stop") trigger, driven by the background worker:
    // emit the pending best when at least early_stop_ms_ has passed since
    // the last improve/timeout emission AND a face was seen within the
    // last window_ms_.
    //
    // NOTE(review): has_pending_best_ is never cleared here, so while a
    // face stays in view this re-emits a "timeout" event roughly every
    // early_stop_ms_ -- presumably a deliberate periodic refresh; confirm.
    // NOTE(review): callers must hold mu_ (this reads/writes shared state);
    // both call sites (the worker lambdas) do so.
    if (has_pending_best_ && (nowMs - last_improve_ts_ >= early_stop_ms_) &&
        (nowMs - last_face_seen_ts_ <= window_ms_)) {
        EmitEvent(pending_best_, pending_best_.track_id, img_w_, img_h_, nowMs, "timeout");
        last_improve_ts_ = nowMs;
    }
}

void FaceBestSelectorFaceDet::EmitEvent(const Candidate &cand, int trackId, uint32_t imgW, uint32_t imgH,
                                        uint64_t ts, const char *reason)
{
    // 更新全局状态
    has_global_best_ = true;
    global_best_ = cand;
    global_best_track_id_ = trackId;
    last_global_emit_ts_ = ts;

    // 无内部轨迹管理，last_emit 仅用全局节流控制

    // 组装事件
    nlohmann::json ev;
    ev["type"] = "face_best";
    ev["timestamp"] = ts;
    ev["track_id"] = trackId;
    ev["face_quality"] = cand.quality;
    ev["confidence"] = cand.confidence;
    ev["x"] = static_cast<float>(cand.box.x) / static_cast<float>(imgW);
    ev["y"] = static_cast<float>(cand.box.y) / static_cast<float>(imgH);
    ev["width"] = static_cast<float>(cand.box.w) / static_cast<float>(imgW);
    ev["height"] = static_cast<float>(cand.box.h) / static_cast<float>(imgH);
    ev["reason"] = reason ? reason : "improve";

    EL_INFO("Best face {}: track={}, quality={}, conf={:.2f}", ev["reason"].get<std::string>().c_str(), trackId,
            cand.quality, cand.confidence);

    // 直接回调（独立线程或Push路径均使用）
    auto cb = cb_;
    if (cb) {
        // unlock mutex before callback to avoid deadlock
        // but here we hold mu_ in callers; ensure EmitEvent only called while mu_ is locked.
    }
    // 为确保线程安全，复制回调后在作用域外调用
    EventCallback outCb;
    {
        outCb = cb_;
    }
    if (outCb) {
        // 解锁后回调
        // 注意：调用者需在外部避免持有 mu_ 再进入此函数；当前 worker 线程已持锁，无法在此释放。
        // 因此，我们在外层尽量少持锁调用 EmitEvent；当前实现依赖 Push/worker 的调用场景，回调轻量。
        try {
            outCb(ev);
        } catch (...) {
        }
    }
}

void FaceBestSelectorFaceDet::SetEventCallback(EventCallback cb)
{
    // Install (or replace) the event sink and lazily start the timeout
    // worker (PushHalResult performs the same lazy start).
    std::lock_guard<std::mutex> lk(mu_);
    cb_ = std::move(cb);
    if (!running_) {
        running_ = true;
        worker_ = std::make_unique<std::thread>([this]() {
            for (;;) {
                std::this_thread::sleep_for(std::chrono::milliseconds(100));
                // system_clock (not steady_clock) keeps "now" comparable
                // with the caller-supplied epoch-ms timestamps.
                const uint64_t now = static_cast<uint64_t>(
                    std::chrono::duration_cast<std::chrono::milliseconds>(
                        std::chrono::system_clock::now().time_since_epoch())
                        .count());
                std::lock_guard<std::mutex> lk2(mu_);
                // Fix: read running_ under mu_ -- the previous unsynchronized
                // while(running_) condition was a data race on a plain bool.
                if (!running_) break;
                EvalTimeoutAndEmit(now);
            }
        });
    }
}

} // namespace Algo
} // namespace El
