//package com.lhkk.inventoryvehicle.ai // Replace with your actual package name
//
//import android.content.Context
//import android.graphics.RectF
//import androidx.camera.core.ImageAnalysis
//import androidx.camera.core.ImageProxy
//import com.cj.mvvmlib.util.ToastUtil
//import com.lhkk.inventoryvehicle.common.LogFileUtils
//import com.lhkk.inventoryvehicle.http.handler
//import org.tensorflow.lite.support.image.ImageProcessor
//import org.tensorflow.lite.support.image.TensorImage
//import org.tensorflow.lite.support.image.ops.Rot90Op
//import org.tensorflow.lite.task.core.BaseOptions
//import org.tensorflow.lite.task.vision.detector.Detection
//import org.tensorflow.lite.task.vision.detector.ObjectDetector
//import java.util.concurrent.atomic.AtomicBoolean
//import java.util.concurrent.atomic.AtomicInteger
//
//// --- Enums and Data Classes ---
//
///**
// * Defines the possible states of a detected vehicle.
// * `APPROACHING` is now the key state for triggering the single photo capture.
// */
//enum class VehicleState {
//    NEW,        // Vehicle has just been detected.
//    APPROACHING,// Vehicle has crossed the initial entry line ("车头" = vehicle head/front).
//    CENTERED,   // Vehicle is in the central zone.
//                // NOTE(review): CENTERED, LEAVING and DONE are never assigned by
//                // VehicleAnalyzer in this file — dead states or planned future work?
//    LEAVING,    // Vehicle is exiting the frame.
//    DONE        // Vehicle has left the frame and processing is complete.
//}
//
///**
// * Defines the direction of vehicle movement.
// */
//enum class VehicleDirection {
//    LEFT_TO_RIGHT, // Bounding-box center X increasing frame-over-frame (set in determineStateAndDirection).
//    RIGHT_TO_LEFT, // Bounding-box center X decreasing frame-over-frame.
//    UNKNOWN        // Initial value; movement not yet large enough (or outside entry zones) to classify.
//}
//
///**
// * Provides unique IDs for each tracked vehicle.
// */
//object VehicleIdProvider {
//    // Monotonically increasing counter; AtomicInteger makes ID allocation thread-safe.
//    private val nextId = AtomicInteger(0)
//    // Returns the next unique tracker ID. First caller receives 1 (incrementAndGet pre-increments).
//    fun getNextId() = nextId.incrementAndGet()
//}
//
///**
// * Holds all tracking information for a vehicle, now including its direction.
// * A single boolean `hasTakenPhoto` replaces the multiple flags.
// */
//data class TrackedVehicleInfo(
//    var detection: Detection,                      // Most recent matched detection; replaced on every successful IoU match.
//    var state: VehicleState,                       // Current lifecycle state (only NEW -> APPROACHING is ever assigned below).
//    var direction: VehicleDirection,               // Travel direction; UNKNOWN until movement is observed in an entry zone.
//    var lastDetectionX: Float,                     // Center X (pixels) from the previous frame, used to compute per-frame delta.
//    var stateTransitionCandidateCounter: Int = 0,  // Consecutive frames a state change has been proposed (hysteresis).
//    var lostFramesCounter: Int = 0,                // Consecutive frames with no matching detection; removed at MAX_LOST_FRAMES.
//    var hasTakenPhoto: Boolean = false // Single flag for the one-time photo
//)
//
//// --- Main Analyzer Class ---
//
///**
// * Analyzes image frames for bidirectional vehicle traffic.
// * It detects vehicles, tracks their movement in either direction,
// * and triggers a single photo capture for the "head" (车头) of each new vehicle.
// *
// * @param context The application context.
// * @param takePictureCallback Lambda to trigger photo capture in the calling activity.
// * @param listener Lambda to provide status updates.
// */
//class VehicleAnalyzer(
//    private val context: Context,
//    private val takePictureCallback: () -> Unit,
//    private val listener: (status: String, detections: List<Detection>, w: Int, h: Int) -> Unit
//) : ImageAnalysis.Analyzer {
//
//    private val objectDetector: ObjectDetector
//    private var trackedVehicles = mutableMapOf<Int, TrackedVehicleInfo>()
//
//    // A flag to ensure we only trigger a photo for one vehicle at a time until it's gone.
//    // NOTE(review): this is only reset when the tracked-vehicle map becomes EMPTY (see
//    // analyze()), so a second vehicle entering while the first is still tracked will
//    // never be photographed, even after the first leaves — confirm this is intended.
//    private val canTriggerPhoto = AtomicBoolean(true)
//
//    // --- Configuration ---
//    // Normalized horizontal lines for state transitions.
//    private val LINE_A_X_NORMALIZED = 0.25f // Entry line from the left
//    private val LINE_B_X_NORMALIZED = 0.75f // Entry line from the right
//
//    // Defines the zone for detecting new vehicles.
//    private val NEW_VEHICLE_ENTRY_ZONE_LEFT = 0.15f
//    private val NEW_VEHICLE_ENTRY_ZONE_RIGHT = 0.85f
//
//    // Thresholds for state change confirmation and movement detection.
//    private val FRAME_CONFIRMATION_THRESHOLD = 3
//    private val MIN_MOVEMENT_THRESHOLD_NORMALIZED = 0.005f
//    private val MAX_LOST_FRAMES = 15
//    private val IOU_THRESHOLD = 0.35f
//    private val MIN_DETECTION_AREA_NORMALIZED = 0.01f
//
//    init {
//        val baseOptions = BaseOptions.builder().setNumThreads(4).build()
//        val options = ObjectDetector.ObjectDetectorOptions.builder()
//            .setBaseOptions(baseOptions)
//            .setMaxResults(3)
//            .setScoreThreshold(0.65f)
//            .build()
//        // NOTE(review): model loading does asset I/O and can throw (missing/corrupt
//        // .tflite) on whatever thread constructs the analyzer — consider a factory.
//        objectDetector = ObjectDetector.createFromFileAndOptions(context, "ssd_mobilenet_v1.tflite", options)
//    }
//
//    /**
//     * Per-frame entry point. Rotates the frame upright, runs detection, updates the
//     * tracker, and publishes a status string plus raw detections to [listener].
//     */
//    @androidx.annotation.OptIn(androidx.camera.core.ExperimentalGetImage::class)
//    override fun analyze(imageProxy: ImageProxy) {
//        // FIX: always release the frame, even if detection/processing throws; an
//        // unclosed ImageProxy stalls the CameraX analysis pipeline.
//        try {
//            val image = imageProxy.toBitmap()
//
//            // Undo the sensor rotation so bounding boxes are in upright-image coordinates.
//            val imageProcessor = ImageProcessor.Builder()
//                .add(Rot90Op(-imageProxy.imageInfo.rotationDegrees / 90))
//                .build()
//            val tensorImage = imageProcessor.process(TensorImage.fromBitmap(image))
//            val imageWidth = tensorImage.width
//            val imageHeight = tensorImage.height
//
//            // Keep only sufficiently large "car"/"truck" detections.
//            // FIX: categories.first() threw NoSuchElementException for a detection with
//            // an empty category list; use firstOrNull() and compare labels explicitly.
//            val detections = objectDetector.detect(tensorImage).filter {
//                val label = it.categories.firstOrNull()?.label
//                val detectionArea = it.boundingBox.width() * it.boundingBox.height()
//                (label == "car" || label == "truck") &&
//                        (detectionArea / (imageWidth * imageHeight)) > MIN_DETECTION_AREA_NORMALIZED
//            }
//
//            // Core tracking logic
//            updateTrackedVehicles(detections, imageWidth)
//
//            // If no vehicles are being tracked, reset the photo trigger.
//            if (trackedVehicles.isEmpty()) {
//                canTriggerPhoto.set(true)
//            }
//
//            // Provide status update.
//            // FIX: this previously printed it.detection.hashCode() as the "ID", but the
//            // Detection object is replaced on every matched frame (updateTrackedVehicles),
//            // so the displayed ID changed each frame. Report the stable map key instead.
//            val status = trackedVehicles.entries.joinToString("\n") { (id, info) ->
//                "ID $id: ${info.state} (${info.direction})"
//            }
//            listener(status, detections, imageWidth, imageHeight)
//        } finally {
//            imageProxy.close()
//        }
//    }
//
//    /**
//     * Updates tracked vehicles using greedy IoU matching (in map-iteration order),
//     * ages out vehicles unseen for MAX_LOST_FRAMES, and registers unmatched
//     * detections as new vehicles.
//     */
//    private fun updateTrackedVehicles(detections: List<Detection>, imageWidth: Int) {
//        val updatedTrackedVehiclesMap = mutableMapOf<Int, TrackedVehicleInfo>()
//        val unmatchedDetections = detections.toMutableList()
//
//        // 1. Match existing tracked vehicles with new detections using IoU.
//        trackedVehicles.forEach { (id, trackedInfo) ->
//            val bestMatch = unmatchedDetections.map { detection ->
//                Pair(detection, calculateIoU(trackedInfo.detection.boundingBox, detection.boundingBox))
//            }.filter { it.second > IOU_THRESHOLD }.maxByOrNull { it.second }
//
//            if (bestMatch != null) {
//                // Found a match, update the vehicle's info.
//                trackedInfo.detection = bestMatch.first
//                trackedInfo.lostFramesCounter = 0
//                unmatchedDetections.remove(bestMatch.first)
//                determineStateAndDirection(trackedInfo, imageWidth)
//                updatedTrackedVehiclesMap[id] = trackedInfo
//            } else {
//                // No match found, increment lost frames counter; drop after MAX_LOST_FRAMES.
//                trackedInfo.lostFramesCounter++
//                if (trackedInfo.lostFramesCounter < MAX_LOST_FRAMES) {
//                    updatedTrackedVehiclesMap[id] = trackedInfo
//                } else {
//                    LogFileUtils.writeTxtToFile("Vehicle ${id} lost and removed.")
//                }
//            }
//        }
//
//        // 2. Treat remaining unmatched detections as new vehicles.
//        unmatchedDetections.forEach { newDetection ->
//            val newId = VehicleIdProvider.getNextId()
//            val newVehicle = TrackedVehicleInfo(
//                detection = newDetection,
//                state = VehicleState.NEW,
//                direction = VehicleDirection.UNKNOWN,
//                lastDetectionX = newDetection.boundingBox.centerX()
//            )
//            // Immediately evaluate the new vehicle (deltaX is 0 on this first call,
//            // so no direction/state change can happen until the next frame).
//            determineStateAndDirection(newVehicle, imageWidth)
//            updatedTrackedVehiclesMap[newId] = newVehicle
//        }
//
//        trackedVehicles = updatedTrackedVehiclesMap
//    }
//
//    /**
//     * Determines the vehicle's direction and state, and triggers photo capture.
//     * This is the core logic for bidirectional movement.
//     */
//    private fun determineStateAndDirection(info: TrackedVehicleInfo, imageWidth: Int) {
//        val centerX = info.detection.boundingBox.centerX()
//        val centerXNorm = centerX / imageWidth
//        val deltaXNorm = (centerX - info.lastDetectionX) / imageWidth
//
//        val isMovingRight = deltaXNorm > MIN_MOVEMENT_THRESHOLD_NORMALIZED
//        val isMovingLeft = deltaXNorm < -MIN_MOVEMENT_THRESHOLD_NORMALIZED
//
//        // 1. Determine direction if it's currently UNKNOWN.
//        // NOTE(review): direction is only ever assigned while the vehicle center is
//        // inside the narrow entry zones (< 0.15 or > 0.85). A vehicle first detected
//        // mid-frame stays UNKNOWN forever and is never photographed — confirm intended.
//        if (info.direction == VehicleDirection.UNKNOWN) {
//            if (isMovingRight && centerXNorm < NEW_VEHICLE_ENTRY_ZONE_LEFT) {
//                info.direction = VehicleDirection.LEFT_TO_RIGHT
//            } else if (isMovingLeft && centerXNorm > NEW_VEHICLE_ENTRY_ZONE_RIGHT) {
//                info.direction = VehicleDirection.RIGHT_TO_LEFT
//            }
//        }
//
//        var nextState = info.state
//
//        // 2. Update state based on direction and position.
//        when (info.direction) {
//            VehicleDirection.LEFT_TO_RIGHT -> {
//                if (info.state == VehicleState.NEW && centerXNorm > LINE_A_X_NORMALIZED && isMovingRight) {
//                    nextState = VehicleState.APPROACHING
//                }
//            }
//            VehicleDirection.RIGHT_TO_LEFT -> {
//                if (info.state == VehicleState.NEW && centerXNorm < LINE_B_X_NORMALIZED && isMovingLeft) {
//                    nextState = VehicleState.APPROACHING
//                }
//            }
//            else -> {} // Do nothing if direction is still UNKNOWN
//        }
//
//        // 3. Apply state change with hysteresis (FRAME_CONFIRMATION_THRESHOLD consecutive
//        //    frames proposing the same change) and trigger the one-time photo.
//        if (nextState != info.state) {
//            info.stateTransitionCandidateCounter++
//            if (info.stateTransitionCandidateCounter >= FRAME_CONFIRMATION_THRESHOLD) {
//                info.state = nextState
//                info.stateTransitionCandidateCounter = 0
//
//                // --- SINGLE PHOTO TRIGGER LOGIC ---
//                // getAndSet(false) is evaluated last so the global latch is only
//                // consumed when this vehicle actually qualifies for a photo.
//                if (info.state == VehicleState.APPROACHING && !info.hasTakenPhoto && canTriggerPhoto.getAndSet(false)) {
//                    info.hasTakenPhoto = true // Mark this vehicle as photographed
//                    val directionText = if(info.direction == VehicleDirection.LEFT_TO_RIGHT) "从左到右" else "从右到左"
//                    handler.post { ToastUtil.showShortToast("车头 ($directionText)") }
//                    takePictureCallback.invoke()
//                    LogFileUtils.writeTxtToFile("📸 Photo triggered for vehicle head ($directionText)")
//                }
//            }
//        } else {
//            info.stateTransitionCandidateCounter = 0
//        }
//        info.lastDetectionX = centerX
//    }
//
//    /**
//     * Calculates Intersection over Union (IoU) for two bounding boxes.
//     * Returns 0 when the boxes do not overlap or the union area is degenerate.
//     */
//    private fun calculateIoU(box1: RectF, box2: RectF): Float {
//        val intersection = RectF(
//            maxOf(box1.left, box2.left),
//            maxOf(box1.top, box2.top),
//            minOf(box1.right, box2.right),
//            minOf(box1.bottom, box2.bottom)
//        )
//        if (intersection.width() <= 0 || intersection.height() <= 0) return 0f
//
//        val intersectionArea = intersection.width() * intersection.height()
//        val box1Area = box1.width() * box1.height()
//        val box2Area = box2.width() * box2.height()
//        val unionArea = box1Area + box2Area - intersectionArea
//
//        return if (unionArea > 0f) intersectionArea / unionArea else 0f
//    }
//
//    /**
//     * Resets all tracking data and re-enables photo capture.
//     * NOTE(review): trackedVehicles is not synchronized — call this from the same
//     * executor thread that runs analyze(), or races are possible.
//     */
//    fun reset() {
//        trackedVehicles.clear()
//        canTriggerPhoto.set(true)
//        LogFileUtils.writeTxtToFile("Analyzer reset.")
//    }
//}