package com.siha_back.service.impl;

import com.siha_back.common.AnalysisResult;
import com.siha_back.common.AnalysisResult;
import com.siha_back.service.VideoAnalysisService;
import com.siha_back.utils.VideoAnalysisException;
import org.opencv.core.*;
import org.opencv.videoio.VideoCapture;
import org.opencv.videoio.Videoio;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.imgproc.Imgproc;
import org.springframework.stereotype.Service;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

@Service
public class VideoAnalysisServiceImpl implements VideoAnalysisService {

    static {
        try {
            System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
            System.out.println("OpenCV 本地库加载成功");
        } catch (UnsatisfiedLinkError e) {
            System.err.println("无法加载 OpenCV 本地库: " + e.getMessage());
            e.printStackTrace();
        }
    }

    /** Haar cascade used to locate faces in each sampled frame. */
    private final CascadeClassifier faceDetector;
    /** Haar cascade used to check for open eyes inside a detected face. */
    private final CascadeClassifier eyeDetector;

    public VideoAnalysisServiceImpl() {
        // NOTE(review): these relative paths only resolve when the app runs from
        // the project root; they break inside a packaged jar. Consider loading
        // the cascades from the classpath instead.
        this.faceDetector = new CascadeClassifier("src/main/resources/haarcascade_frontalface_default.xml");
        this.eyeDetector = new CascadeClassifier("src/main/resources/haarcascade_eye.xml");
        // Fix: CascadeClassifier fails silently on a bad path; without this check
        // every frame would simply detect nothing and the bug is very hard to find.
        if (faceDetector.empty()) {
            System.err.println("人脸分类器加载失败: haarcascade_frontalface_default.xml");
        }
        if (eyeDetector.empty()) {
            System.err.println("人眼分类器加载失败: haarcascade_eye.xml");
        }
    }

    /**
     * Copies the uploaded video to a temp file and runs the frame analysis.
     *
     * @param videoFile uploaded video (any format OpenCV's VideoCapture can open)
     * @return aggregated hesitation/engagement result
     * @throws IOException            if the upload cannot be written to disk
     * @throws VideoAnalysisException if the video cannot be opened or analyzed
     */
    @Override
    public AnalysisResult analyzeVideo(MultipartFile videoFile) throws VideoAnalysisException, IOException {
        String customTempDirPath = "D:/siha";
        Path tempDir = Paths.get(customTempDirPath);
        if (!Files.exists(tempDir)) {
            Files.createDirectories(tempDir);
        }
        if (!Files.isWritable(tempDir)) {
            System.out.println("临时目录不可写，请检查权限");
        }

        // Fix: the original passed the raw client-supplied filename as the temp-file
        // suffix (path-injection risk). Keep only a plain extension, if any.
        String originalName = videoFile.getOriginalFilename();
        String suffix = ".tmp";
        if (originalName != null) {
            int dot = originalName.lastIndexOf('.');
            if (dot >= 0
                    && originalName.indexOf('/', dot) < 0
                    && originalName.indexOf('\\', dot) < 0) {
                suffix = originalName.substring(dot);
            }
        }

        File tempFile = File.createTempFile("video", suffix, tempDir.toFile());
        try {
            // Some transferTo implementations refuse to overwrite an existing file,
            // so remove the empty placeholder createTempFile just made.
            tempFile.delete();
            // Fix: the original caught and swallowed this IOException, then analyzed
            // a missing/empty file. Let it propagate (the method already declares it).
            videoFile.transferTo(tempFile);
            return analyzeVideoFile(tempFile.getAbsolutePath());
        } finally {
            // Fix: the temp file used to leak on every call.
            tempFile.delete();
        }
    }

    /**
     * Opens the video at {@code videoPath} and folds every sampled frame into an
     * {@link AnalysisContext}, producing the final {@link AnalysisResult}.
     *
     * @throws VideoAnalysisException if the file cannot be opened by OpenCV
     */
    private AnalysisResult analyzeVideoFile(String videoPath) throws VideoAnalysisException {
        VideoCapture capture = new VideoCapture(videoPath);
        if (!capture.isOpened()) {
            throw new VideoAnalysisException("无法打开视频文件");
        }

        try {
            AnalysisContext context = new AnalysisContext();
            double fps = capture.get(Videoio.CAP_PROP_FPS);
            if (fps <= 0) {
                // Fix: some containers report 0 fps, which previously caused a
                // division by zero; fall back to a common default.
                fps = 30.0;
            }
            int frameCount = 0;

            Mat frame = new Mat();
            try {
                while (capture.read(frame)) {
                    frameCount++;
                    if (frameCount % 3 != 0) continue; // sample every 3rd frame
                    processFrame(frame, context, frameCount, fps);
                    // Fix: removed the per-frame Thread.sleep() that throttled this
                    // offline analysis down to real-time playback speed.
                }
            } finally {
                frame.release();
            }

            AnalysisResult result = buildResult(context);
            if (context.previousFrame != null) {
                context.previousFrame.release();
            }
            return result;
        } finally {
            capture.release();
        }
    }

    /**
     * Analyzes a single frame: detects the first face, measures its movement
     * against the previous frame, updates hesitation and engagement state.
     */
    private void processFrame(Mat frame, AnalysisContext context, long frameNumber, double fps) {
        Mat grayFrame = new Mat();
        Imgproc.cvtColor(frame, grayFrame, Imgproc.COLOR_BGR2GRAY);
        Imgproc.equalizeHist(grayFrame, grayFrame);

        MatOfRect faceDetections = new MatOfRect();
        faceDetector.detectMultiScale(grayFrame, faceDetections);

        Rect[] faces = faceDetections.toArray(); // convert once, not twice
        if (faces.length > 0) {
            // Tracks the first reported face; not necessarily the largest one.
            Rect faceRect = faces[0];
            double movement = calculateMovement(faceRect, context.previousFaceRect);

            detectHesitation(movement, frameNumber, fps, context);
            boolean eyesOpen = detectEyes(grayFrame, faceRect);

            updateEngagementScore(movement, eyesOpen, context);

            context.previousFaceRect = faceRect;
        }

        // Fix: release the previous frame before replacing it — native Mat memory
        // is not reclaimed by the GC promptly and accumulated for the whole video.
        if (context.previousFrame != null) {
            context.previousFrame.release();
        }
        context.previousFrame = grayFrame.clone();
        grayFrame.release();
        faceDetections.release();
    }

    /**
     * Euclidean distance (in pixels) between the centers of the current and
     * previous face rectangles; 0 when there is no previous face yet.
     */
    private double calculateMovement(Rect current, Rect previous) {
        if (previous == null) return 0;

        Point currentCenter = new Point(current.x + current.width / 2, current.y + current.height / 2);
        Point previousCenter = new Point(previous.x + previous.width / 2, previous.y + previous.height / 2);

        // Math.hypot avoids intermediate overflow/underflow of pow+sqrt.
        return Math.hypot(currentCenter.x - previousCenter.x,
                currentCenter.y - previousCenter.y);
    }

    /**
     * Counts a hesitation when the face stays nearly still (movement below
     * 5 px) for more than one second. A single continuous pause is counted
     * exactly once; the counter re-arms only after movement resumes.
     */
    private void detectHesitation(double movement, long frameNumber, double fps, AnalysisContext context) {
        double movementThreshold = 5.0;
        double hesitationDurationThreshold = 1.0; // seconds

        if (movement < movementThreshold) {
            if (!context.isHesitating) {
                context.isHesitating = true;
                context.hesitationCounted = false;
                context.lastMovementTime = frameNumber / fps;
            } else if (!context.hesitationCounted) {
                double hesitationDuration = (frameNumber / fps) - context.lastMovementTime;
                if (hesitationDuration > hesitationDurationThreshold) {
                    context.hesitationCount++;
                    // Fix: the original reset isHesitating here, so one long pause
                    // was re-counted roughly once per second.
                    context.hesitationCounted = true;
                }
            }
        } else {
            context.isHesitating = false;
        }
    }

    /**
     * Runs the eye cascade on the upper-middle band of the detected face
     * (between 1/4 and ~7/12 of the face height, where eyes sit).
     *
     * @return true if at least one eye is detected (treated as "eyes open")
     */
    private boolean detectEyes(Mat faceROI, Rect faceRect) {
        Rect eyeRegion = new Rect(
                faceRect.x,
                faceRect.y + faceRect.height / 4,
                faceRect.width,
                faceRect.height / 3
        );

        Mat eyeROI = new Mat(faceROI, eyeRegion);
        MatOfRect eyeDetections = new MatOfRect();
        eyeDetector.detectMultiScale(eyeROI, eyeDetections);

        boolean eyesFound = eyeDetections.toArray().length >= 1;
        // Fix: release the per-frame native buffers instead of waiting for GC.
        eyeROI.release();
        eyeDetections.release();
        return eyesFound;
    }

    /**
     * Updates the engagement score as an exponential moving average
     * (decay 0.9). A frame contributes up to 0.3 for movement (saturating at
     * 10 px) and 0.7 for open eyes; the score therefore stays in [0, 1].
     */
    private void updateEngagementScore(double movement, boolean eyesOpen, AnalysisContext context) {
        double frameScore = 0;
        frameScore += Math.min(movement / 10.0, 1.0) * 0.3;
        if (eyesOpen) frameScore += 0.7;

        context.engagementScore = 0.9 * context.engagementScore + 0.1 * frameScore;
    }

    /** Maps the final engagement score onto a coarse label and packs the result. */
    private AnalysisResult buildResult(AnalysisContext context) {
        String engagementLevel;
        if (context.engagementScore > 0.7) engagementLevel = "高度专注";
        else if (context.engagementScore > 0.4) engagementLevel = "一般专注";
        else engagementLevel = "分心";

        return new AnalysisResult(
                context.hesitationCount,
                context.engagementScore,
                engagementLevel
        );
    }

    /** Mutable per-video state threaded through the frame loop. */
    private static class AnalysisContext {
        Mat previousFrame;          // grayscale copy of the last processed frame (released on replace)
        Rect previousFaceRect;      // face position in the last frame with a detection
        double lastMovementTime;    // timestamp (s) when the current stillness began
        boolean isHesitating;       // currently inside a low-movement stretch
        boolean hesitationCounted;  // current stretch already counted as a hesitation
        int hesitationCount;        // total pauses longer than the threshold
        double engagementScore;     // EMA in [0, 1], starts at 0
    }
}
