"""巡线模块
用于识别黑线并控制机器人沿着黑线行走
"""

import cv2
import numpy as np
from typing import Dict, Any, Optional, Tuple
from .vision_base import VisionBase
from lab_conf import color_range
import time
import hexapod
import PWMServo
import Serial_Servo_Running as SSR
import asyncio


class LineFollowing(VisionBase):
    """Line-following vision module.

    Detects a black line in camera frames, computes a steering deflection
    angle relative to the frame center, and drives the hexapod robot along
    the line.
    """

    def __init__(self, config: Dict[str, Any]):
        """
        Initialize the line-following module and the robot hardware.

        Args:
            config: configuration dictionary. Recognized keys:
                angle_factor (float): pixel-offset-to-angle scale factor,
                    default 0.125.
                min_area (int): minimum contour area (px^2) for a blob to
                    count as the line, default 500.
                turn_threshold (int): default 80.
                    NOTE(review): read but currently unused by this class —
                    confirm whether it should drive the latch thresholds.
                turn_right_x / turn_left_x (int): line x-positions beyond
                    which the recovery-turn direction is latched,
                    defaults 360 / 120 (previously hard-coded).
        """
        super().__init__(config)
        self.angle_factor = config.get("angle_factor", 0.125)
        self.min_area = config.get("min_area", 500)
        self.turn_threshold = config.get("turn_threshold", 80)
        # Latch thresholds were hard-coded as 360/120 in process_frame;
        # expose them via config while keeping the same defaults.
        self.turn_right_x = config.get("turn_right_x", 360)
        self.turn_left_x = config.get("turn_left_x", 120)
        # "R"/"L": side to spin toward when the line is lost.
        self.last_turn: Optional[str] = None
        # True when no detection band saw the line in the last frame.
        self.line_out = False
        # Latest steering angle computed by process_frame().
        self.deflection_angle = 0.0

        # Initialize the robot (camera servos + initial action group).
        self._init_robot()

    def _init_robot(self) -> None:
        """Point the camera servos and run the initial stance action group."""
        # Camera pan/tilt servos; short sleeps let each move complete.
        PWMServo.setServo(2, 1500, 200)
        time.sleep(0.2)
        PWMServo.setServo(1, 1000, 200)
        time.sleep(0.2)

        # Put the hexapod into its initial stance (action group "25").
        SSR.run_ActionGroup("25", 1)

    def _latch_turn_direction(self, x: float) -> None:
        """Remember which way to turn for recovery if the line is lost.

        Args:
            x: detected line-center x-coordinate within the frame width.
        """
        if x >= self.turn_right_x:
            self.last_turn = "R"
        elif x <= self.turn_left_x:
            self.last_turn = "L"

    def _get_line_position(self, frame: np.ndarray) -> Optional[int]:
        """
        Locate the black line inside a (sub-)frame.

        Args:
            frame: BGR image region to search; detection markers are drawn
                onto it in place.

        Returns:
            x-coordinate of the line's min-area-rect center, or None when no
            sufficiently large black blob is found.
        """
        # Blur, then convert to LAB where the "black" range is calibrated.
        gs_frame = cv2.GaussianBlur(frame, (5, 5), 0)
        lab = cv2.cvtColor(gs_frame, cv2.COLOR_BGR2LAB)

        # Threshold to the calibrated black range, then clean the mask with
        # an open (remove specks) followed by a close (fill small holes).
        mask = cv2.inRange(lab, color_range["black"][0], color_range["black"][1])
        kernel = np.ones((3, 3), np.uint8)
        opened = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
        closed = cv2.morphologyEx(opened, cv2.MORPH_CLOSE, kernel)

        # [-2] selects the contour list across OpenCV 3.x/4.x return shapes.
        cnts = cv2.findContours(
            closed.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE
        )[-2]
        if cnts:
            c = max(cnts, key=cv2.contourArea)
            if cv2.contourArea(c) >= self.min_area:
                rect = cv2.minAreaRect(c)
                cx, cy = rect[0]
                x = int(cx)

                # Draw center point and rotated bounding box for debugging.
                cv2.circle(frame, (x, int(cy)), 3, (0, 255, 0), -1)
                box = cv2.boxPoints(rect)
                # np.int0 was removed in NumPy 2.0; astype(int) is equivalent.
                box = box.astype(int)
                cv2.drawContours(frame, [box], 0, (0, 255, 255), 1)

                return x
        return None

    async def process_frame(self, frame: np.ndarray) -> Dict[str, Any]:
        """
        Detect the black line in a frame and compute steering data.

        Also refreshes self.deflection_angle / self.line_out so that
        follow_line() acts on the latest frame. (Previously these instance
        attributes were set once in __init__ and never updated, so
        follow_line() always saw deflection_angle == 0 and line_out False.)

        Args:
            frame: full BGR camera frame.

        Returns:
            {"result": {detected, line_center, deflection_angle, line_out},
             "frame": the frame, annotated when self.debug is set}
        """
        img_h, img_w = frame.shape[:2]

        # Sample three horizontal bands: top (far), middle, bottom (near).
        up_frame = frame[0:65, 0:img_w]
        center_frame = frame[145:210, 0:img_w]
        down_frame = frame[290:355, 0:img_w]

        up_x = self._get_line_position(up_frame)
        center_x = self._get_line_position(center_frame)
        down_x = self._get_line_position(down_frame)

        result = {
            "detected": False,
            "line_center": 0,
            "deflection_angle": 0,
            "line_out": False,
        }

        # Prefer the band nearest the robot (bottom), then middle, then top.
        # (The original also had an "up and down both detected" averaging
        # branch, but it was unreachable: any frame with down_x set had
        # already taken the first branch. It is removed here.)
        line_x = down_x
        if line_x is None:
            line_x = center_x
        if line_x is None:
            line_x = up_x

        if line_x is not None:
            result["line_center"] = line_x
            self._latch_turn_direction(line_x)
            # Signed pixel offset from frame center, scaled to an angle.
            result["deflection_angle"] = (line_x - img_w / 2) * self.angle_factor
            result["detected"] = True
        else:
            # No band saw the line: flag it so follow_line() can recover.
            result["line_out"] = True

        # Propagate to the instance state consumed by follow_line().
        self.deflection_angle = result["deflection_angle"]
        self.line_out = result["line_out"]

        # Debug overlay: center crosshair plus detection read-outs.
        if self.debug:
            frame = self.draw_crosshair(frame, (int(img_w / 2), int(img_h / 2)))
            frame = self.add_text_to_image(frame, "智能巡迹", 10, 10)
            frame = self.add_text_to_image(
                frame, f"Detected: {result['detected']}", 10, 40
            )
            frame = self.add_text_to_image(
                frame, f"Angle: {result['deflection_angle']:.2f}", 10, 70
            )

        return {"result": result, "frame": frame}

    async def get_vision_data(self) -> Dict[str, Any]:
        """
        Grab a frame, process it, and return the detection data.

        Returns:
            Flat dict with detected / line_center / deflection_angle /
            line_out, a "timestamp" (time.time()), and "frame" (annotated
            image when self.debug is set, else None).
        """
        frame = await self.get_frame()
        if frame is not None:
            processed = await self.process_frame(frame)
            return {
                **processed["result"],
                "timestamp": time.time(),
                "frame": processed["frame"] if self.debug else None,
            }
        # Camera returned nothing: report an empty, non-detected result.
        return {
            "detected": False,
            "line_center": 0,
            "deflection_angle": 0,
            "line_out": False,
            "timestamp": time.time(),
            "frame": None,
        }

    async def follow_line(self) -> None:
        """Issue one movement command based on the latest detection state."""
        if self.line_out:
            # Line lost: spin toward the side where it was last seen.
            if self.last_turn == "R":
                hexapod.turn(5, 150)
            elif self.last_turn == "L":
                hexapod.turn(-5, 150)
            self.line_out = False
        else:
            if -20 <= self.deflection_angle <= 20:
                # Roughly centered: walk straight (action group "41").
                SSR.run_ActionGroup("41", 1)
            else:
                # Turn proportionally to the deflection angle.
                hexapod.turn(self.deflection_angle / 15, 150)
                await asyncio.sleep(0.15)