#!/usr/bin/env python3
"""
MozRobot SDK Usage Examples

This comprehensive example demonstrates various features of the MOZ1Robot SDK:

1. Environment setup and validation
2. Robot configuration and connection management
3. Real-time state monitoring and data acquisition
4. Reset robot positions
5. Smooth trajectory generation and execution
6. High-frequency control loop implementation

Prerequisites:
- Robot system is running and connected to the network
    - Host IP is configured in 172.16.0.x subnet (Robot controller IP: 172.16.0.20)
- ROS2 environment is properly configured
    - ROS2 Humble distribution is installed and sourced
    - Movax ROS2 interface package is properly configured
- Camera devices are connected (or use no_camera=True for testing)
    - Camera serials are set as real robot camera serials (default: 0000000000, 0000000000, 0000000000)
- Parameters are set as real robot configuration
    - Robot structure is set as real robot structure (default: wholebody)
    - Robot hz is set as Movax's configuration (default: 120Hz)
- Movax controller system is ready
    - All motors are MotorPowerOn

Usage:
    python3 example_usage.py [--no-camera] [--structure dualarm] [--hz 120]
"""

import os
import sys
import time
import signal
import argparse
import logging
import numpy as np
from typing import Dict, Any, Optional, Tuple
from contextlib import contextmanager
from datetime import datetime
from pathlib import Path

# Try to import cv2 for camera visualization
try:
    import cv2
    CV2_AVAILABLE = True
except ImportError:
    CV2_AVAILABLE = False
    cv2 = None

# Try to import PIL for image saving (alternative to cv2)
try:
    from PIL import Image
    PIL_AVAILABLE = True
except ImportError:
    PIL_AVAILABLE = False
    Image = None

try:
    from mozrobot import MOZ1Robot, MOZ1RobotConfig
except ImportError as e:
    print(f"❌ Failed to import MozRobot SDK: {e}")
    print("📝 Please ensure mozrobot package is installed:")
    print("    pip install /path/to/mozrobot-*.whl")
    sys.exit(1)

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    datefmt='%H:%M:%S'
)
logger = logging.getLogger(__name__)

def convert_numpy_to_lists(data):
    """
    Recursively replace every numpy array inside a data structure with a plain Python list.

    Dicts are rebuilt key by key and sequences element by element; tuples come
    back as lists. Any other value is returned unchanged.

    Args:
        data: Arbitrary structure (dict, list, tuple, numpy.ndarray, scalar, ...)
            that may contain numpy arrays at any depth.

    Returns:
        The same structure with all numpy arrays converted to Python lists.
    """
    if isinstance(data, np.ndarray):
        return data.tolist()
    if isinstance(data, dict):
        return {key: convert_numpy_to_lists(val) for key, val in data.items()}
    if isinstance(data, (list, tuple)):
        return [convert_numpy_to_lists(elem) for elem in data]
    return data








def save_camera_images(observation: Dict[str, Any], save_dir: str, frame_number: int) -> int:
    """
    Save camera images from observation data to files in camera-specific subdirectories.

    Images are written as PNG via PIL when available, via OpenCV as a fallback,
    or as raw ``.npy`` arrays when neither library is installed. All-zero
    images (the placeholders produced for disabled cameras) are skipped.

    Args:
        observation: Dict containing camera images keyed by camera name
        save_dir: Base directory to save images in (created if missing)
        frame_number: Frame number for filename

    Returns:
        Number of images successfully saved
    """
    saved_count = 0
    base_path = Path(save_dir)
    # parents=True so that nested save directories (e.g. "runs/today/imgs")
    # are created as well; the original exist_ok-only call failed on them.
    base_path.mkdir(parents=True, exist_ok=True)

    for cam_name in ["cam_high", "cam_left_wrist", "cam_right_wrist"]:
        if cam_name in observation:
            image = observation[cam_name]
            # Check if this is a real image or a dummy image from disabled camera
            is_dummy = np.all(image == 0)  # Dummy images are all zeros

            if not is_dummy:
                try:
                    # Create camera-specific subdirectory
                    cam_dir = base_path / cam_name
                    cam_dir.mkdir(exist_ok=True)

                    # Image is already in RGB format, no conversion needed
                    image_rgb = image

                    # Create filename with frame number (no camera name since it's in subdirectory)
                    filename = f"frame_{frame_number:06d}.png"
                    filepath = cam_dir / filename

                    if PIL_AVAILABLE:
                        # Use PIL to save image
                        pil_image = Image.fromarray(image_rgb)
                        pil_image.save(filepath)
                        logger.debug(f"  💾 Saved {cam_name} to {filepath}")
                    elif CV2_AVAILABLE:
                        # Use cv2 as fallback, convert RGB back to BGR for cv2
                        image_bgr = image_rgb[:, :, ::-1]  # RGB to BGR for cv2
                        cv2.imwrite(str(filepath), image_bgr)
                        logger.debug(f"  💾 Saved {cam_name} to {filepath}")
                    else:
                        # Use numpy to save as raw array (fallback)
                        np.save(filepath.with_suffix('.npy'), image)
                        logger.debug(f"  💾 Saved {cam_name} as numpy array to {filepath.with_suffix('.npy')}")

                    saved_count += 1

                except Exception as e:
                    # Best-effort: a failed frame must not abort the monitor loop.
                    logger.warning(f"⚠️  Failed to save {cam_name}: {e}")
            else:
                logger.debug(f"  ⚪ Skipping {cam_name} save (camera disabled)")

    return saved_count

# Global robot instance for shutdown checking
_robot_instance = None

def is_shutdown_requested(robot=None):
    """Check if shutdown has been requested from any source.

    Args:
        robot: Optional robot instance to query; when omitted the module-level
            ``_robot_instance`` (set by main()) is used instead.

    Returns:
        True when the robot's ProcessManager reports a pending shutdown,
        False otherwise (including when no robot is available or the check
        itself fails).
    """
    try:
        # Prefer the explicitly passed robot; fall back to the global instance.
        # (`is None` rather than truthiness, so a falsy-but-valid robot object
        # passed by a caller is still honored.)
        check_robot = robot if robot is not None else _robot_instance

        if check_robot and hasattr(check_robot, 'process_manager'):
            shutdown_status = check_robot.process_manager._shutdown_requested
            if shutdown_status:
                logger.debug(f"🔍 ProcessManager shutdown status: {shutdown_status}")
            return shutdown_status
    except Exception as e:
        # Never let a status probe crash the caller; report and assume "no".
        logger.debug(f"Error checking shutdown status: {e}")
    return False


def parse_arguments() -> argparse.Namespace:
    """Define the command-line interface and return the parsed arguments."""
    epilog_text = '''
Examples:
    python3 example_usage.py --no-camera
    python3 example_usage.py --structure dualarm --hz 120
    python3 example_usage.py --structure wholebody --hz 120 --demo trajectory
    python3 example_usage.py --enable-soft-realtime --bind-cpu 5
    python3 example_usage.py --enable-soft-realtime --bind-cpu 4 5 6 --no-camera
    python3 example_usage.py --demo basic
    python3 example_usage.py --demo monitor
    python3 example_usage.py --demo monitor --monitor-interval 0.1
    python3 example_usage.py --demo monitor --structure dualarm
    python3 example_usage.py --disabled-cameras cam_high
    python3 example_usage.py --disabled-cameras cam_high cam_left_wrist
    python3 example_usage.py --demo monitor --disabled-cameras cam_right_wrist
    python3 example_usage.py --demo monitor --save-images --save-dir my_images
    python3 example_usage.py --demo monitor --save-images --monitor-interval 0.5
        '''

    arg_parser = argparse.ArgumentParser(
        description='MozRobot SDK Usage Examples',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=epilog_text,
    )

    # Robot hardware configuration
    arg_parser.add_argument(
        '--no-camera', action='store_true',
        help='Skip camera initialization (useful for testing)')
    arg_parser.add_argument(
        '--structure', choices=['dualarm', 'wholebody_without_base', 'wholebody'],
        default='wholebody', help='Robot configuration structure')
    arg_parser.add_argument(
        '--hz', type=int, default=120,
        help='Control frequency in Hz (default: 120)')

    # Demo selection and logging
    arg_parser.add_argument(
        '--demo', choices=['basic', 'trajectory', 'monitor', 'all'],
        default='all', help='Which demo to run')
    arg_parser.add_argument(
        '--verbose', '-v', action='store_true',
        help='Enable verbose logging')

    # Real-time scheduling options
    arg_parser.add_argument(
        '--enable-soft-realtime', action='store_true',
        help='Enable soft real-time scheduling (requires scripts/setup_rtprio.sh)')
    arg_parser.add_argument(
        '--bind-cpu', type=int, nargs='+', metavar='CPU_IDX',
        help='Bind robot control thread to specific CPU cores (e.g., --bind-cpu 5 or --bind-cpu 4 5 6)')

    # Monitor-demo options
    arg_parser.add_argument(
        '--monitor-interval', type=float, default=1.0, metavar='SECONDS',
        help='Monitor demo update interval in seconds (default: 1.0s, min: 0.1s)')
    arg_parser.add_argument(
        '--disabled-cameras', type=str, nargs='*', metavar='CAMERA_NAME',
        choices=['cam_high', 'cam_left_wrist', 'cam_right_wrist'],
        help='Disable specific cameras by name (e.g., --disabled-cameras cam_high cam_left_wrist)')
    arg_parser.add_argument(
        '--save-images', action='store_true',
        help='Save camera images to files instead of displaying in windows (monitor demo only)')
    arg_parser.add_argument(
        '--save-dir', type=str, default='camera_images', metavar='DIR',
        help='Directory to save camera images (default: camera_images)')

    return arg_parser.parse_args()


def check_network_configuration() -> bool:
    """Validate that the robot controller is reachable on the network.

    Sends a single ICMP ping (2 s timeout) to the robot controller at
    172.16.0.20 and reports the result.

    Returns:
        True if the controller answered the ping; False if it did not answer
        or the ping utility could not be executed at all.
    """
    logger.info("⚙️  Checking network configuration...")

    # Test robot connectivity
    logger.info("🌐 Testing robot connectivity...")
    import subprocess
    try:
        result = subprocess.run(['ping', '-c', '1', '-W', '2', '172.16.0.20'],
                                capture_output=True, text=True)
    except OSError as e:
        # `ping` may be missing (minimal containers) or not executable;
        # treat that the same as an unreachable robot instead of crashing.
        logger.warning(f"⚠️  Unable to run ping ({e}) - check network connection")
        return False
    if result.returncode == 0:
        logger.info("✅ Robot is reachable")
        return True
    else:
        logger.warning("⚠️  Robot not reachable - check network connection")
        return False


def create_robot_config(args: argparse.Namespace) -> MOZ1RobotConfig:
    """Create robot configuration based on command line arguments.

    Builds a MOZ1RobotConfig from the parsed CLI options, logs a summary of
    the resulting configuration, and emits warnings for real-time settings.

    Args:
        args: Parsed command line arguments from parse_arguments().

    Returns:
        A populated MOZ1RobotConfig instance.
    """
    logger.info("🤖 Creating robot configuration...")

    # Determine camera configuration, Default development camera serials
    camera_serials = "230322270398, 313522302626, 230422271253"
    # camera_serials = "2-1,2-3,2-2"
    camera_resolutions = "320*240, 320*240, 320*240"

    config = MOZ1RobotConfig(
        realsense_serials=camera_serials,
        camera_resolutions=camera_resolutions,
        no_camera=args.no_camera,
        structure=args.structure,
        robot_control_hz=args.hz,
        enable_soft_realtime=args.enable_soft_realtime,
        bind_cpu_idxs=args.bind_cpu,
        disabled_cameras=args.disabled_cameras
    )

    # Display configuration
    logger.info(f"📝 Configuration:")
    logger.info(f"  Structure: {config.structure}")
    logger.info(f"  Control frequency: {config.robot_control_hz} Hz")
    logger.info(f"  Soft real-time: {config.enable_soft_realtime}")
    if config.bind_cpu_idxs:
        logger.info(f"  CPU binding: {config.bind_cpu_idxs}")
    else:
        logger.info(f"  CPU binding: disabled")
    logger.info(f"  Camera enabled: {not config.no_camera}")
    if not config.no_camera:
        logger.info(f"  Camera resolutions: {config.camera_resolutions}")
        logger.info(f"  Camera serials: {config.realsense_serials}")
        if config.disabled_cameras:
            logger.info(f"  Disabled cameras: {config.disabled_cameras}")
        else:
            logger.info(f"  Disabled cameras: None")

    # Display monitor demo configuration if monitor is requested
    if args.demo == 'monitor':
        logger.info(f"  Monitor demo: enabled")
        # Guard the rate display: a zero/negative --monitor-interval would
        # raise ZeroDivisionError here, before the monitor demo clamps it.
        if args.monitor_interval > 0:
            logger.info(f"  Monitor interval: {args.monitor_interval:.3f}s ({1/args.monitor_interval:.1f} Hz)")
        else:
            logger.info(f"  Monitor interval: {args.monitor_interval:.3f}s (invalid, will be clamped)")
        if args.save_images:
            logger.info(f"  Camera mode: image saving (directory: {args.save_dir})")
        else:
            logger.info(f"  Camera mode: monitoring only (no image saving)")

    # Display warnings for real-time configuration
    if config.enable_soft_realtime:
        logger.warning("⚠️  Soft real-time is enabled. Make sure you have run scripts/setup_rtprio.sh first!")
        logger.info("📋 If not done, please run: bash scripts/setup_rtprio.sh")
    if config.bind_cpu_idxs:
        logger.info(f"🔗 Robot control thread will be bound to CPU cores: {config.bind_cpu_idxs}")

    return config


def read_robot_state(robot: MOZ1Robot) -> Optional[Dict[str, Any]]:
    """Read and display current robot state information.

    Captures one observation from the robot (a potentially blocking call),
    then logs arm Cartesian/joint/gripper states, torso and base states, and
    camera image shapes. Shutdown requests are honored both before and after
    the capture so the read can be abandoned cleanly.

    Args:
        robot: Connected MOZ1Robot instance to query.

    Returns:
        The raw observation dict on success; None when shutdown was requested
        or the read failed.
    """
    logger.info("📊 Reading robot state...")

    try:
        # Check for shutdown request before robot operation
        if is_shutdown_requested():
            logger.info("🛑 Shutdown requested, skipping robot state read")
            return None

        # Capture robot observation data with timing
        capture_start_time = time.time()
        observation = robot.capture_observation()
        capture_end_time = time.time()
        capture_duration = capture_end_time - capture_start_time

        logger.info(f"⏱️  capture_observation() took {capture_duration:.3f}s")

        # Check for shutdown request after robot operation
        if is_shutdown_requested():
            logger.info("🛑 Shutdown requested after robot state read")
            return None

        # You can choose to convert numpy arrays to regular Python lists, depending on your needs
        # observation = convert_numpy_to_lists(observation)

        logger.info("📝 Robot State Data:")

        # Display arm states
        for arm in ["leftarm", "rightarm"]:
            arm_name = "Left Arm" if arm == "leftarm" else "Right Arm"

            # Cartesian pose: first three values are position (m), the
            # remainder a rotation vector (rad) — see the log format below.
            if f"{arm}_state_cart_pos" in observation:
                cart_pos = observation[f"{arm}_state_cart_pos"]
                position_vals = ' '.join([f'{x:.3f}' for x in cart_pos[:3]])
                rotation_vals = ' '.join([f'{x:.3f}' for x in cart_pos[3:]])
                logger.info(f"  {arm_name} Cartesian: [{position_vals}] (Position, unit: m) | [{rotation_vals}] (Rotation Vector, unit: rad)")

            if f"{arm}_state_joint_pos" in observation:
                joint_pos = observation[f"{arm}_state_joint_pos"]
                logger.info(f"  {arm_name} Joints: [{', '.join([f'{j:.3f}' for j in joint_pos])}] (Unit: rad)")

            if f"{arm}_gripper_state_pos" in observation:
                gripper_pos = observation[f"{arm}_gripper_state_pos"]
                logger.info(f"  {arm_name} Gripper: {gripper_pos[0]:.3f} (Unit: m)")

        # Display torso state (if available)
        if "torso_state_cart_pos" in observation:
            torso_pos = observation["torso_state_cart_pos"]
            position_vals = ' '.join([f'{x:.3f}' for x in torso_pos[:3]])
            rotation_vals = ' '.join([f'{x:.3f}' for x in torso_pos[3:]])
            logger.info(f"  Torso Cartesian: [{position_vals}] (Position, unit: m) | [{rotation_vals}] (Rotation Vector, unit: rad)")

        # Display base state (if available)
        if "base_state_speed" in observation:
            base_speed = observation["base_state_speed"]
            logger.info(f"  Base Velocity: [{', '.join([f'{v:.3f}' for v in base_speed])}] (Unit: m/s, rad/s)")

        # Display camera information
        camera_count = 0
        camera_info = []
        for cam_name in ["cam_high", "cam_left_wrist", "cam_right_wrist"]:
            if cam_name in observation:
                image = observation[cam_name]
                camera_info.append(f"{cam_name}: {image.shape}")
                camera_count += 1

        if camera_count > 0:
            logger.info(f"  Cameras ({camera_count}): {' | '.join(camera_info)}")
        else:
            logger.info("  Cameras: None (disabled)")

        return observation

    except KeyboardInterrupt:
        # Propagate Ctrl+C so callers (e.g. the monitor loop) can exit cleanly.
        logger.info("🛑 Robot state read interrupted by user")
        raise
    except Exception as e:
        logger.error(f"❌ Failed to read robot state: {e}")
        logger.error(f"Full traceback: ", exc_info=True)
        return None


@contextmanager
def safe_robot_operation(robot: MOZ1Robot):
    """Context manager for safe robot operations with automatic cleanup.

    Yields the given robot, logs (and re-raises) user interrupts and
    unexpected errors, and always emits a safety notice on exit.
    """
    try:
        yield robot
    except KeyboardInterrupt:
        logger.warning("⚠️  Operation interrupted by user")
        raise
    except Exception as exc:
        logger.error("❌ Robot operation failed: %s", exc)
        raise
    finally:
        # Add any safety cleanup here if needed
        logger.info("🔒 Ensuring robot safety...")

def reset_robot_positions(robot: MOZ1Robot) -> bool:
    """Move the robot to its predefined home posture and re-enable following mode.

    Joint targets below are written in degrees and converted to radians;
    gripper targets are in metres.

    Args:
        robot: Connected MOZ1Robot instance.

    Returns:
        True when the reset command was accepted, False otherwise.
    """
    left_arm_init_joints = [deg * np.pi / 180 for deg in (-9, -50, -20, -90, -35, 8, -7)]
    right_arm_init_joints = [deg * np.pi / 180 for deg in (9, -50, 20, 90, 35, 8, 7)]
    torso_init_joints = [deg * np.pi / 180 for deg in (30, 0, 0, 30, 0, 0)]
    gripper_init_positions = [0.12, 0.12]

    accepted = robot.reset_robot_positions(
        left_arm_joints=left_arm_init_joints,
        right_arm_joints=right_arm_init_joints,
        torso_joints=torso_init_joints,
        gripper_positions=gripper_init_positions,
    )
    if not accepted:
        logger.error("❌ Failed to reset robot positions")
        return False

    # Give the controller time to reach the reset posture before continuing.
    time.sleep(5)

    # Resetting disables external following mode on the controller side,
    # so it must be switched back on explicitly.
    robot.enable_external_following_mode()
    return True

def generate_smooth_trajectory(start_pos: np.ndarray, target_pos: np.ndarray,
                              num_steps: int, trajectory_type: str = "cosine") -> np.ndarray:
    """
    Generate smooth trajectory between two positions.

    Based on implementation logic from origin/main branch @thirdparty/arx_r5_real/real_env_moz1.py:_reset_joints

    Args:
        start_pos: Starting position array
        target_pos: Target position array
        num_steps: Number of trajectory steps. 0 (or negative) yields an empty
            trajectory; 1 yields a single step at the target position.
        trajectory_type: Type of interpolation ('cosine', 'linear', 'cubic');
            unknown values fall back to linear

    Returns:
        Trajectory array of shape [num_steps, dim]
    """
    start = np.asarray(start_pos, dtype=float)
    target = np.asarray(target_pos, dtype=float)

    # Degenerate step counts: the interpolation below divides by
    # (num_steps - 1), which previously raised ZeroDivisionError for
    # num_steps == 1. A single step goes straight to the target.
    if num_steps <= 0:
        return np.zeros((0, len(start)))
    if num_steps == 1:
        return target.reshape(1, -1).copy()

    # Normalized time samples in [0, 1], vectorized over all steps.
    t = np.linspace(0.0, 1.0, num_steps)
    if trajectory_type == "cosine":
        # Smooth cosine interpolation: zero velocity at both endpoints.
        alpha = 0.5 * (1 - np.cos(np.pi * t))
    elif trajectory_type == "cubic":
        # Cubic easing (smoothstep): t^2 * (3 - 2t).
        alpha = t * t * (3 - 2 * t)
    else:  # linear
        alpha = t

    # Broadcast: [num_steps, 1] blend factor over the [dim] displacement.
    return start + alpha[:, np.newaxis] * (target - start)


def generate_circular_trajectory(center: np.ndarray, radius: float,
                                num_points: int, plane: str = "xy") -> np.ndarray:
    """Generate circular trajectory in specified plane.

    The circle is sampled over one full revolution with the endpoint included,
    so the first and last waypoints coincide. Orientation columns are copied
    from the center pose when it has 6 elements, otherwise left at zero.
    Unknown plane names leave the position columns at zero.
    """
    angles = np.linspace(0, 2*np.pi, num_points)
    trajectory = np.zeros((num_points, 6))  # [x, y, z, rx, ry, rz]

    # Vectorized offsets along the two in-plane axes.
    cos_offsets = radius * np.cos(angles)
    sin_offsets = radius * np.sin(angles)

    if plane == "xy":
        trajectory[:, 0] = center[0] + cos_offsets
        trajectory[:, 1] = center[1] + sin_offsets
        trajectory[:, 2] = center[2]
    elif plane == "xz":
        trajectory[:, 0] = center[0] + cos_offsets
        trajectory[:, 1] = center[1]
        trajectory[:, 2] = center[2] + sin_offsets
    elif plane == "yz":
        trajectory[:, 0] = center[0]
        trajectory[:, 1] = center[1] + cos_offsets
        trajectory[:, 2] = center[2] + sin_offsets

    # Keep original orientation
    trajectory[:, 3:] = center[3:6] if len(center) >= 6 else [0, 0, 0]

    return trajectory

def execute_basic_motion_demo(robot: MOZ1Robot, observation: Dict[str, Any]) -> bool:
    """Execute basic motion demonstration with smooth trajectory.

    Moves both arm end-effectors 5 cm straight up over 2 s using a
    cosine-interpolated Cartesian trajectory streamed at the robot's control
    rate, then holds the final pose for 1 s.

    Args:
        robot: Connected MOZ1Robot instance.
        observation: Latest observation dict; must contain
            "leftarm_state_cart_pos" and "rightarm_state_cart_pos".

    Returns:
        True on success, False when required state data is missing or the
        motion failed.
    """
    logger.info("🎯 Starting basic motion demo...")

    try:
        # Validate required data
        required_keys = ["leftarm_state_cart_pos", "rightarm_state_cart_pos"]
        for key in required_keys:
            if key not in observation:
                logger.error(f"❌ Missing required state data: {key}")
                return False

        # Copy so the targets below don't mutate the caller's observation.
        left_current = observation["leftarm_state_cart_pos"].copy()
        right_current = observation["rightarm_state_cart_pos"].copy()

        logger.info(f"📍 Current positions:")
        logger.info(f"  Left arm:  [{', '.join([f'{x:.3f}' for x in left_current])}]")
        logger.info(f"  Right arm: [{', '.join([f'{x:.3f}' for x in right_current])}]")

        # Define target positions (small upward movement)
        left_target = left_current.copy()
        right_target = right_current.copy()

        # Move up 5cm
        movement_distance = 0.05
        left_target[2] += movement_distance  # Z-axis up
        right_target[2] += movement_distance

        logger.info(f"🎯 Target positions (moving up {movement_distance*100:.1f}cm):")
        logger.info(f"  Left arm:  [{', '.join([f'{x:.3f}' for x in left_target])}]")
        logger.info(f"  Right arm: [{', '.join([f'{x:.3f}' for x in right_target])}]")

        # Generate trajectory
        duration = 2.0  # seconds
        control_hz = robot.control_hz
        num_steps = int(duration * control_hz)

        logger.info(f"📊 Motion parameters:")
        logger.info(f"  Duration: {duration}s")
        logger.info(f"  Control frequency: {control_hz}Hz")
        logger.info(f"  Trajectory steps: {num_steps}")

        left_trajectory = generate_smooth_trajectory(left_current, left_target, num_steps)
        right_trajectory = generate_smooth_trajectory(right_current, right_target, num_steps)

        logger.info("🚀 Executing motion trajectory...")

        # Execute trajectory with precise timing
        dt = 1.0 / control_hz
        start_time = time.time()

        with safe_robot_operation(robot):
            for i in range(num_steps):
                # Build action command
                action = {
                    "leftarm_cmd_cart_pos": left_trajectory[i],
                    "rightarm_cmd_cart_pos": right_trajectory[i],
                }

                # Send action command
                robot.send_action(action)

                # Maintain precise timing: sleep until the absolute deadline of
                # this step so per-step jitter does not accumulate into drift.
                expected_time = start_time + (i + 1) * dt
                current_time = time.time()
                sleep_time = expected_time - current_time

                if sleep_time > 0:
                    time.sleep(sleep_time)

                # Progress reporting
                if (i + 1) % (control_hz // 2) == 0:  # Every 0.5 seconds
                    progress = (i + 1) / num_steps * 100
                    logger.info(f"  📈 Progress: {progress:.1f}%")

        actual_duration = time.time() - start_time
        logger.info(f"✅ Motion completed in {actual_duration:.2f}s (target: {duration:.2f}s)")

        # Hold final position by re-sending the target at the control rate.
        logger.info("🔒 Holding final position for 1s...")
        final_action = {
            "leftarm_cmd_cart_pos": left_target,
            "rightarm_cmd_cart_pos": right_target,
        }

        for _ in range(control_hz):  # 1 second
            robot.send_action(final_action)
            time.sleep(dt)

        logger.info("✅ Basic motion demo completed successfully")
        return True

    except Exception as e:
        logger.error(f"❌ Basic motion demo failed: {e}")
        return False

def execute_monitor_demo(robot: MOZ1Robot, monitor_interval: float, no_camera: bool,
                        save_images: bool = False, save_dir: str = "camera_images") -> bool:
    """Execute monitor demo - continuously read robot data and save camera images until Ctrl+C.

    Runs a fixed-rate loop (scheduled against absolute time, so slow
    iterations do not accumulate drift) that reads the robot state, optionally
    saves camera frames, and reports timing statistics every 10 seconds.

    Args:
        robot: Connected MOZ1Robot instance.
        monitor_interval: Desired seconds between readings (clamped to >= 0.1).
        no_camera: True when cameras were disabled at startup; disables saving.
        save_images: When True, write camera frames under save_dir.
        save_dir: Base directory for saved images (a timestamp suffix is added).

    Returns:
        True on normal or user-interrupted exit, False on unexpected failure.
    """
    logger.info("🔍 Starting monitor demo...")

    # Validate monitor interval
    min_interval = 0.1  # 100ms minimum
    if monitor_interval < min_interval:
        logger.warning(f"⚠️  Monitor interval {monitor_interval}s is too small, using minimum {min_interval}s")
        monitor_interval = min_interval

    # Determine camera saving mode
    # (the trailing `or True` is deliberate: numpy .npy saving is always available)
    camera_saving = save_images and not no_camera and (PIL_AVAILABLE or CV2_AVAILABLE or True)  # numpy always available

    if save_images:
        if no_camera:
            logger.warning("⚠️  Camera image saving disabled: --no-camera flag is set")
            camera_saving = False
        elif not PIL_AVAILABLE and not CV2_AVAILABLE:
            logger.warning("⚠️  Camera image saving will use numpy format: PIL and OpenCV are not available. Install Pillow or opencv-python for PNG format.")

        if camera_saving:
            # Create save directory with timestamp
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            save_dir_full = f"{save_dir}_{timestamp}"
            logger.info(f"📁 Images will be saved to: {save_dir_full}")
        else:
            save_dir_full = None
    else:
        camera_saving = False
        save_dir_full = None

    logger.info(f"📊 Monitor parameters:")
    logger.info(f"  Update interval: {monitor_interval:.3f}s ({1/monitor_interval:.1f} Hz)")
    logger.info(f"  Camera image saving: {'enabled' if camera_saving else 'disabled'}")
    if camera_saving:
        logger.info(f"  Save directory: {save_dir_full}")
    logger.info(f"  Press Ctrl+C to exit monitor mode")

    # Monitor loop counters
    monitor_count = 0
    start_time = time.time()
    last_fps_report = start_time
    try:
        with safe_robot_operation(robot):
            while not is_shutdown_requested(robot):
                # Calculate when this execution should start
                # (absolute schedule: start_time + count * interval, not "last + interval")
                target_execution_time = start_time + monitor_count * monitor_interval
                current_time = time.time()

                # Wait if we're ahead of schedule
                if current_time < target_execution_time:
                    sleep_time = target_execution_time - current_time
                    logger.debug(f"⏰ Waiting {sleep_time:.3f}s to maintain {monitor_interval:.3f}s interval")
                    time.sleep(sleep_time)
                    if is_shutdown_requested(robot):
                        logger.info("🛑 Shutdown requested during interval wait, exiting")
                        break

                execution_start = time.time()

                # Check shutdown request before any potentially blocking operations
                if is_shutdown_requested(robot):
                    logger.info("🛑 Shutdown requested, exiting monitor loop")
                    break

                # Read robot state with timeout protection
                try:
                    read_start_time = time.time()
                    observation = read_robot_state(robot)
                    read_end_time = time.time()
                    read_duration = read_end_time - read_start_time
                    logger.info(f"📋 read_robot_state() took {read_duration:.3f}s")
                except KeyboardInterrupt:
                    logger.info("🛑 Keyboard interrupt during robot state read, exiting monitor loop")
                    break
                except Exception as e:
                    if is_shutdown_requested(robot):
                        logger.info("🛑 Shutdown during robot state read, exiting")
                        break
                    logger.warning(f"⚠️  Failed to read robot state: {e}, continuing...")
                    monitor_count += 1  # Still increment to maintain timing
                    continue

                if observation is None:
                    logger.warning("⚠️  Failed to read robot state, continuing...")
                    monitor_count += 1  # Still increment to maintain timing
                    continue

                # Check shutdown request after robot state read
                if is_shutdown_requested(robot):
                    logger.info("🛑 Shutdown requested after robot state read, exiting")
                    break

                # Camera processing logic (saving only)
                camera_save_duration = 0.0
                if camera_saving:
                    camera_start_time = time.time()
                    saved_count = save_camera_images(observation, save_dir_full, monitor_count)
                    camera_end_time = time.time()
                    camera_save_duration = camera_end_time - camera_start_time
                    logger.info(f"💾 save_camera_images() took {camera_save_duration:.3f}s")
                    if monitor_count == 0:  # Log save info for first frame
                        logger.info(f"💾 Started saving images. First frame saved {saved_count} images.")

                monitor_count += 1
                execution_end = time.time()
                execution_duration = execution_end - execution_start

                # Report FPS every 10 seconds with detailed timing breakdown
                if execution_end - last_fps_report >= 10.0:
                    elapsed_total = execution_end - start_time
                    actual_fps = monitor_count / elapsed_total
                    target_fps = 1.0 / monitor_interval

                    logger.info(f"📈 Monitor stats: {monitor_count} readings in {elapsed_total:.1f}s "
                              f"(actual: {actual_fps:.2f} Hz, target: {target_fps:.2f} Hz)")
                    logger.info(f"📊 Last execution took {execution_duration:.3f}s")
                    logger.info(f"📊 Time breakdown - robot read: {read_duration:.3f}s, camera save: {camera_save_duration:.3f}s")
                    last_fps_report = execution_end

                # Warn if execution took longer than interval
                if execution_duration > monitor_interval:
                    logger.debug(f"⚠️  Execution took {execution_duration:.3f}s, longer than interval {monitor_interval:.3f}s")

                # Progress indicator every 50 readings (unless shutting down)
                if monitor_count % 50 == 0 and not is_shutdown_requested():
                    logger.info(f"🔄 Monitor count: {monitor_count}")

        # Final statistics
        total_time = time.time() - start_time
        final_fps = monitor_count / total_time if total_time > 0 else 0

        logger.info(f"✅ Monitor demo completed:")
        logger.info(f"  Total readings: {monitor_count}")
        logger.info(f"  Total time: {total_time:.1f}s")
        logger.info(f"  Average rate: {final_fps:.2f} Hz")
        if camera_saving:
            logger.info(f"  Images saved to: {save_dir_full}")
            # Upper-bound estimate: assumes all three cameras saved every frame.
            logger.info(f"  Estimated total images: {monitor_count * len(['cam_high', 'cam_left_wrist', 'cam_right_wrist'])}")

        return True

    except KeyboardInterrupt:
        # Ctrl+C is the documented way to leave the monitor loop — not an error.
        logger.info("🛑 Monitor demo interrupted by user (Ctrl+C)")
        return True
    except Exception as e:
        logger.error(f"❌ Monitor demo failed: {e}")
        logger.error("Full traceback:", exc_info=True)
        return False


def execute_trajectory_demo(robot: MOZ1Robot, observation: Dict[str, Any]) -> bool:
    """Execute advanced trajectory demonstration with circular motion.

    Streams the left arm end-effector along a 5 cm-radius circle in the XY
    plane, one waypoint per control tick.

    Args:
        robot: Connected MOZ1Robot instance.
        observation: Latest observation dict; must contain
            "leftarm_state_cart_pos".

    Returns:
        True on success, False when state data is missing or execution failed.
    """
    logger.info("🌀 Starting trajectory demo...")

    try:
        if "leftarm_state_cart_pos" not in observation:
            logger.error("❌ Missing left arm position data")
            return False

        # Copy so the circle center is decoupled from the caller's observation.
        current_pos = observation["leftarm_state_cart_pos"].copy()
        center = current_pos.copy()

        # Generate circular trajectory
        radius = 0.05  # 5cm radius
        num_points = 240
        trajectory = generate_circular_trajectory(center, radius, num_points, "xy")

        logger.info(f"🌀 Circular trajectory parameters:")
        logger.info(f"  Center: [{', '.join([f'{x:.3f}' for x in center[:3]])}]")
        logger.info(f"  Radius: {radius*100:.1f}cm")
        logger.info(f"  Points: {num_points}")
        logger.info(f"  Plane: XY")

        dt = 1.0 / robot.control_hz

        logger.info("🚀 Executing circular motion...")

        with safe_robot_operation(robot):
            for i, waypoint in enumerate(trajectory):
                action = {"leftarm_cmd_cart_pos": waypoint}
                robot.send_action(action)
                time.sleep(dt)

                if (i + 1) % 15 == 0:  # Progress every 15 waypoints (16 reports over the 240-point circle)
                    progress = (i + 1) / len(trajectory) * 100
                    logger.info(f"  📈 Progress: {progress:.1f}%")

        logger.info("✅ Trajectory demo completed successfully")
        return True

    except Exception as e:
        logger.error(f"❌ Trajectory demo failed: {e}")
        return False


def main() -> None:
    """Main function demonstrating MozRobot SDK usage.

    Full demo lifecycle: parse CLI arguments, validate network config, build
    the robot configuration, connect, read state, reset positions, run the
    selected demonstration(s), then disconnect and reap any leftover
    multiprocessing children.
    """
    args = parse_arguments()

    # --verbose flips the root logger to DEBUG for the whole process.
    if args.verbose:
        logging.getLogger().setLevel(logging.DEBUG)

    logger.info("🚀 MozRobot SDK Usage Examples")
    logger.info("=" * 50)

    # Network configuration validation
    if not check_network_configuration():
        logger.error("❌ Network configuration validation failed, exiting...")
        return

    # Create robot configuration
    config = create_robot_config(args)

    # Create robot instance
    logger.info("🤖 Creating robot instance...")
    # Stash the instance in a module-level global (presumably so a signal
    # handler registered elsewhere in this file can reach it for emergency
    # shutdown — confirm against that handler).
    global _robot_instance
    robot = MOZ1Robot(config)
    _robot_instance = robot

    try:
        # Connect to robot
        logger.info("🔗 Connecting to robot system...")

        # Check for interrupt before connecting
        if is_shutdown_requested():
            logger.info("🛑 Shutdown requested, aborting robot connection")
            return

        try:
            robot.connect()
        except Exception as e:
            # Connection failures are expected in the field; print a
            # troubleshooting checklist instead of a raw traceback.
            logger.error(f"❌ Failed to connect to robot system: {e}")
            logger.info("📝 Troubleshooting tips:")
            logger.info("   • Check robot controller is powered on and connected")
            logger.info("   • Verify network connectivity to robot (ping 172.16.0.20)")
            logger.info("   • Ensure ROS2 environment is properly configured")
            logger.info("   • Check camera connections and USB ports")
            logger.info("   • Verify camera serial numbers in configuration")
            logger.info("   • Try running with --no-camera for testing without cameras")
            logger.info("   • Ensure no other processes are using the cameras")
            logger.info("   • Try restarting the robot controller")
            return

        # Check for interrupt after connecting
        if is_shutdown_requested():
            logger.info("🛑 Shutdown requested, disconnecting robot")
            robot.disconnect()
            return

        # connect() may return without raising yet leave the link down;
        # double-check the connection flag before proceeding.
        if not robot.is_robot_connected:
            logger.error("❌ Failed to connect to robot")
            return

        logger.info("✅ Robot connected successfully!")

        # Read robot state
        observation = read_robot_state(robot)
        if not observation:
            logger.error("❌ Cannot retrieve robot state, skipping demos")
            return

        # reset robot positions
        reset_robot_positions(robot)

        # Check for shutdown request before demos
        if is_shutdown_requested():
            logger.info("🛑 Shutdown requested, skipping demos")
            return

        # read robot state after reset (the pre-reset observation is stale)
        observation = read_robot_state(robot)
        if not observation:
            logger.error("❌ Cannot retrieve robot state, skipping demos")
            return

        # Execute demonstrations based on selection
        demos_run = 0

        # NOTE(review): because of short-circuit evaluation, each demo runs
        # WITHOUT a confirmation prompt when cameras are enabled
        # (no_camera=False); the "(Y/n)" prompt is only shown in --no-camera
        # mode. Confirm this inversion is intentional.
        # NOTE(review): the `demos_run < 2` / `< 3` caps below can never be
        # False here (at most 1 resp. 2 demos have run by those points) —
        # they look like leftovers; verify before removing.
        if args.demo in ['basic', 'all']:
            logger.info("\n🎯 Preparing basic motion demo...")
            if not args.no_camera or input("🤔 Execute basic motion demo? (Y/n): ").strip().lower() != 'n':
                if execute_basic_motion_demo(robot, observation):
                    demos_run += 1
                    time.sleep(1)  # Brief pause between demos

        if args.demo in ['trajectory', 'all'] and demos_run < 2:
            logger.info("\n🌀 Preparing trajectory demo...")
            if not args.no_camera or input("🤔 Execute trajectory demo? (Y/n): ").strip().lower() != 'n':
                if execute_trajectory_demo(robot, observation):
                    demos_run += 1

        if args.demo in ['monitor', 'all'] and demos_run < 3:
            logger.info("\n🔍 Preparing monitor demo...")
            logger.info("⚠️  Monitor demo will run continuously until Ctrl+C is pressed")
            if not args.no_camera or input("🤔 Execute monitor demo? (Y/n): ").strip().lower() != 'n':
                if execute_monitor_demo(robot, args.monitor_interval, args.no_camera, args.save_images, args.save_dir):
                    demos_run += 1

        if demos_run == 0:
            logger.info("📊 No demos executed. Final state check...")
            final_obs = read_robot_state(robot)
            if final_obs:
                logger.info("✅ Robot state retrieved successfully")

        logger.info(f"\n✨ Completed {demos_run} demonstration(s) successfully!")

    except KeyboardInterrupt:
        logger.warning("\n⚠️  Operation interrupted by user (Ctrl+C)")
        logger.info("Attempting graceful shutdown...")
    except Exception as e:
        logger.error(f"\n❌ Unexpected error during execution: {e}")
        logger.error("Full traceback:", exc_info=True)
    finally:
        # Cleanup runs on every exit path, including Ctrl+C.
        logger.info("\n🔒 Cleaning up and disconnecting...")

        # No camera windows to clean up since visualization is removed

        try:
            robot.disconnect()
            logger.info("✅ Disconnected successfully")
        except Exception as e:
            logger.error(f"❌ Error during disconnect: {e}")

        logger.info("🎉 Program completed!")
        logger.info("📚 For more usage, check mozrobot documentation")

        # Ensure no child processes are left running
        # (terminate first, then escalate to kill if a child survives the
        # 1-second join timeout)
        import multiprocessing
        remaining_children = multiprocessing.active_children()
        if remaining_children:
            logger.warning(f"⚠️  Found {len(remaining_children)} remaining child process(es), cleaning up...")
            for process in remaining_children:
                try:
                    logger.debug(f"Terminating remaining process: {process.name}")
                    process.terminate()
                    process.join(timeout=1.0)
                    if process.is_alive():
                        logger.debug(f"Killing stubborn process: {process.name}")
                        process.kill()
                except Exception as e:
                    # Best-effort cleanup: never let reaping errors mask the
                    # program's real exit status.
                    logger.debug(f"Error cleaning up process {process.name}: {e}")
            logger.info("🧹 Child process cleanup completed")

if __name__ == "__main__":
    # Last-resort guard: anything main() did not handle is logged once and
    # the process exits with a non-zero status.
    try:
        main()
    except Exception as fatal_err:
        logger.error(f"❌ Fatal error: {fatal_err}")
        raise SystemExit(1)