# Copyright 2019 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import contextlib
import functools
import math
import os
import re
import sys
import unittest

from launch import LaunchDescription
from launch.actions import ExecuteProcess
from launch.actions import RegisterEventHandler
from launch.actions import ResetEnvironment
from launch.actions import SetEnvironmentVariable
from launch.event_handlers import OnShutdown

from launch_ros.actions import Node

import launch_testing
import launch_testing.actions
import launch_testing.asserts
import launch_testing.markers
import launch_testing.tools
from launch_testing_ros.actions import EnableRmwIsolation
import launch_testing_ros.tools

import pytest

from rclpy.utilities import get_available_rmw_implementations
from ros2cli.helpers import get_rmw_additional_env


# Skip cli tests on Windows while they exhibit pathological behavior
# https://github.com/ros2/build_farmer/issues/248
if sys.platform.startswith('win'):
    # Skip the whole module, not individual tests.
    pytest.skip(
        'CLI tests can block for a pathological amount of time on Windows.',
        allow_module_level=True,
    )


@pytest.mark.rostest
@launch_testing.parametrize('rmw_implementation', get_available_rmw_implementations())
def generate_test_description(rmw_implementation):
    """
    Build the launch description for the ``ros2 topic`` CLI tests.

    Parametrized over every available RMW implementation. Launches fixture
    nodes (a talker/listener pair, a hidden talker, per-type repeaters and a
    TwistStamped publisher) inside an isolated RMW environment, restarting
    the ROS 2 daemon so discovery state does not leak between runs.

    Returns a ``(LaunchDescription, dict)`` pair; the dict is ``locals()``,
    which launch_testing uses to inject entities (e.g.
    ``listener_node_action``, ``rmw_implementation``) into tests by name —
    so local variable names here are part of the test contract.
    """
    path_to_fixtures = os.path.join(os.path.dirname(__file__), 'fixtures')
    # Environment selecting the RMW under test; unbuffered stdout so fixture
    # output reaches the test process immediately.
    additional_env = get_rmw_additional_env(rmw_implementation)
    additional_env['PYTHONUNBUFFERED'] = '1'
    set_env_actions = [SetEnvironmentVariable(k, v) for k, v in additional_env.items()]
    path_to_talker_node_script = os.path.join(path_to_fixtures, 'talker_node.py')
    path_to_listener_node_script = os.path.join(path_to_fixtures, 'listener_node.py')

    # Talker remapped onto a hidden topic name (leading underscore).
    hidden_talker_node_action = Node(
        executable=sys.executable,
        arguments=[path_to_talker_node_script],
        remappings=[('chatter', '_hidden_chatter')],
    )
    talker_node_action = Node(
        executable=sys.executable,
        arguments=[path_to_talker_node_script],
    )
    listener_node_action = Node(
        executable=sys.executable,
        arguments=[path_to_listener_node_script],
        remappings=[('chatter', 'chit_chatter')],
    )

    path_to_repeater_node_script = os.path.join(path_to_fixtures, 'repeater_node.py')

    # One repeater per test_msgs type exercised by the echo tests below.
    array_repeater_node_action = Node(
        executable=sys.executable,
        arguments=[path_to_repeater_node_script, 'test_msgs/msg/Arrays'],
        name='array_repeater',
        remappings=[('/array_repeater/output', '/arrays')],
        output='screen',
    )
    defaults_repeater_node_action = Node(
        executable=sys.executable,
        arguments=[path_to_repeater_node_script, 'test_msgs/msg/Defaults'],
        name='defaults_repeater',
        remappings=[('/defaults_repeater/output', '/defaults')],
    )
    bounded_sequences_repeater_node_action = Node(
        executable=sys.executable,
        arguments=[
            path_to_repeater_node_script, 'test_msgs/msg/BoundedSequences'
        ],
        name='bounded_sequences_repeater',
        remappings=[('/bounded_sequences_repeater/output', '/bounded_sequences')],
    )
    unbounded_sequences_repeater_node_action = Node(
        executable=sys.executable,
        arguments=[
            path_to_repeater_node_script, 'test_msgs/msg/UnboundedSequences'
        ],
        name='unbounded_sequences_repeater',
        remappings=[('/unbounded_sequences_repeater/output', '/unbounded_sequences')],
    )

    path_to_controller_node_script = os.path.join(path_to_fixtures, 'controller_node.py')

    # Publishes on /cmd_vel (geometry_msgs/msg/TwistStamped per the tests).
    cmd_vel_controller_node_action = Node(
        executable=sys.executable,
        arguments=[path_to_controller_node_script],
    )

    return LaunchDescription([
        # Always restart daemon to isolate tests.
        ExecuteProcess(
            cmd=['ros2', 'daemon', 'stop'],
            name='daemon-stop',
            on_exit=[
                *set_env_actions,
                EnableRmwIsolation(),
                RegisterEventHandler(OnShutdown(on_shutdown=[
                    # Stop daemon in isolated environment with proper ROS_DOMAIN_ID
                    ExecuteProcess(
                        cmd=['ros2', 'daemon', 'stop'],
                        name='daemon-stop-isolated',
                        # Use the same isolated environment
                        additional_env=dict(additional_env),
                    ),
                    # This must be done after stopping the daemon in the isolated environment
                    ResetEnvironment(),
                ])),
                ExecuteProcess(
                    cmd=['ros2', 'daemon', 'start'],
                    name='daemon-start',
                    on_exit=[
                        # Add talker/listener pair.
                        talker_node_action,
                        listener_node_action,
                        # Add hidden talker.
                        hidden_talker_node_action,
                        # Add topic repeaters.
                        array_repeater_node_action,
                        defaults_repeater_node_action,
                        bounded_sequences_repeater_node_action,
                        unbounded_sequences_repeater_node_action,
                        # Add stamped data publisher.
                        cmd_vel_controller_node_action,
                        launch_testing.actions.ReadyToTest()
                    ],
                )
            ]
        ),
    ]), locals()


class TestROS2TopicCLI(unittest.TestCase):

    @classmethod
    def setUpClass(
        cls,
        launch_service,
        proc_info,
        proc_output,
        rmw_implementation,
        listener_node_action
    ):
        """
        Prepare per-class helpers for running ``ros2 topic`` commands.

        All arguments after ``cls`` are injected by launch_testing, matched
        by name against the ``locals()`` dict returned from
        ``generate_test_description``.
        """
        # Filter RMW-specific noise and the transient "not published yet"
        # warning so output comparisons are stable across implementations.
        rmw_implementation_filter = launch_testing_ros.tools.basic_output_filter(
            filtered_patterns=['WARNING: topic .* does not appear to be published yet'],
            filtered_rmw_implementation=rmw_implementation
        )

        @contextlib.contextmanager
        def launch_topic_command(self, arguments):
            # Run `ros2 topic <arguments>` and yield a process proxy exposing
            # its output and exit code; torn down when the context exits.
            topic_command_action = ExecuteProcess(
                cmd=['ros2', 'topic', *arguments],
                name='ros2topic-cli',
                output='screen'
            )
            with launch_testing.tools.launch_process(
                launch_service, topic_command_action, proc_info, proc_output,
                output_filter=rmw_implementation_filter
            ) as topic_command:
                yield topic_command
        cls.launch_topic_command = launch_topic_command

        # Proxy over the already-running listener fixture, with the same
        # output filtering applied.
        cls.listener_node = launch_testing.tools.ProcessProxy(
            listener_node_action, proc_info, proc_output,
            output_filter=rmw_implementation_filter
        )

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_list_topics(self):
        """``ros2 topic list`` reports exactly the expected visible topics."""
        expected_topics = [
            '/arrays',
            '/bounded_sequences',
            '/chatter',
            '/chit_chatter',
            '/cmd_vel',
            '/defaults',
            '/parameter_events',
            '/rosout',
            '/unbounded_sequences',
        ]
        with self.launch_topic_command(arguments=['list']) as command:
            assert command.wait_for_shutdown(timeout=10)
        assert command.exit_code == launch_testing.asserts.EXIT_OK
        assert launch_testing.tools.expect_output(
            expected_lines=expected_topics,
            text=command.output,
            strict=True,
        )

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_list_all_topics(self):
        """``ros2 topic list --include-hidden-topics`` also shows hidden ones."""
        expected_topics = [
            '/_hidden_chatter',
            '/arrays',
            '/bounded_sequences',
            '/chatter',
            '/chit_chatter',
            '/cmd_vel',
            '/defaults',
            '/parameter_events',
            '/rosout',
            '/unbounded_sequences',
        ]
        with self.launch_topic_command(
            arguments=['list', '--include-hidden-topics']
        ) as command:
            assert command.wait_for_shutdown(timeout=10)
        assert command.exit_code == launch_testing.asserts.EXIT_OK
        assert launch_testing.tools.expect_output(
            expected_lines=expected_topics,
            text=command.output,
            strict=True,
        )

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_list_with_types(self):
        """``ros2 topic list -t`` appends the message type to each topic."""
        expected_topics = [
            '/arrays [test_msgs/msg/Arrays]',
            '/bounded_sequences [test_msgs/msg/BoundedSequences]',
            '/chatter [std_msgs/msg/String]',
            '/chit_chatter [std_msgs/msg/String]',
            '/cmd_vel [geometry_msgs/msg/TwistStamped]',
            '/defaults [test_msgs/msg/Defaults]',
            '/parameter_events [rcl_interfaces/msg/ParameterEvent]',
            '/rosout [rcl_interfaces/msg/Log]',
            '/unbounded_sequences [test_msgs/msg/UnboundedSequences]',
        ]
        with self.launch_topic_command(arguments=['list', '-t']) as command:
            assert command.wait_for_shutdown(timeout=10)
        assert command.exit_code == launch_testing.asserts.EXIT_OK
        assert launch_testing.tools.expect_output(
            expected_lines=expected_topics,
            text=command.output,
            strict=True,
        )

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_list_with_verbose(self):
        """``ros2 topic list -v`` groups topics by publisher/subscriber counts."""
        expected_report = [
            'Published topics:',
            ' * /arrays [test_msgs/msg/Arrays] 1 publisher',
            ' * /bounded_sequences [test_msgs/msg/BoundedSequences] 1 publisher',
            ' * /chatter [std_msgs/msg/String] 1 publisher',
            ' * /cmd_vel [geometry_msgs/msg/TwistStamped] 1 publisher',
            ' * /defaults [test_msgs/msg/Defaults] 1 publisher',
            ' * /parameter_events [rcl_interfaces/msg/ParameterEvent] 9 publishers',
            ' * /rosout [rcl_interfaces/msg/Log] 9 publishers',
            ' * /unbounded_sequences [test_msgs/msg/UnboundedSequences] 1 publisher',
            '',
            'Subscribed topics:',
            ' * /chit_chatter [std_msgs/msg/String] 1 subscriber',
            '',
        ]
        with self.launch_topic_command(arguments=['list', '-v']) as command:
            assert command.wait_for_shutdown(timeout=10)
        assert command.exit_code == launch_testing.asserts.EXIT_OK
        assert launch_testing.tools.expect_output(
            expected_lines=expected_report,
            text=command.output,
            strict=True,
        )

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_list_count(self):
        """``ros2 topic list -c`` prints a single line with the topic count."""
        with self.launch_topic_command(arguments=['list', '-c']) as command:
            assert command.wait_for_shutdown(timeout=10)
        assert command.exit_code == launch_testing.asserts.EXIT_OK
        lines = command.output.splitlines()
        assert len(lines) == 1
        # 9 visible topics (hidden ones excluded), matching test_list_topics.
        assert int(lines[0]) == 9

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_endpoint_info(self):
        """``ros2 topic info`` summarizes type and endpoint counts."""
        expected_summary = [
            'Type: std_msgs/msg/String',
            'Publisher count: 1',
            'Subscription count: 0',
        ]
        with self.launch_topic_command(arguments=['info', '/chatter']) as command:
            assert command.wait_for_shutdown(timeout=10)
        assert command.exit_code == launch_testing.asserts.EXIT_OK
        assert launch_testing.tools.expect_output(
            expected_lines=expected_summary,
            text=command.output,
            strict=True,
        )

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_endpoint_info_verbose(self):
        """
        Check the verbose endpoint report printed by ``ros2 topic info -v``.

        Verifies the topic type, its RIHS type hash, and the per-endpoint
        QoS profile block for the single /chatter publisher.
        """
        # Hash value below can be found in std_msgs/msg/String.json
        STD_MSGS_STRING_TYPE_HASH_STR = 'RIHS01_' \
            'df668c740482bbd48fb39d76a70dfd4bd59db1288021743503259e948f6b1a18'

        with self.launch_topic_command(arguments=['info', '-v', '/chatter']) as topic_command:
            assert topic_command.wait_for_shutdown(timeout=10)
        assert topic_command.exit_code == launch_testing.asserts.EXIT_OK
        assert launch_testing.tools.expect_output(
            expected_lines=[
                'Type: std_msgs/msg/String',
                '',
                'Publisher count: 1',
                '',
                re.compile(r'Node name: \w+'),
                'Node namespace: /',
                'Topic type: std_msgs/msg/String',
                f'Topic type hash: {STD_MSGS_STRING_TYPE_HASH_STR}',
                re.compile(r'Endpoint type: (INVALID|PUBLISHER|SUBSCRIPTION)'),
                re.compile(r'GID: [\w\.]+'),
                'QoS profile:',
                re.compile(r'  Reliability: (RELIABLE|BEST_EFFORT|SYSTEM_DEFAULT|UNKNOWN)'),
                # '*' (not '?') so a multi-character depth suffix such as
                # ' (10)' after KEEP_LAST can still be matched.
                re.compile(
                    r'  History \(Depth\): (KEEP_LAST|KEEP_ALL|SYSTEM_DEFAULT|UNKNOWN)[\s\(\d\)]*'
                ),
                re.compile(r'  Durability: (VOLATILE|TRANSIENT_LOCAL|SYSTEM_DEFAULT|UNKNOWN)'),
                '  Lifespan: Infinite',
                '  Deadline: Infinite',
                re.compile(r'  Liveliness: (AUTOMATIC|MANUAL_BY_TOPIC|SYSTEM_DEFAULT|UNKNOWN)'),
                '  Liveliness lease duration: Infinite',
                '',
                'Subscription count: 0',
                ''
            ],
            text=topic_command.output,
            strict=True
        )

    def test_info_on_unknown_topic(self):
        """Querying info on a nonexistent topic prints an error message."""
        with self.launch_topic_command(arguments=['info', '/unknown_topic']) as command:
            assert command.wait_for_shutdown(timeout=10)
        # No exit-code check: the error path is allowed to exit non-zero.
        assert launch_testing.tools.expect_output(
            expected_lines=["Unknown topic '/unknown_topic'"],
            text=command.output,
            strict=True,
        )

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_type(self):
        """``ros2 topic type`` resolves /chatter to its message type."""
        with self.launch_topic_command(arguments=['type', '/chatter']) as command:
            assert command.wait_for_shutdown(timeout=10)
        assert command.exit_code == launch_testing.asserts.EXIT_OK
        assert launch_testing.tools.expect_output(
            expected_lines=['std_msgs/msg/String'],
            text=command.output,
            strict=True,
        )

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_hidden_topic_type(self):
        """Hidden topics are not resolved: expect failure with no output."""
        arguments = ['type', '/_hidden_chatter']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_shutdown(timeout=10)
        assert command.exit_code == 1
        assert command.output == ''

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_find_topic_type(self):
        """``ros2 topic find`` maps rcl_interfaces/msg/Log to /rosout."""
        arguments = ['find', 'rcl_interfaces/msg/Log']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_shutdown(timeout=10)
        assert command.exit_code == launch_testing.asserts.EXIT_OK
        assert launch_testing.tools.expect_output(
            expected_lines=['/rosout'],
            text=command.output,
            strict=True,
        )

    def test_find_not_a_topic_typename(self):
        """Finding a nonexistent message type succeeds but prints nothing."""
        arguments = ['find', 'rcl_interfaces/msg/NotAMessageTypeName']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_shutdown(timeout=10)
        assert command.exit_code == launch_testing.asserts.EXIT_OK
        assert not command.output

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo(self):
        """Echoing /chatter shows the talker's messages."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=[
                re.compile(r"data: 'Hello World: \d+'"),
                '---',
            ],
            strict=True,
        )
        with self.launch_topic_command(arguments=['echo', '/chatter']) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_no_str_topic_echo(self):
        """With --no-str, string data is replaced by a length placeholder."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=[
                re.compile(r"data: '<string length: <\d+>>'"),
                '---',
            ],
            strict=True,
        )
        arguments = ['echo', '--no-str', '/chatter']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_csv_topic_echo(self):
        """With --csv, a Defaults message is flattened onto one CSV line."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=[
                "True,b'2',100,1.125,1.125,-50,200,-1000,2000,-30000,60000,-40000000,50000000"
            ],
            strict=True,
        )
        arguments = ['echo', '--csv', '/defaults']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_no_arr_topic_echo_on_array_message(self):
        """With --no-arr, fixed-size arrays print as type placeholders."""
        with self.launch_topic_command(
            arguments=['echo', '--no-arr', '/arrays'],
        ) as topic_command:
            assert topic_command.wait_for_output(functools.partial(
                launch_testing.tools.expect_output, expected_lines=[
                    "bool_values: '<array type: boolean[3]>'",
                    "byte_values: '<array type: octet[3]>'",
                    "char_values: '<array type: uint8[3]>'",
                    "float32_values: '<array type: float[3]>'",
                    "float64_values: '<array type: double[3]>'",
                    "int8_values: '<array type: int8[3]>'",
                    "uint8_values: '<array type: uint8[3]>'",
                    "int16_values: '<array type: int16[3]>'",
                    "uint16_values: '<array type: uint16[3]>'",
                    "int32_values: '<array type: int32[3]>'",
                    "uint32_values: '<array type: uint32[3]>'",
                    "int64_values: '<array type: int64[3]>'",
                    "uint64_values: '<array type: uint64[3]>'",
                    "string_values: '<array type: string[3]>'",
                    "basic_types_values: '<array type: test_msgs/msg/BasicTypes[3]>'",
                    "constants_values: '<array type: test_msgs/msg/Constants[3]>'",
                    "defaults_values: '<array type: test_msgs/msg/Defaults[3]>'",
                    "bool_values_default: '<array type: boolean[3]>'",
                    "byte_values_default: '<array type: octet[3]>'",
                    "char_values_default: '<array type: uint8[3]>'",
                    "float32_values_default: '<array type: float[3]>'",
                    "float64_values_default: '<array type: double[3]>'",
                    "int8_values_default: '<array type: int8[3]>'",
                    "uint8_values_default: '<array type: uint8[3]>'",
                    "int16_values_default: '<array type: int16[3]>'",
                    "uint16_values_default: '<array type: uint16[3]>'",
                    "int32_values_default: '<array type: int32[3]>'",
                    "uint32_values_default: '<array type: uint32[3]>'",
                    "int64_values_default: '<array type: int64[3]>'",
                    "uint64_values_default: '<array type: uint64[3]>'",
                    "string_values_default: '<array type: string[3]>'",
                    'alignment_check: 0',
                    '---'
                # NOTE(review): strict=False here, unlike the sequence
                # variants below — presumably to tolerate extra output
                # lines; confirm before tightening.
                ], strict=False
            ), timeout=10), 'Output does not match: ' + topic_command.output
        assert topic_command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_no_arr_topic_echo_on_seq_message(self):
        """With --no-arr, unbounded sequences print as type/length placeholders."""
        with self.launch_topic_command(
            arguments=['echo', '--no-arr', '/unbounded_sequences'],
        ) as topic_command:
            assert topic_command.wait_for_output(functools.partial(
                launch_testing.tools.expect_output, expected_lines=[
                    "bool_values: '<sequence type: boolean, length: 0>'",
                    "byte_values: '<sequence type: octet, length: 0>'",
                    "char_values: '<sequence type: uint8, length: 0>'",
                    "float32_values: '<sequence type: float, length: 0>'",
                    "float64_values: '<sequence type: double, length: 0>'",
                    "int8_values: '<sequence type: int8, length: 0>'",
                    "uint8_values: '<sequence type: uint8, length: 0>'",
                    "int16_values: '<sequence type: int16, length: 0>'",
                    "uint16_values: '<sequence type: uint16, length: 0>'",
                    "int32_values: '<sequence type: int32, length: 0>'",
                    "uint32_values: '<sequence type: uint32, length: 0>'",
                    "int64_values: '<sequence type: int64, length: 0>'",
                    "uint64_values: '<sequence type: uint64, length: 0>'",
                    "string_values: '<sequence type: string, length: 0>'",
                    "basic_types_values: '<sequence type: test_msgs/msg/BasicTypes, length: 0>'",
                    "constants_values: '<sequence type: test_msgs/msg/Constants, length: 0>'",
                    "defaults_values: '<sequence type: test_msgs/msg/Defaults, length: 0>'",
                    "bool_values_default: '<sequence type: boolean, length: 3>'",
                    "byte_values_default: '<sequence type: octet, length: 3>'",
                    "char_values_default: '<sequence type: uint8, length: 3>'",
                    "float32_values_default: '<sequence type: float, length: 3>'",
                    "float64_values_default: '<sequence type: double, length: 3>'",
                    "int8_values_default: '<sequence type: int8, length: 3>'",
                    "uint8_values_default: '<sequence type: uint8, length: 3>'",
                    "int16_values_default: '<sequence type: int16, length: 3>'",
                    "uint16_values_default: '<sequence type: uint16, length: 3>'",
                    "int32_values_default: '<sequence type: int32, length: 3>'",
                    "uint32_values_default: '<sequence type: uint32, length: 3>'",
                    "int64_values_default: '<sequence type: int64, length: 3>'",
                    "uint64_values_default: '<sequence type: uint64, length: 3>'",
                    "string_values_default: '<sequence type: string, length: 3>'",
                    'alignment_check: 0',
                    '---'
                ], strict=True
            ), timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_no_arr_topic_echo_on_bounded_seq_message(self):
        """With --no-arr, bounded sequences print bound and length placeholders."""
        with self.launch_topic_command(
            arguments=['echo', '--no-arr', '/bounded_sequences'],
        ) as topic_command:
            assert topic_command.wait_for_output(functools.partial(
                launch_testing.tools.expect_output, expected_lines=[
                    "bool_values: '<sequence type: boolean[3], length: 0>'",
                    "byte_values: '<sequence type: octet[3], length: 0>'",
                    "char_values: '<sequence type: uint8[3], length: 0>'",
                    "float32_values: '<sequence type: float[3], length: 0>'",
                    "float64_values: '<sequence type: double[3], length: 0>'",
                    "int8_values: '<sequence type: int8[3], length: 0>'",
                    "uint8_values: '<sequence type: uint8[3], length: 0>'",
                    "int16_values: '<sequence type: int16[3], length: 0>'",
                    "uint16_values: '<sequence type: uint16[3], length: 0>'",
                    "int32_values: '<sequence type: int32[3], length: 0>'",
                    "uint32_values: '<sequence type: uint32[3], length: 0>'",
                    "int64_values: '<sequence type: int64[3], length: 0>'",
                    "uint64_values: '<sequence type: uint64[3], length: 0>'",
                    "string_values: '<sequence type: string[3], length: 0>'",
                    # Implicit string concatenation: one expected line.
                    'basic_types_values: '
                    "'<sequence type: test_msgs/msg/BasicTypes[3], length: 0>'",
                    "constants_values: '<sequence type: test_msgs/msg/Constants[3], length: 0>'",
                    "defaults_values: '<sequence type: test_msgs/msg/Defaults[3], length: 0>'",
                    "bool_values_default: '<sequence type: boolean[3], length: 3>'",
                    "byte_values_default: '<sequence type: octet[3], length: 3>'",
                    "char_values_default: '<sequence type: uint8[3], length: 3>'",
                    "float32_values_default: '<sequence type: float[3], length: 3>'",
                    "float64_values_default: '<sequence type: double[3], length: 3>'",
                    "int8_values_default: '<sequence type: int8[3], length: 3>'",
                    "uint8_values_default: '<sequence type: uint8[3], length: 3>'",
                    "int16_values_default: '<sequence type: int16[3], length: 3>'",
                    "uint16_values_default: '<sequence type: uint16[3], length: 3>'",
                    "int32_values_default: '<sequence type: int32[3], length: 3>'",
                    "uint32_values_default: '<sequence type: uint32[3], length: 3>'",
                    "int64_values_default: '<sequence type: int64[3], length: 3>'",
                    "uint64_values_default: '<sequence type: uint64[3], length: 3>'",
                    "string_values_default: '<sequence type: string[3], length: 3>'",
                    'alignment_check: 0',
                    '---'
                ], strict=True
            ), timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_truncate_length_topic_echo(self):
        """
        Check that ``--truncate-length 5`` shortens echoed string data.

        The CLI truncates 'Hello World: N' to 'Hello' followed by a literal
        '...' marker. The dots in the pattern are escaped so the test does
        not accept three arbitrary characters after 'Hello'.
        """
        with self.launch_topic_command(
            arguments=['echo', '--truncate-length', '5', '/chatter'],
        ) as topic_command:
            assert topic_command.wait_for_output(functools.partial(
                launch_testing.tools.expect_output, expected_lines=[
                    re.compile(r'data: Hello\.\.\.'),
                    '---'
                ], strict=True
            ), timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_field(self):
        """Echo a single scalar field of /arrays."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=['0', '---'],
            strict=True,
        )
        arguments = ['echo', '/arrays', '--field', 'alignment_check']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_multi_fields(self):
        """Echo two scalar fields of /defaults in one command."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=['-50', '200', '---'],
            strict=True,
        )
        arguments = ['echo', '/defaults', '--field', 'int8_value', '--field', 'uint8_value']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_field_nested(self):
        """Echo a nested sub-message field of /cmd_vel."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=[
                'x: 0.0',
                'y: 0.0',
                'z: 0.0',
                '---',
            ],
            strict=True,
        )
        arguments = ['echo', '/cmd_vel', '--field', 'twist.angular']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_field_array(self):
        """Echo a single indexed array element of /arrays."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=['-1.125', '---'],
            strict=True,
        )
        arguments = ['echo', '/arrays', '--field', 'float32_values_default.[2]']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_multi_fields_nested(self):
        """Echo two nested scalar fields of /cmd_vel in one command."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=['1.0', '0.0', '---'],
            strict=True,
        )
        arguments = ['echo', '/cmd_vel', '--field', 'twist.linear.x',
                     '--field', 'twist.linear.y']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_multi_fields_array(self):
        """Echo two indexed array elements of /arrays in one command."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=['-1.125', 'max value', '---'],
            strict=True,
        )
        arguments = ['echo', '/arrays', '--field', 'float32_values_default.[2]', '--field',
                     'string_values_default.[1]']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_field_not_a_member(self):
        """An unknown field name yields an invalid-field error message."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=[
                "Invalid field 'not_member': 'Arrays' object has no attribute 'not_member'",
            ],
            strict=True,
        )
        arguments = ['echo', '/arrays', '--field', 'not_member']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_field_array_not_an_array(self):
        """Indexing into a scalar element yields an invalid-field error."""
        matcher = functools.partial(
            launch_testing.tools.expect_output,
            expected_lines=[
                "Invalid field 'float32_values_default.[0].[0]': invalid index to "
                'scalar variable.',
            ],
            strict=True,
        )
        arguments = ['echo', '/arrays', '--field', 'float32_values_default.[0].[0]']
        with self.launch_topic_command(arguments=arguments) as command:
            assert command.wait_for_output(matcher, timeout=10)
        assert command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_field_array_index_out_of_bounds(self):
        """An array index past the end of the array must print an error."""
        command_args = ['echo', '/arrays', '--field', 'float32_values_default.[3]']
        with self.launch_topic_command(arguments=command_args) as topic_command:
            output_check = functools.partial(
                launch_testing.tools.expect_output,
                expected_lines=[
                    "Invalid field 'float32_values_default.[3]': index 3 is out of bounds "
                    'for axis 0 with size 3',
                ],
                strict=True)
            assert topic_command.wait_for_output(output_check, timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_field_array_no_index(self):
        """A non-numeric array index must print an error."""
        command_args = ['echo', '/arrays', '--field', 'float32_values_default.[abc]']
        with self.launch_topic_command(arguments=command_args) as topic_command:
            output_check = functools.partial(
                launch_testing.tools.expect_output,
                expected_lines=[
                    "Invalid field 'float32_values_default.[abc]': 'numpy.ndarray' object "
                    "has no attribute '[abc]'",
                ],
                strict=True)
            assert topic_command.wait_for_output(output_check, timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_echo_multi_fields_not_a_member(self):
        """A missing field must be reported regardless of its position in the field list."""
        error_line = (
            "Invalid field 'not_member': 'Arrays' object has no attribute 'not_member'"
        )
        field_orders = [
            ['--field', 'not_member', '--field', 'alignment_check'],
            ['--field', 'alignment_check', '--field', 'not_member'],
        ]
        for fields in field_orders:
            with self.launch_topic_command(
                arguments=['echo', '/arrays'] + fields,
            ) as topic_command:
                output_check = functools.partial(
                    launch_testing.tools.expect_output,
                    expected_lines=[error_line], strict=True)
                assert topic_command.wait_for_output(output_check, timeout=10)
            assert topic_command.wait_for_shutdown(timeout=10)

    def test_topic_echo_field_invalid(self):
        """Malformed field selectors must each produce the matching error message."""
        cases = [
            ('/', "Invalid field '/': 'Arrays' object has no attribute '/'"),
            ('.', "Invalid field value '.'"),
        ]
        for field, expected_error in cases:
            with self.launch_topic_command(
                arguments=['echo', '/arrays', '--field', field],
            ) as topic_command:
                output_check = functools.partial(
                    launch_testing.tools.expect_output,
                    expected_lines=[expected_error], strict=True)
                assert topic_command.wait_for_output(output_check, timeout=10)
            assert topic_command.wait_for_shutdown(timeout=10)

    def test_topic_echo_multi_fields_invalid(self):
        """A malformed selector must be reported even when combined with a valid one."""
        cases = [
            (['--field', '/', '--field', 'alignment_check'],
             "Invalid field '/': 'Arrays' object has no attribute '/'"),
            (['--field', 'alignment_check', '--field', '.'],
             "Invalid field value '.'"),
        ]
        for fields, expected_error in cases:
            with self.launch_topic_command(
                arguments=['echo', '/arrays'] + fields,
            ) as topic_command:
                output_check = functools.partial(
                    launch_testing.tools.expect_output,
                    expected_lines=[expected_error], strict=True)
                assert topic_command.wait_for_output(output_check, timeout=10)
            assert topic_command.wait_for_shutdown(timeout=10)

    def test_topic_echo_no_publisher(self):
        """Echoing a topic with no publisher must fail with a clear diagnostic."""
        with self.launch_topic_command(
            arguments=['echo', '/this_topic_has_no_pub'],
        ) as topic_command:
            assert topic_command.wait_for_shutdown(timeout=2)
        # No publisher means the message type cannot be inferred, so the
        # command must exit with a non-OK code and explain why.
        assert topic_command.exit_code != launch_testing.asserts.EXIT_OK
        output_matches = launch_testing.tools.expect_output(
            expected_lines=['Could not determine the type for the passed topic'],
            text=topic_command.output,
            strict=True,
        )
        assert output_matches

    def test_topic_pub(self):
        """`topic pub` publishes repeatedly and the listener node receives the data."""
        command_args = [
            'pub',
            '--keep-alive', '3',  # seconds
            '--qos-durability', 'transient_local',
            '--qos-reliability', 'reliable',
            '/chit_chatter',
            'std_msgs/msg/String',
            '{data: foo}'
        ]
        with self.launch_topic_command(arguments=command_args) as topic_command:
            publisher_check = functools.partial(
                launch_testing.tools.expect_output,
                expected_lines=[
                    'publisher: beginning loop',
                    "publishing #1: std_msgs.msg.String(data='foo')",
                    ''
                ])
            assert topic_command.wait_for_output(publisher_check, timeout=10)
            # The listener should report the payload at least three times.
            heard_pattern = re.compile(
                r'\[INFO\] \[\d+\.\d*\] \[listener\]: I heard: \[foo\]')
            listener_check = functools.partial(
                launch_testing.tools.expect_output,
                expected_lines=[heard_pattern] * 3, strict=False)
            assert self.listener_node.wait_for_output(listener_check, timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)

    def test_topic_pub_once(self):
        """`topic pub --once` publishes a single message and exits cleanly."""
        command_args = [
            'pub', '--once',
            '--keep-alive', '3',  # seconds
            '--qos-durability', 'transient_local',
            '--qos-reliability', 'reliable',
            '/chit_chatter',
            'std_msgs/msg/String',
            '{data: bar}'
        ]
        with self.launch_topic_command(arguments=command_args) as topic_command:
            publisher_check = functools.partial(
                launch_testing.tools.expect_output,
                expected_lines=[
                    'publisher: beginning loop',
                    "publishing #1: std_msgs.msg.String(data='bar')",
                    ''
                ])
            assert topic_command.wait_for_output(publisher_check, timeout=10)
            assert topic_command.wait_for_shutdown(timeout=10)
            heard_pattern = re.compile(
                r'\[INFO\] \[\d+\.\d*\] \[listener\]: I heard: \[bar\]')
            listener_check = functools.partial(
                launch_testing.tools.expect_output,
                expected_lines=[heard_pattern], strict=False)
            assert self.listener_node.wait_for_output(listener_check, timeout=10)
        assert topic_command.exit_code == launch_testing.asserts.EXIT_OK

    def test_topic_pub_once_matching_two_listeners(
        self, launch_service, proc_info, proc_output, path_to_listener_node_script
    ):
        """`topic pub --once -w 2` waits for two subscribers before publishing once."""
        second_listener_node_action = Node(
            executable=sys.executable,
            arguments=[path_to_listener_node_script],
            remappings=[('chatter', 'chit_chatter')],
            name='second_listener',
        )
        pub_args = [
            'pub', '--once',
            '--keep-alive', '3',  # seconds
            '-w', '2',
            '--qos-durability', 'transient_local',
            '--qos-reliability', 'reliable',
            '/chit_chatter',
            'std_msgs/msg/String',
            '{data: bar}'
        ]
        # ExitStack preserves the original enter/exit ordering: the second
        # listener is launched first, then the pub command.
        with contextlib.ExitStack() as stack:
            second_listener_node = stack.enter_context(
                launch_testing.tools.launch_process(
                    launch_service, second_listener_node_action, proc_info, proc_output))
            topic_command = stack.enter_context(
                self.launch_topic_command(arguments=pub_args))
            publisher_check = functools.partial(
                launch_testing.tools.expect_output,
                expected_lines=[
                    'publisher: beginning loop',
                    "publishing #1: std_msgs.msg.String(data='bar')",
                    ''
                ])
            assert topic_command.wait_for_output(publisher_check, timeout=10)
            assert topic_command.wait_for_shutdown(timeout=10)
            # Both listener processes must report the single published message.
            for process, node_name in (
                (self.listener_node, 'listener'),
                (second_listener_node, 'second_listener'),
            ):
                heard_pattern = re.compile(
                    r'\[INFO\] \[\d+\.\d*\] \[' + node_name + r'\]: I heard: \[bar\]')
                listener_check = functools.partial(
                    launch_testing.tools.expect_output,
                    expected_lines=[heard_pattern])
                assert process.wait_for_output(listener_check, timeout=10)
        assert topic_command.exit_code == launch_testing.asserts.EXIT_OK

    def test_topic_pub_print_every_two(self):
        """`topic pub -p 2` only prints every second published message."""
        command_args = [
            'pub',
            '-p', '2',
            '--keep-alive', '3',  # seconds
            '--qos-durability', 'transient_local',
            '--qos-reliability', 'reliable',
            '/chit_chatter',
            'std_msgs/msg/String',
            '{data: fizz}'
        ]
        with self.launch_topic_command(arguments=command_args) as topic_command:
            # Only messages #2 and #4 are printed because of -p 2.
            publisher_check = functools.partial(
                launch_testing.tools.expect_output,
                expected_lines=[
                    'publisher: beginning loop',
                    "publishing #2: std_msgs.msg.String(data='fizz')",
                    '',
                    "publishing #4: std_msgs.msg.String(data='fizz')",
                    ''
                ])
            assert topic_command.wait_for_output(publisher_check, timeout=10), \
                'Output does not match: ' + topic_command.output
            heard_pattern = re.compile(
                r'\[INFO\] \[\d+\.\d*\] \[listener\]: I heard: \[fizz\]')
            listener_check = functools.partial(
                launch_testing.tools.expect_output,
                expected_lines=[heard_pattern], strict=False)
            assert self.listener_node.wait_for_output(listener_check, timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_delay(self):
        """Check that `topic delay` reports a near-zero delay for /cmd_vel.

        Fix: the decimal points in the patterns are now escaped (`\\.`); the
        previous unescaped `.` matched any character, weakening the check.
        """
        average_delay_line_pattern = re.compile(r'average delay: (\d+\.\d{3})')
        stats_line_pattern = re.compile(
            r'\s*min: \d+\.\d{3}s max: \d+\.\d{3}s std dev: \d+\.\d{5}s window: \d+'
        )
        with self.launch_topic_command(arguments=['delay', '/cmd_vel']) as topic_command:
            assert topic_command.wait_for_output(functools.partial(
                launch_testing.tools.expect_output, expected_lines=[
                    average_delay_line_pattern, stats_line_pattern
                ], strict=True
            ), timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)

        # The first output line carries the measured average delay, which
        # should be close to zero (10 ms absolute tolerance).
        head_line = topic_command.output.splitlines()[0]
        average_delay = float(average_delay_line_pattern.match(head_line).group(1))
        assert math.isclose(average_delay, 0.0, abs_tol=10e-3)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_hz(self):
        """Check that `topic hz` measures /chatter publishing at ~1 Hz.

        Fix: the decimal points in the patterns are now escaped (`\\.`); the
        previous unescaped `.` matched any character, weakening the check.
        """
        average_rate_line_pattern = re.compile(r'average rate: (\d+\.\d{3})')
        stats_line_pattern = re.compile(
            r'\s*min: \d+\.\d{3}s max: \d+\.\d{3}s std dev: \d+\.\d{5}s window: \d+'
        )
        with self.launch_topic_command(arguments=['hz', '/chatter']) as topic_command:
            assert topic_command.wait_for_output(functools.partial(
                launch_testing.tools.expect_output, expected_lines=[
                    average_rate_line_pattern, stats_line_pattern
                ], strict=True
            ), timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)

        # The first output line carries the measured average rate; the
        # fixture talker is expected to publish at ~1 Hz (asserted below).
        head_line = topic_command.output.splitlines()[0]
        average_rate = float(average_rate_line_pattern.match(head_line).group(1))
        assert math.isclose(average_rate, 1., rel_tol=1e-2)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_multiple_topics_hz(self):
        """Check `topic hz` with two topics prints one stats row per topic.

        Fix: the decimal points in the per-topic row patterns are now escaped
        (`\\.`); the previous unescaped `.` matched any character, weakening
        the check.
        """
        header_pattern = re.compile(r'\s+topic\s+rate\s+min_delta\s+max_delta\s+std_dev\s+window')
        hline_pattern = re.compile(r'=+')
        chatter_line_pattern = re.compile(
          r'/chatter\s+(\d+\.\d{3})\s+\d+\.\d{3}\s+\d+\.\d{3}\s+\d+\.\d{5}\s+\d+\s+')
        hidden_chatter_line_pattern = re.compile(
          r'/_hidden_chatter\s+(\d+\.\d{3})\s+\d+\.\d{3}\s+\d+\.\d{3}\s+\d+\.\d{5}\s+\d+\s+')
        with self.launch_topic_command(
            arguments=['hz', '/chatter', '/_hidden_chatter']
        ) as topic_command:
            assert topic_command.wait_for_output(functools.partial(
                launch_testing.tools.expect_output, expected_lines=[
                    'Subscribed to [/chatter]',
                    'Subscribed to [/_hidden_chatter]',
                    header_pattern, hline_pattern,
                    chatter_line_pattern, hidden_chatter_line_pattern
                ], strict=True
            ), timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)

        # Output lines 4 and 5 are the per-topic stats rows matched above;
        # both topics are expected to publish at ~1 Hz (asserted below).
        chatter_line = topic_command.output.splitlines()[4]
        chatter_average_rate = float(chatter_line_pattern.match(chatter_line).group(1))
        assert math.isclose(chatter_average_rate, 1., rel_tol=1e-2)
        hidden_chatter_line = topic_command.output.splitlines()[5]
        hidden_chatter_average_rate = float(hidden_chatter_line_pattern.match(
            hidden_chatter_line).group(1))
        assert math.isclose(hidden_chatter_average_rate, 1., rel_tol=1e-2)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_filtered_topic_hz(self):
        """Check that `topic hz --filter` halves the measured rate.

        The filter expression drops every other message (odd sequence
        numbers), so the observed rate should be half the publishing rate.

        Fix: the decimal points in the patterns are now escaped (`\\.`); the
        previous unescaped `.` matched any character, weakening the check.
        """
        average_rate_line_pattern = re.compile(r'average rate: (\d+\.\d{3})')
        stats_line_pattern = re.compile(
            r'\s*min: \d+\.\d{3}s max: \d+\.\d{3}s std dev: \d+\.\d{5}s window: \d+'
        )
        with self.launch_topic_command(
            arguments=[
                'hz',
                '--filter',
                'int(m.data.rpartition(\":\")[-1]) % 2 == 0',
                '/chatter'
            ]
        ) as topic_command:
            assert topic_command.wait_for_output(functools.partial(
                launch_testing.tools.expect_output, expected_lines=[
                    average_rate_line_pattern, stats_line_pattern
                ], strict=True
            ), timeout=10), 'Output does not match: ' + topic_command.output
        assert topic_command.wait_for_shutdown(timeout=10)

        # Half of the messages pass the filter, so the rate should be ~0.5 Hz.
        head_line = topic_command.output.splitlines()[0]
        average_rate = float(average_rate_line_pattern.match(head_line).group(1))
        assert math.isclose(average_rate, 0.5, rel_tol=1e-2)

    @launch_testing.markers.retry_on_failure(times=5, delay=1)
    def test_topic_bw(self):
        """`topic bw` reports bandwidth statistics for a publishing topic."""
        expected = [
            'Subscribed to [/defaults]',
            re.compile(r'\d{2,3} B/s from \d+ messages'),
            re.compile(r'\s*Message size mean: \d{2} B min: \d{2} B max: \d{2} B'),
        ]
        with self.launch_topic_command(arguments=['bw', '/defaults']) as topic_command:
            output_check = functools.partial(
                launch_testing.tools.expect_output,
                expected_lines=expected, strict=True)
            assert topic_command.wait_for_output(output_check, timeout=10)
        assert topic_command.wait_for_shutdown(timeout=10)
