input: string, length 53–297k
output: string, 604 distinct values
repo_name: string, 376 distinct values
test_path: string, 583 distinct values
code_path: string, length 7–116
"""Support for Bond fans.""" import logging import math from typing import Any, Callable, List, Optional, Tuple from bond_api import Action, BPUPSubscriptions, DeviceType, Direction from homeassistant.components.fan import ( DIRECTION_FORWARD, DIRECTION_REVERSE, SUPPORT_DIRECTION, SUPPORT_SET_SPEED, FanEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.util.percentage import ( percentage_to_ranged_value, ranged_value_to_percentage, ) from .const import BPUP_SUBS, DOMAIN, HUB from .entity import BondEntity from .utils import BondDevice, BondHub _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: Callable[[List[Entity], bool], None], ) -> None: """Set up Bond fan devices.""" data = hass.data[DOMAIN][entry.entry_id] hub: BondHub = data[HUB] bpup_subs: BPUPSubscriptions = data[BPUP_SUBS] fans = [ BondFan(hub, device, bpup_subs) for device in hub.devices if DeviceType.is_fan(device.type) ] async_add_entities(fans, True) class BondFan(BondEntity, FanEntity): """Representation of a Bond fan.""" def __init__(self, hub: BondHub, device: BondDevice, bpup_subs: BPUPSubscriptions): """Create HA entity representing Bond fan.""" super().__init__(hub, device, bpup_subs) self._power: Optional[bool] = None self._speed: Optional[int] = None self._direction: Optional[int] = None def _apply_state(self, state: dict): self._power = state.get("power") self._speed = state.get("speed") self._direction = state.get("direction") @property def supported_features(self) -> int: """Flag supported features.""" features = 0 if self._device.supports_speed(): features |= SUPPORT_SET_SPEED if self._device.supports_direction(): features |= SUPPORT_DIRECTION return features @property def _speed_range(self) -> Tuple[int, int]: """Return the range of speeds.""" return (1, self._device.props.get("max_speed", 3)) @property def percentage(self) -> Optional[str]: """Return the current speed percentage for the fan.""" if not self._speed or not self._power: return 0 return ranged_value_to_percentage(self._speed_range, self._speed) @property def current_direction(self) -> Optional[str]: """Return fan rotation direction.""" direction = None if self._direction == Direction.FORWARD: direction = DIRECTION_FORWARD elif self._direction == Direction.REVERSE: direction = DIRECTION_REVERSE return direction async def async_set_percentage(self, percentage: int) -> None: """Set the desired speed for the fan.""" _LOGGER.debug("async_set_percentage called with percentage %s", percentage) if percentage == 0: await self.async_turn_off() return bond_speed = math.ceil( percentage_to_ranged_value(self._speed_range, percentage) ) _LOGGER.debug( "async_set_percentage converted percentage %s to bond speed %s", percentage, bond_speed, ) await self._hub.bond.action( self._device.device_id, Action.set_speed(bond_speed) ) async def async_turn_on( self, speed: Optional[str] = None, percentage: Optional[int] = None, preset_mode: Optional[str] = None, **kwargs, ) -> None: """Turn on the fan.""" _LOGGER.debug("Fan async_turn_on called with percentage %s", percentage) if percentage is not None: await self.async_set_percentage(percentage) else: await self._hub.bond.action(self._device.device_id, Action.turn_on()) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the fan off.""" await self._hub.bond.action(self._device.device_id, Action.turn_off()) async def 
async_set_direction(self, direction: str): """Set fan rotation direction.""" bond_direction = ( Direction.REVERSE if direction == DIRECTION_REVERSE else Direction.FORWARD ) await self._hub.bond.action( self._device.device_id, Action.set_direction(bond_direction) )
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/bond/fan.py
"""Support for Netgear LTE binary sensors.""" from homeassistant.components.binary_sensor import DOMAIN, BinarySensorEntity from homeassistant.exceptions import PlatformNotReady from . import CONF_MONITORED_CONDITIONS, DATA_KEY, LTEEntity from .sensor_types import BINARY_SENSOR_CLASSES async def async_setup_platform(hass, config, async_add_entities, discovery_info): """Set up Netgear LTE binary sensor devices.""" if discovery_info is None: return modem_data = hass.data[DATA_KEY].get_modem_data(discovery_info) if not modem_data or not modem_data.data: raise PlatformNotReady binary_sensor_conf = discovery_info[DOMAIN] monitored_conditions = binary_sensor_conf[CONF_MONITORED_CONDITIONS] binary_sensors = [] for sensor_type in monitored_conditions: binary_sensors.append(LTEBinarySensor(modem_data, sensor_type)) async_add_entities(binary_sensors) class LTEBinarySensor(LTEEntity, BinarySensorEntity): """Netgear LTE binary sensor entity.""" @property def is_on(self): """Return true if the binary sensor is on.""" return getattr(self.modem_data.data, self.sensor_type) @property def device_class(self): """Return the class of binary sensor.""" return BINARY_SENSOR_CLASSES[self.sensor_type]
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/netgear_lte/binary_sensor.py
"""Event parser and human readable log generator.""" from datetime import timedelta from itertools import groupby import json import re import sqlalchemy from sqlalchemy.orm import aliased from sqlalchemy.sql.expression import literal import voluptuous as vol from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED from homeassistant.components.history import sqlalchemy_filter_from_include_exclude_conf from homeassistant.components.http import HomeAssistantView from homeassistant.components.recorder.models import ( Events, States, process_timestamp_to_utc_isoformat, ) from homeassistant.components.recorder.util import session_scope from homeassistant.components.script import EVENT_SCRIPT_STARTED from homeassistant.const import ( ATTR_DOMAIN, ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_NAME, ATTR_SERVICE, EVENT_CALL_SERVICE, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, EVENT_LOGBOOK_ENTRY, EVENT_STATE_CHANGED, HTTP_BAD_REQUEST, ) from homeassistant.core import DOMAIN as HA_DOMAIN, callback, split_entity_id from homeassistant.exceptions import InvalidEntityFormatError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entityfilter import ( INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, convert_include_exclude_filter, generate_filter, ) from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) from homeassistant.loader import bind_hass import homeassistant.util.dt as dt_util ENTITY_ID_JSON_TEMPLATE = '"entity_id": "{}"' ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": "([^"]+)"') DOMAIN_JSON_EXTRACT = re.compile('"domain": "([^"]+)"') ICON_JSON_EXTRACT = re.compile('"icon": "([^"]+)"') ATTR_MESSAGE = "message" CONTINUOUS_DOMAINS = ["proximity", "sensor"] DOMAIN = "logbook" GROUP_BY_MINUTES = 15 EMPTY_JSON_OBJECT = "{}" UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":' HA_DOMAIN_ENTITY_ID = f"{HA_DOMAIN}." 
CONFIG_SCHEMA = vol.Schema( {DOMAIN: INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA}, extra=vol.ALLOW_EXTRA ) HOMEASSISTANT_EVENTS = [ EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, ] ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED = [ EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE, *HOMEASSISTANT_EVENTS, ] ALL_EVENT_TYPES = [ EVENT_STATE_CHANGED, *ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED, ] EVENT_COLUMNS = [ Events.event_type, Events.event_data, Events.time_fired, Events.context_id, Events.context_user_id, Events.context_parent_id, ] SCRIPT_AUTOMATION_EVENTS = [EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED] LOG_MESSAGE_SCHEMA = vol.Schema( { vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_MESSAGE): cv.template, vol.Optional(ATTR_DOMAIN): cv.slug, vol.Optional(ATTR_ENTITY_ID): cv.entity_id, } ) @bind_hass def log_entry(hass, name, message, domain=None, entity_id=None, context=None): """Add an entry to the logbook.""" hass.add_job(async_log_entry, hass, name, message, domain, entity_id, context) @bind_hass def async_log_entry(hass, name, message, domain=None, entity_id=None, context=None): """Add an entry to the logbook.""" data = {ATTR_NAME: name, ATTR_MESSAGE: message} if domain is not None: data[ATTR_DOMAIN] = domain if entity_id is not None: data[ATTR_ENTITY_ID] = entity_id hass.bus.async_fire(EVENT_LOGBOOK_ENTRY, data, context=context) async def async_setup(hass, config): """Logbook setup.""" hass.data[DOMAIN] = {} @callback def log_message(service): """Handle sending notification message service calls.""" message = service.data[ATTR_MESSAGE] name = service.data[ATTR_NAME] domain = service.data.get(ATTR_DOMAIN) entity_id = service.data.get(ATTR_ENTITY_ID) if entity_id is None and domain is None: # If there is no entity_id or # domain, the event will get filtered # away so we use the "logbook" domain domain = DOMAIN message.hass = hass message = message.async_render(parse_result=False) async_log_entry(hass, name, message, domain, entity_id) hass.components.frontend.async_register_built_in_panel( "logbook", "logbook", "hass:format-list-bulleted-type" ) conf = config.get(DOMAIN, {}) if conf: filters = sqlalchemy_filter_from_include_exclude_conf(conf) entities_filter = convert_include_exclude_filter(conf) else: filters = None entities_filter = None hass.http.register_view(LogbookView(conf, filters, entities_filter)) hass.services.async_register(DOMAIN, "log", log_message, schema=LOG_MESSAGE_SCHEMA) await async_process_integration_platforms(hass, DOMAIN, _process_logbook_platform) return True async def _process_logbook_platform(hass, domain, platform): """Process a logbook platform.""" @callback def _async_describe_event(domain, event_name, describe_callback): """Teach logbook how to describe a new event.""" hass.data[DOMAIN][event_name] = (domain, describe_callback) platform.async_describe_events(hass, _async_describe_event) class LogbookView(HomeAssistantView): """Handle logbook view requests.""" url = "/api/logbook" name = "api:logbook" extra_urls = ["/api/logbook/{datetime}"] def __init__(self, config, filters, entities_filter): """Initialize the logbook view.""" self.config = config self.filters = filters self.entities_filter = entities_filter async def get(self, request, datetime=None): """Retrieve logbook entries.""" if datetime: datetime = dt_util.parse_datetime(datetime) if datetime is None: return self.json_message("Invalid datetime", HTTP_BAD_REQUEST) else: datetime = dt_util.start_of_local_day() period = request.query.get("period") if period is None: period = 1 else: period = int(period) 
entity_ids = request.query.get("entity") if entity_ids: try: entity_ids = cv.entity_ids(entity_ids) except vol.Invalid: raise InvalidEntityFormatError( f"Invalid entity id(s) encountered: {entity_ids}. " "Format should be <domain>.<object_id>" ) from vol.Invalid end_time = request.query.get("end_time") if end_time is None: start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1) end_day = start_day + timedelta(days=period) else: start_day = datetime end_day = dt_util.parse_datetime(end_time) if end_day is None: return self.json_message("Invalid end_time", HTTP_BAD_REQUEST) hass = request.app["hass"] entity_matches_only = "entity_matches_only" in request.query def json_events(): """Fetch events and generate JSON.""" return self.json( _get_events( hass, start_day, end_day, entity_ids, self.filters, self.entities_filter, entity_matches_only, ) ) return await hass.async_add_executor_job(json_events) def humanify(hass, events, entity_attr_cache, context_lookup): """Generate a converted list of events into Entry objects. Will try to group events if possible: - if 2+ sensor updates in GROUP_BY_MINUTES, show last - if Home Assistant stop and start happen in same minute call it restarted """ external_events = hass.data.get(DOMAIN, {}) # Group events in batches of GROUP_BY_MINUTES for _, g_events in groupby( events, lambda event: event.time_fired_minute // GROUP_BY_MINUTES ): events_batch = list(g_events) # Keep track of last sensor states last_sensor_event = {} # Group HA start/stop events # Maps minute of event to 1: stop, 2: stop + start start_stop_events = {} # Process events for event in events_batch: if event.event_type == EVENT_STATE_CHANGED: if event.domain in CONTINUOUS_DOMAINS: last_sensor_event[event.entity_id] = event elif event.event_type == EVENT_HOMEASSISTANT_STOP: if event.time_fired_minute in start_stop_events: continue start_stop_events[event.time_fired_minute] = 1 elif event.event_type == EVENT_HOMEASSISTANT_START: if event.time_fired_minute not in start_stop_events: continue start_stop_events[event.time_fired_minute] = 2 # Yield entries for event in events_batch: if event.event_type == EVENT_STATE_CHANGED: entity_id = event.entity_id domain = event.domain if ( domain in CONTINUOUS_DOMAINS and event != last_sensor_event[entity_id] ): # Skip all but the last sensor state continue data = { "when": event.time_fired_isoformat, "name": _entity_name_from_event( entity_id, event, entity_attr_cache ), "state": event.state, "entity_id": entity_id, } icon = event.attributes_icon if icon: data["icon"] = icon if event.context_user_id: data["context_user_id"] = event.context_user_id _augment_data_with_context( data, entity_id, event, context_lookup, entity_attr_cache, external_events, ) yield data elif event.event_type in external_events: domain, describe_event = external_events[event.event_type] data = describe_event(event) data["when"] = event.time_fired_isoformat data["domain"] = domain if event.context_user_id: data["context_user_id"] = event.context_user_id _augment_data_with_context( data, data.get(ATTR_ENTITY_ID), event, context_lookup, entity_attr_cache, external_events, ) yield data elif event.event_type == EVENT_HOMEASSISTANT_START: if start_stop_events.get(event.time_fired_minute) == 2: continue yield { "when": event.time_fired_isoformat, "name": "Home Assistant", "message": "started", "domain": HA_DOMAIN, } elif event.event_type == EVENT_HOMEASSISTANT_STOP: if start_stop_events.get(event.time_fired_minute) == 2: action = "restarted" else: action = "stopped" yield { "when": 
event.time_fired_isoformat, "name": "Home Assistant", "message": action, "domain": HA_DOMAIN, } elif event.event_type == EVENT_LOGBOOK_ENTRY: event_data = event.data domain = event_data.get(ATTR_DOMAIN) entity_id = event_data.get(ATTR_ENTITY_ID) if domain is None and entity_id is not None: try: domain = split_entity_id(str(entity_id))[0] except IndexError: pass data = { "when": event.time_fired_isoformat, "name": event_data.get(ATTR_NAME), "message": event_data.get(ATTR_MESSAGE), "domain": domain, "entity_id": entity_id, } if event.context_user_id: data["context_user_id"] = event.context_user_id _augment_data_with_context( data, entity_id, event, context_lookup, entity_attr_cache, external_events, ) yield data def _get_events( hass, start_day, end_day, entity_ids=None, filters=None, entities_filter=None, entity_matches_only=False, ): """Get events for a period of time.""" entity_attr_cache = EntityAttributeCache(hass) context_lookup = {None: None} def yield_events(query): """Yield Events that are not filtered away.""" for row in query.yield_per(1000): event = LazyEventPartialState(row) context_lookup.setdefault(event.context_id, event) if event.event_type == EVENT_CALL_SERVICE: continue if event.event_type == EVENT_STATE_CHANGED or _keep_event( hass, event, entities_filter ): yield event if entity_ids is not None: entities_filter = generate_filter([], entity_ids, [], []) with session_scope(hass=hass) as session: old_state = aliased(States, name="old_state") if entity_ids is not None: query = _generate_events_query_without_states(session) query = _apply_event_time_filter(query, start_day, end_day) query = _apply_event_types_filter( hass, query, ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED ) if entity_matches_only: # When entity_matches_only is provided, contexts and events that do not # contain the entity_ids are not included in the logbook response. 
query = _apply_event_entity_id_matchers(query, entity_ids) query = query.union_all( _generate_states_query( session, start_day, end_day, old_state, entity_ids ) ) else: query = _generate_events_query(session) query = _apply_event_time_filter(query, start_day, end_day) query = _apply_events_types_and_states_filter( hass, query, old_state ).filter( (States.last_updated == States.last_changed) | (Events.event_type != EVENT_STATE_CHANGED) ) if filters: query = query.filter( filters.entity_filter() | (Events.event_type != EVENT_STATE_CHANGED) ) query = query.order_by(Events.time_fired) return list( humanify(hass, yield_events(query), entity_attr_cache, context_lookup) ) def _generate_events_query(session): return session.query( *EVENT_COLUMNS, States.state, States.entity_id, States.domain, States.attributes, ) def _generate_events_query_without_states(session): return session.query( *EVENT_COLUMNS, literal(None).label("state"), literal(None).label("entity_id"), literal(None).label("domain"), literal(None).label("attributes"), ) def _generate_states_query(session, start_day, end_day, old_state, entity_ids): return ( _generate_events_query(session) .outerjoin(Events, (States.event_id == Events.event_id)) .outerjoin(old_state, (States.old_state_id == old_state.state_id)) .filter(_missing_state_matcher(old_state)) .filter(_continuous_entity_matcher()) .filter((States.last_updated > start_day) & (States.last_updated < end_day)) .filter( (States.last_updated == States.last_changed) & States.entity_id.in_(entity_ids) ) ) def _apply_events_types_and_states_filter(hass, query, old_state): events_query = ( query.outerjoin(States, (Events.event_id == States.event_id)) .outerjoin(old_state, (States.old_state_id == old_state.state_id)) .filter( (Events.event_type != EVENT_STATE_CHANGED) | _missing_state_matcher(old_state) ) .filter( (Events.event_type != EVENT_STATE_CHANGED) | _continuous_entity_matcher() ) ) return _apply_event_types_filter(hass, events_query, ALL_EVENT_TYPES) def _missing_state_matcher(old_state): # The below removes state change events that do not have # and old_state or the old_state is missing (newly added entities) # or the new_state is missing (removed entities) return sqlalchemy.and_( old_state.state_id.isnot(None), (States.state != old_state.state), States.state.isnot(None), ) def _continuous_entity_matcher(): # # Prefilter out continuous domains that have # ATTR_UNIT_OF_MEASUREMENT as its much faster in sql. # return sqlalchemy.or_( sqlalchemy.not_(States.domain.in_(CONTINUOUS_DOMAINS)), sqlalchemy.not_(States.attributes.contains(UNIT_OF_MEASUREMENT_JSON)), ) def _apply_event_time_filter(events_query, start_day, end_day): return events_query.filter( (Events.time_fired > start_day) & (Events.time_fired < end_day) ) def _apply_event_types_filter(hass, query, event_types): return query.filter( Events.event_type.in_(event_types + list(hass.data.get(DOMAIN, {}))) ) def _apply_event_entity_id_matchers(events_query, entity_ids): return events_query.filter( sqlalchemy.or_( *[ Events.event_data.contains(ENTITY_ID_JSON_TEMPLATE.format(entity_id)) for entity_id in entity_ids ] ) ) def _keep_event(hass, event, entities_filter): if event.event_type in HOMEASSISTANT_EVENTS: return entities_filter is None or entities_filter(HA_DOMAIN_ENTITY_ID) entity_id = event.data_entity_id if entity_id: return entities_filter is None or entities_filter(entity_id) if event.event_type in hass.data[DOMAIN]: # If the entity_id isn't described, use the domain that describes # the event for filtering. 
domain = hass.data[DOMAIN][event.event_type][0] else: domain = event.data_domain if domain is None: return False return entities_filter is None or entities_filter(f"{domain}.") def _augment_data_with_context( data, entity_id, event, context_lookup, entity_attr_cache, external_events ): context_event = context_lookup.get(event.context_id) if not context_event: return if event == context_event: # This is the first event with the given ID. Was it directly caused by # a parent event? if event.context_parent_id: context_event = context_lookup.get(event.context_parent_id) # Ensure the (parent) context_event exists and is not the root cause of # this log entry. if not context_event or event == context_event: return event_type = context_event.event_type context_entity_id = context_event.entity_id # State change if context_entity_id: data["context_entity_id"] = context_entity_id data["context_entity_id_name"] = _entity_name_from_event( context_entity_id, context_event, entity_attr_cache ) data["context_event_type"] = event_type return event_data = context_event.data # Call service if event_type == EVENT_CALL_SERVICE: event_data = context_event.data data["context_domain"] = event_data.get(ATTR_DOMAIN) data["context_service"] = event_data.get(ATTR_SERVICE) data["context_event_type"] = event_type return if not entity_id: return attr_entity_id = event_data.get(ATTR_ENTITY_ID) if not attr_entity_id or ( event_type in SCRIPT_AUTOMATION_EVENTS and attr_entity_id == entity_id ): return if context_event == event: return data["context_entity_id"] = attr_entity_id data["context_entity_id_name"] = _entity_name_from_event( attr_entity_id, context_event, entity_attr_cache ) data["context_event_type"] = event_type if event_type in external_events: domain, describe_event = external_events[event_type] data["context_domain"] = domain name = describe_event(context_event).get(ATTR_NAME) if name: data["context_name"] = name def _entity_name_from_event(entity_id, event, entity_attr_cache): """Extract the entity name from the event using the cache if possible.""" return entity_attr_cache.get( entity_id, ATTR_FRIENDLY_NAME, event ) or split_entity_id(entity_id)[1].replace("_", " ") class LazyEventPartialState: """A lazy version of core Event with limited State joined in.""" __slots__ = [ "_row", "_event_data", "_time_fired_isoformat", "_attributes", "event_type", "entity_id", "state", "domain", "context_id", "context_user_id", "context_parent_id", "time_fired_minute", ] def __init__(self, row): """Init the lazy event.""" self._row = row self._event_data = None self._time_fired_isoformat = None self._attributes = None self.event_type = self._row.event_type self.entity_id = self._row.entity_id self.state = self._row.state self.domain = self._row.domain self.context_id = self._row.context_id self.context_user_id = self._row.context_user_id self.context_parent_id = self._row.context_parent_id self.time_fired_minute = self._row.time_fired.minute @property def attributes_icon(self): """Extract the icon from the decoded attributes or json.""" if self._attributes: return self._attributes.get(ATTR_ICON) result = ICON_JSON_EXTRACT.search(self._row.attributes) return result and result.group(1) @property def data_entity_id(self): """Extract the entity id from the decoded data or json.""" if self._event_data: return self._event_data.get(ATTR_ENTITY_ID) result = ENTITY_ID_JSON_EXTRACT.search(self._row.event_data) return result and result.group(1) @property def data_domain(self): """Extract the domain from the decoded data or json.""" 
if self._event_data: return self._event_data.get(ATTR_DOMAIN) result = DOMAIN_JSON_EXTRACT.search(self._row.event_data) return result and result.group(1) @property def attributes(self): """State attributes.""" if not self._attributes: if ( self._row.attributes is None or self._row.attributes == EMPTY_JSON_OBJECT ): self._attributes = {} else: self._attributes = json.loads(self._row.attributes) return self._attributes @property def data(self): """Event data.""" if not self._event_data: if self._row.event_data == EMPTY_JSON_OBJECT: self._event_data = {} else: self._event_data = json.loads(self._row.event_data) return self._event_data @property def time_fired_isoformat(self): """Time event was fired in utc isoformat.""" if not self._time_fired_isoformat: self._time_fired_isoformat = process_timestamp_to_utc_isoformat( self._row.time_fired or dt_util.utcnow() ) return self._time_fired_isoformat class EntityAttributeCache: """A cache to lookup static entity_id attribute. This class should not be used to lookup attributes that are expected to change state. """ def __init__(self, hass): """Init the cache.""" self._hass = hass self._cache = {} def get(self, entity_id, attribute, event): """Lookup an attribute for an entity or get it from the cache.""" if entity_id in self._cache: if attribute in self._cache[entity_id]: return self._cache[entity_id][attribute] else: self._cache[entity_id] = {} current_state = self._hass.states.get(entity_id) if current_state: # Try the current state as its faster than decoding the # attributes self._cache[entity_id][attribute] = current_state.attributes.get(attribute) else: # If the entity has been removed, decode the attributes # instead self._cache[entity_id][attribute] = event.attributes.get(attribute) return self._cache[entity_id][attribute]
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/logbook/__init__.py
"""Support for Envisalink-based alarm control panels (Honeywell/DSC).""" import logging import voluptuous as vol from homeassistant.components.alarm_control_panel import ( FORMAT_NUMBER, AlarmControlPanelEntity, ) from homeassistant.components.alarm_control_panel.const import ( SUPPORT_ALARM_ARM_AWAY, SUPPORT_ALARM_ARM_HOME, SUPPORT_ALARM_ARM_NIGHT, SUPPORT_ALARM_TRIGGER, ) from homeassistant.const import ( ATTR_ENTITY_ID, CONF_CODE, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_NIGHT, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import ( CONF_PANIC, CONF_PARTITIONNAME, DATA_EVL, DOMAIN, PARTITION_SCHEMA, SIGNAL_KEYPAD_UPDATE, SIGNAL_PARTITION_UPDATE, EnvisalinkDevice, ) _LOGGER = logging.getLogger(__name__) SERVICE_ALARM_KEYPRESS = "alarm_keypress" ATTR_KEYPRESS = "keypress" ALARM_KEYPRESS_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(ATTR_KEYPRESS): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Perform the setup for Envisalink alarm panels.""" configured_partitions = discovery_info["partitions"] code = discovery_info[CONF_CODE] panic_type = discovery_info[CONF_PANIC] devices = [] for part_num in configured_partitions: device_config_data = PARTITION_SCHEMA(configured_partitions[part_num]) device = EnvisalinkAlarm( hass, part_num, device_config_data[CONF_PARTITIONNAME], code, panic_type, hass.data[DATA_EVL].alarm_state["partition"][part_num], hass.data[DATA_EVL], ) devices.append(device) async_add_entities(devices) @callback def alarm_keypress_handler(service): """Map services to methods on Alarm.""" entity_ids = service.data.get(ATTR_ENTITY_ID) keypress = service.data.get(ATTR_KEYPRESS) target_devices = [ device for device in devices if device.entity_id in entity_ids ] for device in target_devices: device.async_alarm_keypress(keypress) hass.services.async_register( DOMAIN, SERVICE_ALARM_KEYPRESS, alarm_keypress_handler, schema=ALARM_KEYPRESS_SCHEMA, ) return True class EnvisalinkAlarm(EnvisalinkDevice, AlarmControlPanelEntity): """Representation of an Envisalink-based alarm panel.""" def __init__( self, hass, partition_number, alarm_name, code, panic_type, info, controller ): """Initialize the alarm panel.""" self._partition_number = partition_number self._code = code self._panic_type = panic_type _LOGGER.debug("Setting up alarm: %s", alarm_name) super().__init__(alarm_name, info, controller) async def async_added_to_hass(self): """Register callbacks.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_KEYPAD_UPDATE, self._update_callback ) ) self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_PARTITION_UPDATE, self._update_callback ) ) @callback def _update_callback(self, partition): """Update Home Assistant state, if needed.""" if partition is None or int(partition) == self._partition_number: self.async_write_ha_state() @property def code_format(self): """Regex for code format or None if no code is required.""" if self._code: return None return FORMAT_NUMBER @property def state(self): """Return the state of the device.""" state = STATE_UNKNOWN if self._info["status"]["alarm"]: state = STATE_ALARM_TRIGGERED elif self._info["status"]["armed_zero_entry_delay"]: state = STATE_ALARM_ARMED_NIGHT elif self._info["status"]["armed_away"]: state = 
STATE_ALARM_ARMED_AWAY elif self._info["status"]["armed_stay"]: state = STATE_ALARM_ARMED_HOME elif self._info["status"]["exit_delay"]: state = STATE_ALARM_PENDING elif self._info["status"]["entry_delay"]: state = STATE_ALARM_PENDING elif self._info["status"]["alpha"]: state = STATE_ALARM_DISARMED return state @property def supported_features(self) -> int: """Return the list of supported features.""" return ( SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY | SUPPORT_ALARM_ARM_NIGHT | SUPPORT_ALARM_TRIGGER ) async def async_alarm_disarm(self, code=None): """Send disarm command.""" if code: self.hass.data[DATA_EVL].disarm_partition(str(code), self._partition_number) else: self.hass.data[DATA_EVL].disarm_partition( str(self._code), self._partition_number ) async def async_alarm_arm_home(self, code=None): """Send arm home command.""" if code: self.hass.data[DATA_EVL].arm_stay_partition( str(code), self._partition_number ) else: self.hass.data[DATA_EVL].arm_stay_partition( str(self._code), self._partition_number ) async def async_alarm_arm_away(self, code=None): """Send arm away command.""" if code: self.hass.data[DATA_EVL].arm_away_partition( str(code), self._partition_number ) else: self.hass.data[DATA_EVL].arm_away_partition( str(self._code), self._partition_number ) async def async_alarm_trigger(self, code=None): """Alarm trigger command. Will be used to trigger a panic alarm.""" self.hass.data[DATA_EVL].panic_alarm(self._panic_type) async def async_alarm_arm_night(self, code=None): """Send arm night command.""" self.hass.data[DATA_EVL].arm_night_partition( str(code) if code else str(self._code), self._partition_number ) @callback def async_alarm_keypress(self, keypress=None): """Send custom keypress.""" if keypress: self.hass.data[DATA_EVL].keypresses_to_partition( self._partition_number, keypress )
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/envisalink/alarm_control_panel.py
"""Support for file notification.""" import os import voluptuous as vol from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService, ) from homeassistant.const import CONF_FILENAME import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util CONF_TIMESTAMP = "timestamp" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_FILENAME): cv.string, vol.Optional(CONF_TIMESTAMP, default=False): cv.boolean, } ) def get_service(hass, config, discovery_info=None): """Get the file notification service.""" filename = config[CONF_FILENAME] timestamp = config[CONF_TIMESTAMP] return FileNotificationService(hass, filename, timestamp) class FileNotificationService(BaseNotificationService): """Implement the notification service for the File service.""" def __init__(self, hass, filename, add_timestamp): """Initialize the service.""" self.filepath = os.path.join(hass.config.config_dir, filename) self.add_timestamp = add_timestamp def send_message(self, message="", **kwargs): """Send a message to a file.""" with open(self.filepath, "a") as file: if os.stat(self.filepath).st_size == 0: title = f"{kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)} notifications (Log started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" file.write(title) if self.add_timestamp: text = f"{dt_util.utcnow().isoformat()} {message}\n" else: text = f"{message}\n" file.write(text)
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/file/notify.py
"""Config flow for Islamic Prayer Times integration.""" import voluptuous as vol from homeassistant import config_entries from homeassistant.core import callback # pylint: disable=unused-import from .const import CALC_METHODS, CONF_CALC_METHOD, DEFAULT_CALC_METHOD, DOMAIN, NAME class IslamicPrayerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle the Islamic Prayer config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return IslamicPrayerOptionsFlowHandler(config_entry) async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") if user_input is None: return self.async_show_form(step_id="user") return self.async_create_entry(title=NAME, data=user_input) async def async_step_import(self, import_config): """Import from config.""" return await self.async_step_user(user_input=import_config) class IslamicPrayerOptionsFlowHandler(config_entries.OptionsFlow): """Handle Islamic Prayer client options.""" def __init__(self, config_entry): """Initialize options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Manage options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) options = { vol.Optional( CONF_CALC_METHOD, default=self.config_entry.options.get( CONF_CALC_METHOD, DEFAULT_CALC_METHOD ), ): vol.In(CALC_METHODS) } return self.async_show_form(step_id="init", data_schema=vol.Schema(options))
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/islamic_prayer_times/config_flow.py
"""ONVIF event abstraction.""" import asyncio import datetime as dt from typing import Callable, Dict, List, Optional, Set from httpx import RemoteProtocolError, TransportError from onvif import ONVIFCamera, ONVIFService from zeep.exceptions import Fault from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback from homeassistant.helpers.event import async_call_later from homeassistant.util import dt as dt_util from .const import LOGGER from .models import Event from .parsers import PARSERS UNHANDLED_TOPICS = set() SUBSCRIPTION_ERRORS = ( Fault, asyncio.TimeoutError, TransportError, ) class EventManager: """ONVIF Event Manager.""" def __init__(self, hass: HomeAssistant, device: ONVIFCamera, unique_id: str): """Initialize event manager.""" self.hass: HomeAssistant = hass self.device: ONVIFCamera = device self.unique_id: str = unique_id self.started: bool = False self._subscription: ONVIFService = None self._events: Dict[str, Event] = {} self._listeners: List[CALLBACK_TYPE] = [] self._unsub_refresh: Optional[CALLBACK_TYPE] = None super().__init__() @property def platforms(self) -> Set[str]: """Return platforms to setup.""" return {event.platform for event in self._events.values()} @callback def async_add_listener(self, update_callback: CALLBACK_TYPE) -> Callable[[], None]: """Listen for data updates.""" # This is the first listener, set up polling. if not self._listeners: self.async_schedule_pull() self._listeners.append(update_callback) @callback def remove_listener() -> None: """Remove update listener.""" self.async_remove_listener(update_callback) return remove_listener @callback def async_remove_listener(self, update_callback: CALLBACK_TYPE) -> None: """Remove data update.""" if update_callback in self._listeners: self._listeners.remove(update_callback) if not self._listeners and self._unsub_refresh: self._unsub_refresh() self._unsub_refresh = None async def async_start(self) -> bool: """Start polling events.""" if await self.device.create_pullpoint_subscription(): # Create subscription manager self._subscription = self.device.create_subscription_service( "PullPointSubscription" ) # Renew immediately await self.async_renew() # Initialize events pullpoint = self.device.create_pullpoint_service() try: await pullpoint.SetSynchronizationPoint() except SUBSCRIPTION_ERRORS: pass response = await pullpoint.PullMessages( {"MessageLimit": 100, "Timeout": dt.timedelta(seconds=5)} ) # Parse event initialization await self.async_parse_messages(response.NotificationMessage) self.started = True return True return False async def async_stop(self) -> None: """Unsubscribe from events.""" self._listeners = [] self.started = False if not self._subscription: return await self._subscription.Unsubscribe() self._subscription = None async def async_restart(self, _now: dt = None) -> None: """Restart the subscription assuming the camera rebooted.""" if not self.started: return if self._subscription: try: await self._subscription.Unsubscribe() except SUBSCRIPTION_ERRORS: pass # Ignored. The subscription may no longer exist. self._subscription = None try: restarted = await self.async_start() except SUBSCRIPTION_ERRORS: restarted = False if not restarted: LOGGER.warning( "Failed to restart ONVIF PullPoint subscription for '%s'. 
Retrying...", self.unique_id, ) # Try again in a minute self._unsub_refresh = async_call_later(self.hass, 60, self.async_restart) elif self._listeners: LOGGER.debug( "Restarted ONVIF PullPoint subscription for '%s'", self.unique_id ) self.async_schedule_pull() async def async_renew(self) -> None: """Renew subscription.""" if not self._subscription: return termination_time = ( (dt_util.utcnow() + dt.timedelta(days=1)) .isoformat(timespec="seconds") .replace("+00:00", "Z") ) await self._subscription.Renew(termination_time) def async_schedule_pull(self) -> None: """Schedule async_pull_messages to run.""" self._unsub_refresh = async_call_later(self.hass, 1, self.async_pull_messages) async def async_pull_messages(self, _now: dt = None) -> None: """Pull messages from device.""" if self.hass.state == CoreState.running: try: pullpoint = self.device.create_pullpoint_service() response = await pullpoint.PullMessages( {"MessageLimit": 100, "Timeout": dt.timedelta(seconds=60)} ) # Renew subscription if less than two hours is left if ( dt_util.as_utc(response.TerminationTime) - dt_util.utcnow() ).total_seconds() < 7200: await self.async_renew() except RemoteProtocolError: # Likley a shutdown event, nothing to see here return except SUBSCRIPTION_ERRORS as err: LOGGER.warning( "Failed to fetch ONVIF PullPoint subscription messages for '%s': %s", self.unique_id, err, ) # Treat errors as if the camera restarted. Assume that the pullpoint # subscription is no longer valid. self._unsub_refresh = None await self.async_restart() return # Parse response await self.async_parse_messages(response.NotificationMessage) # Update entities for update_callback in self._listeners: update_callback() # Reschedule another pull if self._listeners: self.async_schedule_pull() # pylint: disable=protected-access async def async_parse_messages(self, messages) -> None: """Parse notification message.""" for msg in messages: # Guard against empty message if not msg.Topic: continue topic = msg.Topic._value_1 parser = PARSERS.get(topic) if not parser: if topic not in UNHANDLED_TOPICS: LOGGER.info( "No registered handler for event from %s: %s", self.unique_id, msg, ) UNHANDLED_TOPICS.add(topic) continue event = await parser(self.unique_id, msg) if not event: LOGGER.warning("Unable to parse event from %s: %s", self.unique_id, msg) return self._events[event.uid] = event def get_uid(self, uid) -> Event: """Retrieve event for given id.""" return self._events[uid] def get_platform(self, platform) -> List[Event]: """Retrieve events for given platform.""" return [event for event in self._events.values() if event.platform == platform]
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/onvif/event.py
"""Support for USCIS Case Status.""" from datetime import timedelta import logging import uscisstatus import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_NAME from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "USCIS" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Required("case_id"): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the platform in Home Assistant and Case Information.""" uscis = UscisSensor(config["case_id"], config[CONF_NAME]) uscis.update() if uscis.valid_case_id: add_entities([uscis]) else: _LOGGER.error("Setup USCIS Sensor Fail check if your Case ID is Valid") class UscisSensor(Entity): """USCIS Sensor will check case status on daily basis.""" MIN_TIME_BETWEEN_UPDATES = timedelta(hours=24) CURRENT_STATUS = "current_status" LAST_CASE_UPDATE = "last_update_date" def __init__(self, case, name): """Initialize the sensor.""" self._state = None self._case_id = case self._attributes = None self.valid_case_id = None self._name = name @property def name(self): """Return the name.""" return self._name @property def state(self): """Return the state.""" return self._state @property def device_state_attributes(self): """Return the state attributes.""" return self._attributes @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Fetch data from the USCIS website and update state attributes.""" try: status = uscisstatus.get_case_status(self._case_id) self._attributes = {self.CURRENT_STATUS: status["status"]} self._state = status["date"] self.valid_case_id = True except ValueError: _LOGGER("Please Check that you have valid USCIS case id") self.valid_case_id = False
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/uscis/sensor.py
"""Config flow for Rollease Acmeda Automate Pulse Hub.""" import asyncio from typing import Dict, Optional import aiopulse import async_timeout import voluptuous as vol from homeassistant import config_entries from .const import DOMAIN # pylint: disable=unused-import class AcmedaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a Acmeda config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL def __init__(self): """Initialize the config flow.""" self.discovered_hubs: Optional[Dict[str, aiopulse.Hub]] = None async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if ( user_input is not None and self.discovered_hubs is not None and user_input["id"] in self.discovered_hubs ): return await self.async_create(self.discovered_hubs[user_input["id"]]) # Already configured hosts already_configured = { entry.unique_id for entry in self._async_current_entries() } hubs = [] try: with async_timeout.timeout(5): async for hub in aiopulse.Hub.discover(): if hub.id not in already_configured: hubs.append(hub) except asyncio.TimeoutError: pass if len(hubs) == 0: return self.async_abort(reason="no_devices_found") if len(hubs) == 1: return await self.async_create(hubs[0]) self.discovered_hubs = {hub.id: hub for hub in hubs} return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required("id"): vol.In( {hub.id: f"{hub.id} {hub.host}" for hub in hubs} ) } ), ) async def async_create(self, hub): """Create the Acmeda Hub entry.""" await self.async_set_unique_id(hub.id, raise_on_progress=False) return self.async_create_entry(title=hub.id, data={"host": hub.host})
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/acmeda/config_flow.py
"""A sensor platform that give you information about the next space launch.""" from datetime import timedelta import logging from typing import Optional from pylaunches import PyLaunches, PyLaunchesException import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from .const import ( ATTR_AGENCY, ATTR_AGENCY_COUNTRY_CODE, ATTR_LAUNCH_TIME, ATTR_STREAM, ATTRIBUTION, DEFAULT_NAME, ) _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(hours=1) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string} ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Create the launch sensor.""" name = config[CONF_NAME] session = async_get_clientsession(hass) launches = PyLaunches(session) async_add_entities([LaunchLibrarySensor(launches, name)], True) class LaunchLibrarySensor(Entity): """Representation of a launch_library Sensor.""" def __init__(self, launches: PyLaunches, name: str) -> None: """Initialize the sensor.""" self.launches = launches self.next_launch = None self._name = name async def async_update(self) -> None: """Get the latest data.""" try: launches = await self.launches.upcoming_launches() except PyLaunchesException as exception: _LOGGER.error("Error getting data, %s", exception) else: if launches: self.next_launch = launches[0] @property def name(self) -> str: """Return the name of the sensor.""" return self._name @property def state(self) -> Optional[str]: """Return the state of the sensor.""" if self.next_launch: return self.next_launch.name return None @property def icon(self) -> str: """Return the icon of the sensor.""" return "mdi:rocket" @property def device_state_attributes(self) -> Optional[dict]: """Return attributes for the sensor.""" if self.next_launch: return { ATTR_LAUNCH_TIME: self.next_launch.net, ATTR_AGENCY: self.next_launch.launch_service_provider.name, ATTR_AGENCY_COUNTRY_CODE: self.next_launch.pad.location.country_code, ATTR_STREAM: self.next_launch.webcast_live, ATTR_ATTRIBUTION: ATTRIBUTION, } return None
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/launch_library/sensor.py
"""Reproduce an Timer state.""" import asyncio import logging from typing import Any, Dict, Iterable, Optional from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import Context, State from homeassistant.helpers.typing import HomeAssistantType from . import ( ATTR_DURATION, DOMAIN, SERVICE_CANCEL, SERVICE_PAUSE, SERVICE_START, STATUS_ACTIVE, STATUS_IDLE, STATUS_PAUSED, ) _LOGGER = logging.getLogger(__name__) VALID_STATES = {STATUS_IDLE, STATUS_ACTIVE, STATUS_PAUSED} async def _async_reproduce_state( hass: HomeAssistantType, state: State, *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce a single state.""" cur_state = hass.states.get(state.entity_id) if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return if state.state not in VALID_STATES: _LOGGER.warning( "Invalid state specified for %s: %s", state.entity_id, state.state ) return # Return if we are already at the right state. if cur_state.state == state.state and cur_state.attributes.get( ATTR_DURATION ) == state.attributes.get(ATTR_DURATION): return service_data = {ATTR_ENTITY_ID: state.entity_id} if state.state == STATUS_ACTIVE: service = SERVICE_START if ATTR_DURATION in state.attributes: service_data[ATTR_DURATION] = state.attributes[ATTR_DURATION] elif state.state == STATUS_PAUSED: service = SERVICE_PAUSE elif state.state == STATUS_IDLE: service = SERVICE_CANCEL await hass.services.async_call( DOMAIN, service, service_data, context=context, blocking=True ) async def async_reproduce_states( hass: HomeAssistantType, states: Iterable[State], *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce Timer states.""" await asyncio.gather( *( _async_reproduce_state( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) )
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/timer/reproduce_state.py
"""Support for Neato sensors.""" from datetime import timedelta import logging from pybotvac.exceptions import NeatoRobotException from homeassistant.components.sensor import DEVICE_CLASS_BATTERY from homeassistant.const import PERCENTAGE from homeassistant.helpers.entity import Entity from .const import NEATO_DOMAIN, NEATO_LOGIN, NEATO_ROBOTS, SCAN_INTERVAL_MINUTES _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES) BATTERY = "Battery" async def async_setup_entry(hass, entry, async_add_entities): """Set up the Neato sensor using config entry.""" dev = [] neato = hass.data.get(NEATO_LOGIN) for robot in hass.data[NEATO_ROBOTS]: dev.append(NeatoSensor(neato, robot)) if not dev: return _LOGGER.debug("Adding robots for sensors %s", dev) async_add_entities(dev, True) class NeatoSensor(Entity): """Neato sensor.""" def __init__(self, neato, robot): """Initialize Neato sensor.""" self.robot = robot self._available = False self._robot_name = f"{self.robot.name} {BATTERY}" self._robot_serial = self.robot.serial self._state = None def update(self): """Update Neato Sensor.""" try: self._state = self.robot.state except NeatoRobotException as ex: if self._available: _LOGGER.error( "Neato sensor connection error for '%s': %s", self.entity_id, ex ) self._state = None self._available = False return self._available = True _LOGGER.debug("self._state=%s", self._state) @property def name(self): """Return the name of this sensor.""" return self._robot_name @property def unique_id(self): """Return unique ID.""" return self._robot_serial @property def device_class(self): """Return the device class.""" return DEVICE_CLASS_BATTERY @property def available(self): """Return availability.""" return self._available @property def state(self): """Return the state.""" return self._state["details"]["charge"] @property def unit_of_measurement(self): """Return unit of measurement.""" return PERCENTAGE @property def device_info(self): """Device info for neato robot.""" return {"identifiers": {(NEATO_DOMAIN, self._robot_serial)}}
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/neato/sensor.py
"""Support for Synology DSM binary sensors.""" from typing import Dict from homeassistant.components.binary_sensor import BinarySensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DISKS from homeassistant.helpers.typing import HomeAssistantType from . import SynologyDSMDeviceEntity, SynologyDSMDispatcherEntity from .const import ( DOMAIN, SECURITY_BINARY_SENSORS, STORAGE_DISK_BINARY_SENSORS, SYNO_API, UPGRADE_BINARY_SENSORS, ) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the Synology NAS binary sensor.""" api = hass.data[DOMAIN][entry.unique_id][SYNO_API] entities = [ SynoDSMSecurityBinarySensor( api, sensor_type, SECURITY_BINARY_SENSORS[sensor_type] ) for sensor_type in SECURITY_BINARY_SENSORS ] entities += [ SynoDSMUpgradeBinarySensor( api, sensor_type, UPGRADE_BINARY_SENSORS[sensor_type] ) for sensor_type in UPGRADE_BINARY_SENSORS ] # Handle all disks if api.storage.disks_ids: for disk in entry.data.get(CONF_DISKS, api.storage.disks_ids): entities += [ SynoDSMStorageBinarySensor( api, sensor_type, STORAGE_DISK_BINARY_SENSORS[sensor_type], disk ) for sensor_type in STORAGE_DISK_BINARY_SENSORS ] async_add_entities(entities) class SynoDSMSecurityBinarySensor(SynologyDSMDispatcherEntity, BinarySensorEntity): """Representation a Synology Security binary sensor.""" @property def is_on(self) -> bool: """Return the state.""" return getattr(self._api.security, self.entity_type) != "safe" @property def available(self) -> bool: """Return True if entity is available.""" return bool(self._api.security) @property def device_state_attributes(self) -> Dict[str, str]: """Return security checks details.""" return self._api.security.status_by_check class SynoDSMStorageBinarySensor(SynologyDSMDeviceEntity, BinarySensorEntity): """Representation a Synology Storage binary sensor.""" @property def is_on(self) -> bool: """Return the state.""" return getattr(self._api.storage, self.entity_type)(self._device_id) class SynoDSMUpgradeBinarySensor(SynologyDSMDispatcherEntity, BinarySensorEntity): """Representation a Synology Upgrade binary sensor.""" @property def is_on(self) -> bool: """Return the state.""" return getattr(self._api.upgrade, self.entity_type) @property def available(self) -> bool: """Return True if entity is available.""" return bool(self._api.upgrade)
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/synology_dsm/binary_sensor.py
"""Support for Powerview scenes from a Powerview hub.""" from typing import Any from aiopvapi.resources.scene import Scene as PvScene import voluptuous as vol from homeassistant.components.scene import Scene from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_HOST, CONF_PLATFORM import homeassistant.helpers.config_validation as cv from .const import ( COORDINATOR, DEVICE_INFO, DOMAIN, HUB_ADDRESS, PV_API, PV_ROOM_DATA, PV_SCENE_DATA, ROOM_NAME_UNICODE, STATE_ATTRIBUTE_ROOM_NAME, ) from .entity import HDEntity PLATFORM_SCHEMA = vol.Schema( {vol.Required(CONF_PLATFORM): DOMAIN, vol.Required(HUB_ADDRESS): cv.string} ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Import platform from yaml.""" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: config[HUB_ADDRESS]}, ) ) async def async_setup_entry(hass, entry, async_add_entities): """Set up powerview scene entries.""" pv_data = hass.data[DOMAIN][entry.entry_id] room_data = pv_data[PV_ROOM_DATA] scene_data = pv_data[PV_SCENE_DATA] pv_request = pv_data[PV_API] coordinator = pv_data[COORDINATOR] device_info = pv_data[DEVICE_INFO] pvscenes = ( PowerViewScene( PvScene(raw_scene, pv_request), room_data, coordinator, device_info ) for scene_id, raw_scene in scene_data.items() ) async_add_entities(pvscenes) class PowerViewScene(HDEntity, Scene): """Representation of a Powerview scene.""" def __init__(self, scene, room_data, coordinator, device_info): """Initialize the scene.""" super().__init__(coordinator, device_info, scene.id) self._scene = scene self._room_name = room_data.get(scene.room_id, {}).get(ROOM_NAME_UNICODE, "") @property def name(self): """Return the name of the scene.""" return self._scene.name @property def device_state_attributes(self): """Return the state attributes.""" return {STATE_ATTRIBUTE_ROOM_NAME: self._room_name} @property def icon(self): """Icon to use in the frontend.""" return "mdi:blinds" async def async_activate(self, **kwargs: Any) -> None: """Activate scene. Try to get entities into requested state.""" await self._scene.activate()
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/hunterdouglas_powerview/scene.py
"""The Global Disaster Alert and Coordination System (GDACS) integration.""" import asyncio from datetime import timedelta import logging from aio_georss_gdacs import GdacsFeedManager import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_SCAN_INTERVAL, CONF_UNIT_SYSTEM_IMPERIAL, LENGTH_MILES, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval from homeassistant.util.unit_system import METRIC_SYSTEM from .const import ( CONF_CATEGORIES, DEFAULT_RADIUS, DEFAULT_SCAN_INTERVAL, DOMAIN, FEED, PLATFORMS, VALID_CATEGORIES, ) _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude, vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude, vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): vol.Coerce(float), vol.Optional( CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL ): cv.time_period, vol.Optional(CONF_CATEGORIES, default=[]): vol.All( cv.ensure_list, [vol.In(VALID_CATEGORIES)] ), } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the GDACS component.""" if DOMAIN not in config: return True conf = config[DOMAIN] latitude = conf.get(CONF_LATITUDE, hass.config.latitude) longitude = conf.get(CONF_LONGITUDE, hass.config.longitude) scan_interval = conf[CONF_SCAN_INTERVAL] categories = conf[CONF_CATEGORIES] hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data={ CONF_LATITUDE: latitude, CONF_LONGITUDE: longitude, CONF_RADIUS: conf[CONF_RADIUS], CONF_SCAN_INTERVAL: scan_interval, CONF_CATEGORIES: categories, }, ) ) return True async def async_setup_entry(hass, config_entry): """Set up the GDACS component as config entry.""" hass.data.setdefault(DOMAIN, {}) feeds = hass.data[DOMAIN].setdefault(FEED, {}) radius = config_entry.data[CONF_RADIUS] if hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL: radius = METRIC_SYSTEM.length(radius, LENGTH_MILES) # Create feed entity manager for all platforms. 
manager = GdacsFeedEntityManager(hass, config_entry, radius) feeds[config_entry.entry_id] = manager _LOGGER.debug("Feed entity manager added for %s", config_entry.entry_id) await manager.async_init() return True async def async_unload_entry(hass, config_entry): """Unload an GDACS component config entry.""" manager = hass.data[DOMAIN][FEED].pop(config_entry.entry_id) await manager.async_stop() await asyncio.wait( [ hass.config_entries.async_forward_entry_unload(config_entry, domain) for domain in PLATFORMS ] ) return True class GdacsFeedEntityManager: """Feed Entity Manager for GDACS feed.""" def __init__(self, hass, config_entry, radius_in_km): """Initialize the Feed Entity Manager.""" self._hass = hass self._config_entry = config_entry coordinates = ( config_entry.data[CONF_LATITUDE], config_entry.data[CONF_LONGITUDE], ) categories = config_entry.data[CONF_CATEGORIES] websession = aiohttp_client.async_get_clientsession(hass) self._feed_manager = GdacsFeedManager( websession, self._generate_entity, self._update_entity, self._remove_entity, coordinates, filter_radius=radius_in_km, filter_categories=categories, status_async_callback=self._status_update, ) self._config_entry_id = config_entry.entry_id self._scan_interval = timedelta(seconds=config_entry.data[CONF_SCAN_INTERVAL]) self._track_time_remove_callback = None self._status_info = None self.listeners = [] async def async_init(self): """Schedule initial and regular updates based on configured time interval.""" for domain in PLATFORMS: self._hass.async_create_task( self._hass.config_entries.async_forward_entry_setup( self._config_entry, domain ) ) async def update(event_time): """Update.""" await self.async_update() # Trigger updates at regular intervals. self._track_time_remove_callback = async_track_time_interval( self._hass, update, self._scan_interval ) _LOGGER.debug("Feed entity manager initialized") async def async_update(self): """Refresh data.""" await self._feed_manager.update() _LOGGER.debug("Feed entity manager updated") async def async_stop(self): """Stop this feed entity manager from refreshing.""" for unsub_dispatcher in self.listeners: unsub_dispatcher() self.listeners = [] if self._track_time_remove_callback: self._track_time_remove_callback() _LOGGER.debug("Feed entity manager stopped") @callback def async_event_new_entity(self): """Return manager specific event to signal new entity.""" return f"gdacs_new_geolocation_{self._config_entry_id}" def get_entry(self, external_id): """Get feed entry by external id.""" return self._feed_manager.feed_entries.get(external_id) def status_info(self): """Return latest status update info received.""" return self._status_info async def _generate_entity(self, external_id): """Generate new entity.""" async_dispatcher_send( self._hass, self.async_event_new_entity(), self, self._config_entry.unique_id, external_id, ) async def _update_entity(self, external_id): """Update entity.""" async_dispatcher_send(self._hass, f"gdacs_update_{external_id}") async def _remove_entity(self, external_id): """Remove entity.""" async_dispatcher_send(self._hass, f"gdacs_delete_{external_id}") async def _status_update(self, status_info): """Propagate status update.""" _LOGGER.debug("Status update received: %s", status_info) self._status_info = status_info async_dispatcher_send(self._hass, f"gdacs_status_{self._config_entry_id}")
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/gdacs/__init__.py
"""Support for Nexia / Trane XL Thermostats.""" from homeassistant.components.binary_sensor import BinarySensorEntity from .const import DOMAIN, NEXIA_DEVICE, UPDATE_COORDINATOR from .entity import NexiaThermostatEntity async def async_setup_entry(hass, config_entry, async_add_entities): """Set up sensors for a Nexia device.""" nexia_data = hass.data[DOMAIN][config_entry.entry_id] nexia_home = nexia_data[NEXIA_DEVICE] coordinator = nexia_data[UPDATE_COORDINATOR] entities = [] for thermostat_id in nexia_home.get_thermostat_ids(): thermostat = nexia_home.get_thermostat_by_id(thermostat_id) entities.append( NexiaBinarySensor( coordinator, thermostat, "is_blower_active", "Blower Active" ) ) if thermostat.has_emergency_heat(): entities.append( NexiaBinarySensor( coordinator, thermostat, "is_emergency_heat_active", "Emergency Heat Active", ) ) async_add_entities(entities, True) class NexiaBinarySensor(NexiaThermostatEntity, BinarySensorEntity): """Provices Nexia BinarySensor support.""" def __init__(self, coordinator, thermostat, sensor_call, sensor_name): """Initialize the nexia sensor.""" super().__init__( coordinator, thermostat, name=f"{thermostat.get_name()} {sensor_name}", unique_id=f"{thermostat.thermostat_id}_{sensor_call}", ) self._call = sensor_call self._state = None @property def is_on(self): """Return the status of the sensor.""" return getattr(self._thermostat, self._call)()
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/nexia/binary_sensor.py
"""Support for Twilio.""" from twilio.rest import Client from twilio.twiml import TwiML import voluptuous as vol from homeassistant.const import CONF_WEBHOOK_ID from homeassistant.helpers import config_entry_flow import homeassistant.helpers.config_validation as cv from .const import DOMAIN CONF_ACCOUNT_SID = "account_sid" CONF_AUTH_TOKEN = "auth_token" DATA_TWILIO = DOMAIN RECEIVED_DATA = f"{DOMAIN}_data_received" CONFIG_SCHEMA = vol.Schema( { vol.Optional(DOMAIN): vol.Schema( { vol.Required(CONF_ACCOUNT_SID): cv.string, vol.Required(CONF_AUTH_TOKEN): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Set up the Twilio component.""" if DOMAIN not in config: return True conf = config[DOMAIN] hass.data[DATA_TWILIO] = Client( conf.get(CONF_ACCOUNT_SID), conf.get(CONF_AUTH_TOKEN) ) return True async def handle_webhook(hass, webhook_id, request): """Handle incoming webhook from Twilio for inbound messages and calls.""" data = dict(await request.post()) data["webhook_id"] = webhook_id hass.bus.async_fire(RECEIVED_DATA, dict(data)) return TwiML().to_xml() async def async_setup_entry(hass, entry): """Configure based on config entry.""" hass.components.webhook.async_register( DOMAIN, "Twilio", entry.data[CONF_WEBHOOK_ID], handle_webhook ) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID]) return True async_remove_entry = config_entry_flow.webhook_async_remove_entry
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/twilio/__init__.py
"""Support for Lupusec Security System switches.""" from datetime import timedelta import lupupy.constants as CONST from homeassistant.components.switch import SwitchEntity from . import DOMAIN as LUPUSEC_DOMAIN, LupusecDevice SCAN_INTERVAL = timedelta(seconds=2) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up Lupusec switch devices.""" if discovery_info is None: return data = hass.data[LUPUSEC_DOMAIN] devices = [] for device in data.lupusec.get_devices(generic_type=CONST.TYPE_SWITCH): devices.append(LupusecSwitch(data, device)) add_entities(devices) class LupusecSwitch(LupusecDevice, SwitchEntity): """Representation of a Lupusec switch.""" def turn_on(self, **kwargs): """Turn on the device.""" self._device.switch_on() def turn_off(self, **kwargs): """Turn off the device.""" self._device.switch_off() @property def is_on(self): """Return true if device is on.""" return self._device.is_on
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/lupusec/switch.py
"""Reproduce an input boolean state.""" import asyncio import logging from typing import Any, Dict, Iterable, Optional from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, State from homeassistant.helpers.typing import HomeAssistantType from . import DOMAIN _LOGGER = logging.getLogger(__name__) async def _async_reproduce_states( hass: HomeAssistantType, state: State, *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce input boolean states.""" cur_state = hass.states.get(state.entity_id) if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return if state.state not in (STATE_ON, STATE_OFF): _LOGGER.warning( "Invalid state specified for %s: %s", state.entity_id, state.state ) return if cur_state.state == state.state: return service = SERVICE_TURN_ON if state.state == STATE_ON else SERVICE_TURN_OFF await hass.services.async_call( DOMAIN, service, {ATTR_ENTITY_ID: state.entity_id}, context=context, blocking=True, ) async def async_reproduce_states( hass: HomeAssistantType, states: Iterable[State], *, context: Optional[Context] = None, reproduce_options: Optional[Dict[str, Any]] = None, ) -> None: """Reproduce component states.""" await asyncio.gather( *( _async_reproduce_states( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) )
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/input_boolean/reproduce_state.py
"""The ATAG Integration.""" from datetime import timedelta import logging import async_timeout from pyatag import AtagException, AtagOne from homeassistant.components.climate import DOMAIN as CLIMATE from homeassistant.components.sensor import DOMAIN as SENSOR from homeassistant.components.water_heater import DOMAIN as WATER_HEATER from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, asyncio from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) _LOGGER = logging.getLogger(__name__) DOMAIN = "atag" PLATFORMS = [CLIMATE, WATER_HEATER, SENSOR] async def async_setup(hass: HomeAssistant, config): """Set up the Atag component.""" return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up Atag integration from a config entry.""" session = async_get_clientsession(hass) coordinator = AtagDataUpdateCoordinator(hass, session, entry) await coordinator.async_refresh() if not coordinator.last_update_success: raise ConfigEntryNotReady hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = coordinator if entry.unique_id is None: hass.config_entries.async_update_entry(entry, unique_id=coordinator.atag.id) for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) return True class AtagDataUpdateCoordinator(DataUpdateCoordinator): """Define an object to hold Atag data.""" def __init__(self, hass, session, entry): """Initialize.""" self.atag = AtagOne(session=session, **entry.data) super().__init__( hass, _LOGGER, name=DOMAIN, update_interval=timedelta(seconds=30) ) async def _async_update_data(self): """Update data via library.""" with async_timeout.timeout(20): try: if not await self.atag.update(): raise UpdateFailed("No data received") except AtagException as error: raise UpdateFailed(error) from error return self.atag.report async def async_unload_entry(hass, entry): """Unload Atag config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok class AtagEntity(CoordinatorEntity): """Defines a base Atag entity.""" def __init__(self, coordinator: AtagDataUpdateCoordinator, atag_id: str) -> None: """Initialize the Atag entity.""" super().__init__(coordinator) self._id = atag_id self._name = DOMAIN.title() @property def device_info(self) -> dict: """Return info for device registry.""" device = self.coordinator.atag.id version = self.coordinator.atag.apiversion return { "identifiers": {(DOMAIN, device)}, "name": "Atag Thermostat", "model": "Atag One", "sw_version": version, "manufacturer": "Atag", } @property def name(self) -> str: """Return the name of the entity.""" return self._name @property def unique_id(self): """Return a unique ID to use for this entity.""" return f"{self.coordinator.atag.id}-{self._id}"
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/atag/__init__.py
"""The Dune HD component.""" import asyncio from pdunehd import DuneHDPlayer from homeassistant.const import CONF_HOST from .const import DOMAIN PLATFORMS = ["media_player"] async def async_setup(hass, config): """Set up the Dune HD component.""" return True async def async_setup_entry(hass, config_entry): """Set up a config entry.""" host = config_entry.data[CONF_HOST] player = DuneHDPlayer(host) hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][config_entry.entry_id] = player for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, component) ) return True async def async_unload_entry(hass, config_entry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(config_entry, component) for component in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(config_entry.entry_id) return unload_ok
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/dunehd/__init__.py
"""Provides device trigger for lights.""" from typing import List import voluptuous as vol from homeassistant.components.automation import AutomationActionType from homeassistant.components.device_automation import toggle_entity from homeassistant.const import CONF_DOMAIN from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers.typing import ConfigType from . import DOMAIN TRIGGER_SCHEMA = toggle_entity.TRIGGER_SCHEMA.extend( {vol.Required(CONF_DOMAIN): DOMAIN} ) async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, action: AutomationActionType, automation_info: dict, ) -> CALLBACK_TYPE: """Listen for state changes based on configuration.""" return await toggle_entity.async_attach_trigger( hass, config, action, automation_info ) async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]: """List device triggers.""" return await toggle_entity.async_get_triggers(hass, device_id, DOMAIN) async def async_get_trigger_capabilities(hass: HomeAssistant, config: dict) -> dict: """List trigger capabilities.""" return await toggle_entity.async_get_trigger_capabilities(hass, config)
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/light/device_trigger.py
"""Support for switch controlled using a telnet connection.""" from datetime import timedelta import logging import telnetlib import voluptuous as vol from homeassistant.components.switch import ( ENTITY_ID_FORMAT, PLATFORM_SCHEMA, SwitchEntity, ) from homeassistant.const import ( CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_COMMAND_STATE, CONF_NAME, CONF_PORT, CONF_RESOURCE, CONF_SWITCHES, CONF_TIMEOUT, CONF_VALUE_TEMPLATE, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_PORT = 23 DEFAULT_TIMEOUT = 0.2 SWITCH_SCHEMA = vol.Schema( { vol.Required(CONF_COMMAND_OFF): cv.string, vol.Required(CONF_COMMAND_ON): cv.string, vol.Required(CONF_RESOURCE): cv.string, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_COMMAND_STATE): cv.string, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): vol.Coerce(float), } ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA)} ) SCAN_INTERVAL = timedelta(seconds=10) def setup_platform(hass, config, add_entities, discovery_info=None): """Find and return switches controlled by telnet commands.""" devices = config.get(CONF_SWITCHES, {}) switches = [] for object_id, device_config in devices.items(): value_template = device_config.get(CONF_VALUE_TEMPLATE) if value_template is not None: value_template.hass = hass switches.append( TelnetSwitch( hass, object_id, device_config.get(CONF_RESOURCE), device_config.get(CONF_PORT), device_config.get(CONF_NAME, object_id), device_config.get(CONF_COMMAND_ON), device_config.get(CONF_COMMAND_OFF), device_config.get(CONF_COMMAND_STATE), value_template, device_config.get(CONF_TIMEOUT), ) ) if not switches: _LOGGER.error("No switches added") return add_entities(switches) class TelnetSwitch(SwitchEntity): """Representation of a switch that can be toggled using telnet commands.""" def __init__( self, hass, object_id, resource, port, friendly_name, command_on, command_off, command_state, value_template, timeout, ): """Initialize the switch.""" self._hass = hass self.entity_id = ENTITY_ID_FORMAT.format(object_id) self._resource = resource self._port = port self._name = friendly_name self._state = False self._command_on = command_on self._command_off = command_off self._command_state = command_state self._value_template = value_template self._timeout = timeout def _telnet_command(self, command): try: telnet = telnetlib.Telnet(self._resource, self._port) telnet.write(command.encode("ASCII") + b"\r") response = telnet.read_until(b"\r", timeout=self._timeout) _LOGGER.debug("telnet response: %s", response.decode("ASCII").strip()) return response.decode("ASCII").strip() except OSError as error: _LOGGER.error( 'Command "%s" failed with exception: %s', command, repr(error) ) return None @property def name(self): """Return the name of the switch.""" return self._name @property def should_poll(self): """Only poll if we have state command.""" return self._command_state is not None @property def is_on(self): """Return true if device is on.""" return self._state @property def assumed_state(self): """Return true if no state command is defined, false otherwise.""" return self._command_state is None def update(self): """Update device state.""" response = self._telnet_command(self._command_state) if response: rendered = self._value_template.render_with_possible_json_value(response) self._state = rendered == "True" else: 
_LOGGER.warning("Empty response for command: %s", self._command_state) def turn_on(self, **kwargs): """Turn the device on.""" self._telnet_command(self._command_on) if self.assumed_state: self._state = True def turn_off(self, **kwargs): """Turn the device off.""" self._telnet_command(self._command_off) if self.assumed_state: self._state = False
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/telnet/switch.py
"""Support for Google Nest SDM Cameras.""" import datetime import logging from typing import Optional from google_nest_sdm.camera_traits import ( CameraEventImageTrait, CameraImageTrait, CameraLiveStreamTrait, ) from google_nest_sdm.device import Device from google_nest_sdm.exceptions import GoogleNestException from haffmpeg.tools import IMAGE_JPEG from homeassistant.components.camera import SUPPORT_STREAM, Camera from homeassistant.components.ffmpeg import async_get_image from homeassistant.config_entries import ConfigEntry from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.helpers.typing import HomeAssistantType from homeassistant.util.dt import utcnow from .const import DATA_SUBSCRIBER, DOMAIN from .device_info import DeviceInfo _LOGGER = logging.getLogger(__name__) # Used to schedule an alarm to refresh the stream before expiration STREAM_EXPIRATION_BUFFER = datetime.timedelta(seconds=30) async def async_setup_sdm_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up the cameras.""" subscriber = hass.data[DOMAIN][DATA_SUBSCRIBER] try: device_manager = await subscriber.async_get_device_manager() except GoogleNestException as err: raise PlatformNotReady from err # Fetch initial data so we have data when entities subscribe. entities = [] for device in device_manager.devices.values(): if ( CameraImageTrait.NAME in device.traits or CameraLiveStreamTrait.NAME in device.traits ): entities.append(NestCamera(device)) async_add_entities(entities) class NestCamera(Camera): """Devices that support cameras.""" def __init__(self, device: Device): """Initialize the camera.""" super().__init__() self._device = device self._device_info = DeviceInfo(device) self._stream = None self._stream_refresh_unsub = None # Cache of most recent event image self._event_id = None self._event_image_bytes = None self._event_image_cleanup_unsub = None @property def should_poll(self) -> bool: """Disable polling since entities have state pushed via pubsub.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" # The API "name" field is a unique device identifier. 
return f"{self._device.name}-camera" @property def name(self): """Return the name of the camera.""" return self._device_info.device_name @property def device_info(self): """Return device specific attributes.""" return self._device_info.device_info @property def brand(self): """Return the camera brand.""" return self._device_info.device_brand @property def model(self): """Return the camera model.""" return self._device_info.device_model @property def supported_features(self): """Flag supported features.""" supported_features = 0 if CameraLiveStreamTrait.NAME in self._device.traits: supported_features |= SUPPORT_STREAM return supported_features async def stream_source(self): """Return the source of the stream.""" if CameraLiveStreamTrait.NAME not in self._device.traits: return None trait = self._device.traits[CameraLiveStreamTrait.NAME] if not self._stream: _LOGGER.debug("Fetching stream url") self._stream = await trait.generate_rtsp_stream() self._schedule_stream_refresh() if self._stream.expires_at < utcnow(): _LOGGER.warning("Stream already expired") return self._stream.rtsp_stream_url def _schedule_stream_refresh(self): """Schedules an alarm to refresh the stream url before expiration.""" _LOGGER.debug("New stream url expires at %s", self._stream.expires_at) refresh_time = self._stream.expires_at - STREAM_EXPIRATION_BUFFER # Schedule an alarm to extend the stream if self._stream_refresh_unsub is not None: self._stream_refresh_unsub() self._stream_refresh_unsub = async_track_point_in_utc_time( self.hass, self._handle_stream_refresh, refresh_time, ) async def _handle_stream_refresh(self, now): """Alarm that fires to check if the stream should be refreshed.""" if not self._stream: return _LOGGER.debug("Extending stream url") try: self._stream = await self._stream.extend_rtsp_stream() except GoogleNestException as err: _LOGGER.debug("Failed to extend stream: %s", err) # Next attempt to catch a url will get a new one self._stream = None return # Update the stream worker with the latest valid url if self.stream: self.stream.update_source(self._stream.rtsp_stream_url) self._schedule_stream_refresh() async def async_will_remove_from_hass(self): """Invalidates the RTSP token when unloaded.""" if self._stream: _LOGGER.debug("Invalidating stream") await self._stream.stop_rtsp_stream() if self._stream_refresh_unsub: self._stream_refresh_unsub() self._event_id = None self._event_image_bytes = None if self._event_image_cleanup_unsub is not None: self._event_image_cleanup_unsub() async def async_added_to_hass(self): """Run when entity is added to register update signal handler.""" self.async_on_remove( self._device.add_update_listener(self.async_write_ha_state) ) async def async_camera_image(self): """Return bytes of camera image.""" # Returns the snapshot of the last event for ~30 seconds after the event active_event_image = await self._async_active_event_image() if active_event_image: return active_event_image # Fetch still image from the live stream stream_url = await self.stream_source() if not stream_url: return None return await async_get_image(self.hass, stream_url, output_format=IMAGE_JPEG) async def _async_active_event_image(self): """Return image from any active events happening.""" if CameraEventImageTrait.NAME not in self._device.traits: return None trait = self._device.active_event_trait if not trait: return None # Reuse image bytes if they have already been fetched event = trait.last_event if self._event_id is not None and self._event_id == event.event_id: return self._event_image_bytes 
_LOGGER.debug("Generating event image URL for event_id %s", event.event_id) image_bytes = await self._async_fetch_active_event_image(trait) if image_bytes is None: return None self._event_id = event.event_id self._event_image_bytes = image_bytes self._schedule_event_image_cleanup(event.expires_at) return image_bytes async def _async_fetch_active_event_image(self, trait): """Return image bytes for an active event.""" try: event_image = await trait.generate_active_event_image() except GoogleNestException as err: _LOGGER.debug("Unable to generate event image URL: %s", err) return None if not event_image: return None try: return await event_image.contents() except GoogleNestException as err: _LOGGER.debug("Unable to fetch event image: %s", err) return None def _schedule_event_image_cleanup(self, point_in_time): """Schedules an alarm to remove the image bytes from memory, honoring expiration.""" if self._event_image_cleanup_unsub is not None: self._event_image_cleanup_unsub() self._event_image_cleanup_unsub = async_track_point_in_utc_time( self.hass, self._handle_event_image_cleanup, point_in_time, ) def _handle_event_image_cleanup(self, now): """Clear images cached from events and scheduled callback.""" self._event_id = None self._event_image_bytes = None self._event_image_cleanup_unsub = None
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/nest/camera_sdm.py
"""Support for testing internet speed via Speedtest.net.""" from datetime import timedelta import logging import speedtest import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( CONF_MONITORED_CONDITIONS, CONF_SCAN_INTERVAL, EVENT_HOMEASSISTANT_STARTED, ) from homeassistant.core import CoreState, callback from homeassistant.exceptions import ConfigEntryNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( CONF_MANUAL, CONF_SERVER_ID, DEFAULT_SCAN_INTERVAL, DEFAULT_SERVER, DOMAIN, SENSOR_TYPES, SPEED_TEST_SERVICE, ) _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_SERVER_ID): cv.positive_int, vol.Optional( CONF_SCAN_INTERVAL, default=timedelta(minutes=DEFAULT_SCAN_INTERVAL) ): cv.positive_time_period, vol.Optional(CONF_MANUAL, default=False): cv.boolean, vol.Optional( CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES) ): vol.All(cv.ensure_list, [vol.In(list(SENSOR_TYPES))]), } ) }, extra=vol.ALLOW_EXTRA, ) def server_id_valid(server_id): """Check if server_id is valid.""" try: api = speedtest.Speedtest() api.get_servers([int(server_id)]) except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers): return False return True async def async_setup(hass, config): """Import integration from config.""" if DOMAIN in config: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN] ) ) return True async def async_setup_entry(hass, config_entry): """Set up the Speedtest.net component.""" coordinator = SpeedTestDataCoordinator(hass, config_entry) await coordinator.async_setup() async def _enable_scheduled_speedtests(*_): """Activate the data update coordinator.""" coordinator.update_interval = timedelta( minutes=config_entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) ) await coordinator.async_refresh() if not config_entry.options[CONF_MANUAL]: if hass.state == CoreState.running: await _enable_scheduled_speedtests() if not coordinator.last_update_success: raise ConfigEntryNotReady else: # Running a speed test during startup can prevent # integrations from being able to setup because it # can saturate the network interface. 
hass.bus.async_listen_once( EVENT_HOMEASSISTANT_STARTED, _enable_scheduled_speedtests ) hass.data[DOMAIN] = coordinator hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, "sensor") ) return True async def async_unload_entry(hass, config_entry): """Unload SpeedTest Entry from config_entry.""" hass.services.async_remove(DOMAIN, SPEED_TEST_SERVICE) hass.data[DOMAIN].async_unload() await hass.config_entries.async_forward_entry_unload(config_entry, "sensor") hass.data.pop(DOMAIN) return True class SpeedTestDataCoordinator(DataUpdateCoordinator): """Get the latest data from speedtest.net.""" def __init__(self, hass, config_entry): """Initialize the data object.""" self.hass = hass self.config_entry = config_entry self.api = None self.servers = {} self._unsub_update_listener = None super().__init__( self.hass, _LOGGER, name=DOMAIN, update_method=self.async_update, ) def update_servers(self): """Update list of test servers.""" try: server_list = self.api.get_servers() except speedtest.ConfigRetrievalError: _LOGGER.debug("Error retrieving server list") return self.servers[DEFAULT_SERVER] = {} for server in sorted( server_list.values(), key=lambda server: server[0]["country"] + server[0]["sponsor"], ): self.servers[ f"{server[0]['country']} - {server[0]['sponsor']} - {server[0]['name']}" ] = server[0] def update_data(self): """Get the latest data from speedtest.net.""" self.update_servers() self.api.closest.clear() if self.config_entry.options.get(CONF_SERVER_ID): server_id = self.config_entry.options.get(CONF_SERVER_ID) self.api.get_servers(servers=[server_id]) self.api.get_best_server() _LOGGER.debug( "Executing speedtest.net speed test with server_id: %s", self.api.best["id"] ) self.api.download() self.api.upload() return self.api.results.dict() async def async_update(self, *_): """Update Speedtest data.""" try: return await self.hass.async_add_executor_job(self.update_data) except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers) as err: raise UpdateFailed from err async def async_set_options(self): """Set options for entry.""" if not self.config_entry.options: data = {**self.config_entry.data} options = { CONF_SCAN_INTERVAL: data.pop(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL), CONF_MANUAL: data.pop(CONF_MANUAL, False), CONF_SERVER_ID: str(data.pop(CONF_SERVER_ID, "")), } self.hass.config_entries.async_update_entry( self.config_entry, data=data, options=options ) async def async_setup(self): """Set up SpeedTest.""" try: self.api = await self.hass.async_add_executor_job(speedtest.Speedtest) except speedtest.ConfigRetrievalError as err: raise ConfigEntryNotReady from err async def request_update(call): """Request update.""" await self.async_request_refresh() await self.async_set_options() await self.hass.async_add_executor_job(self.update_servers) self.hass.services.async_register(DOMAIN, SPEED_TEST_SERVICE, request_update) self._unsub_update_listener = self.config_entry.add_update_listener( options_updated_listener ) @callback def async_unload(self): """Unload the coordinator.""" if not self._unsub_update_listener: return self._unsub_update_listener() self._unsub_update_listener = None async def options_updated_listener(hass, entry): """Handle options update.""" if entry.options[CONF_MANUAL]: hass.data[DOMAIN].update_interval = None return hass.data[DOMAIN].update_interval = timedelta( minutes=entry.options[CONF_SCAN_INTERVAL] ) await hass.data[DOMAIN].async_request_refresh()
"""Tests for Transmission init.""" from unittest.mock import patch import pytest from transmissionrpc.error import TransmissionError from homeassistant.components import transmission from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, mock_coro MOCK_ENTRY = MockConfigEntry( domain=transmission.DOMAIN, data={ transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, ) @pytest.fixture(name="api") def mock_transmission_api(): """Mock an api.""" with patch("transmissionrpc.Client"): yield @pytest.fixture(name="auth_error") def mock_api_authentication_error(): """Mock an api.""" with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): yield @pytest.fixture(name="unknown_error") def mock_api_unknown_error(): """Mock an api.""" with patch("transmissionrpc.Client", side_effect=TransmissionError): yield async def test_setup_with_no_config(hass): """Test that we do not discover anything or try to set up a Transmission client.""" assert await async_setup_component(hass, transmission.DOMAIN, {}) is True assert transmission.DOMAIN not in hass.data async def test_setup_with_config(hass, api): """Test that we import the config and setup the client.""" config = { transmission.DOMAIN: { transmission.CONF_NAME: "Transmission", transmission.CONF_HOST: "0.0.0.0", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, transmission.DOMAIN: { transmission.CONF_NAME: "Transmission2", transmission.CONF_HOST: "0.0.0.1", transmission.CONF_USERNAME: "user", transmission.CONF_PASSWORD: "pass", transmission.CONF_PORT: 9091, }, } assert await async_setup_component(hass, transmission.DOMAIN, config) is True async def test_successful_config_entry(hass, api): """Test that configured transmission is configured successfully.""" entry = MOCK_ENTRY entry.add_to_hass(hass) assert await transmission.async_setup_entry(hass, entry) is True assert entry.options == { transmission.CONF_SCAN_INTERVAL: transmission.DEFAULT_SCAN_INTERVAL, transmission.CONF_LIMIT: transmission.DEFAULT_LIMIT, transmission.CONF_ORDER: transmission.DEFAULT_ORDER, } async def test_setup_failed(hass): """Test transmission failed due to an error.""" entry = MOCK_ENTRY entry.add_to_hass(hass) # test connection error raising ConfigEntryNotReady with patch( "transmissionrpc.Client", side_effect=TransmissionError("111: Connection refused"), ), pytest.raises(ConfigEntryNotReady): await transmission.async_setup_entry(hass, entry) # test Authentication error returning false with patch( "transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized") ): assert await transmission.async_setup_entry(hass, entry) is False async def test_unload_entry(hass, api): """Test removing transmission client.""" entry = MOCK_ENTRY entry.add_to_hass(hass) with patch.object( hass.config_entries, "async_forward_entry_unload", return_value=mock_coro(True) ) as unload_entry: assert await transmission.async_setup_entry(hass, entry) assert await transmission.async_unload_entry(hass, entry) assert unload_entry.call_count == 2 assert entry.entry_id not in hass.data[transmission.DOMAIN]
turbokongen/home-assistant
tests/components/transmission/test_init.py
homeassistant/components/speedtestdotnet/__init__.py
from __future__ import absolute_import, division, print_function

import json
from toolz import map, partial
import gzip

from .resource import resource

__all__ = ('resource',)


@resource.register(r'.*\.json')
def resource_json(uri, open=open):
    f = open(uri)
    try:
        data = json.load(f)
        f.close()
        return data
    except ValueError:
        # Not a single JSON document; re-open and stream one JSON object per line lazily.
        f = open(uri)
        data = map(json.loads, f)
        return data


@resource.register(r'.*\.json.gz')
def resource_json_gzip(uri):
    return resource_json(uri, open=partial(gzip.open, mode='rt'))
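Since the resource dispatch above is driven purely by a regex match on the URI, a minimal usage sketch could look like the following (the file names are hypothetical; only `resource` itself comes from the module above):

from blaze.resource import resource

# A plain .json file matches the first pattern and is loaded eagerly with json.load.
data = resource('accounts.json')        # e.g. a dict or a list, depending on the file

# A gzip-compressed file matches the .json.gz pattern and is opened in text mode.
data_gz = resource('accounts.json.gz')

# A file with one JSON document per line fails json.load, so the ValueError branch
# returns a lazy iterator of parsed objects instead.
for record in resource('events.json'):
    print(record)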
from __future__ import absolute_import, division, print_function import pytest from flask import json from datetime import datetime from pandas import DataFrame from blaze.utils import example from blaze import discover, Symbol, by, CSV, compute, join, into from blaze.server.server import Server, to_tree, from_tree from blaze.server.index import emit_index accounts = DataFrame([['Alice', 100], ['Bob', 200]], columns=['name', 'amount']) cities = DataFrame([['Alice', 'NYC'], ['Bob', 'LA']], columns=['name', 'city']) server = Server(datasets={'accounts': accounts, 'cities': cities}) test = server.app.test_client() def test_datasets(): response = test.get('/datasets.json') assert json.loads(response.data) == {'accounts': str(discover(accounts)), 'cities': str(discover(cities))} def test_bad_responses(): assert 'OK' not in test.post('/compute/accounts.json', data = json.dumps(500), content_type='application/json').status assert 'OK' not in test.post('/compute/non-existent-table.json', data = json.dumps(0), content_type='application/json').status assert 'OK' not in test.post('/compute/accounts.json').status def test_to_from_json(): t = Symbol('t', 'var * {name: string, amount: int}') assert from_tree(to_tree(t)).isidentical(t) assert from_tree(to_tree(t.amount + 1)).isidentical(t.amount + 1) def test_to_tree(): t = Symbol('t', 'var * {name: string, amount: int32}') expr = t.amount.sum() expected = {'op': 'sum', 'args': [{'op': 'Field', 'args': [ {'op': 'Symbol', 'args': [ 't', 'var * { name : string, amount : int32 }', ] }, 'amount' ] }, [0], False] } assert to_tree(expr) == expected def test_to_from_tree_namespace(): t = Symbol('t', 'var * {name: string, amount: int32}') expr = t.name tree = to_tree(expr, names={t: 't'}) assert tree == {'op': 'Field', 'args': ['t', 'name']} new = from_tree(tree, namespace={'t': t}) assert new.isidentical(expr) def test_from_tree_is_robust_to_unnecessary_namespace(): t = Symbol('t', 'var * {name: string, amount: int32}') expr = t.amount + 1 tree = to_tree(expr) # don't use namespace assert from_tree(tree, {'t': t}).isidentical(expr) def test_compute(): t = Symbol('t', 'var * {name: string, amount: int}') expr = t.amount.sum() query = {'expr': to_tree(expr)} expected = 300 response = test.post('/compute/accounts.json', data = json.dumps(query), content_type='application/json') assert 'OK' in response.status assert json.loads(response.data)['data'] == expected def test_compute_with_namespace(): query = {'expr': {'op': 'Field', 'args': ['accounts', 'name']}} expected = ['Alice', 'Bob'] response = test.post('/compute/accounts.json', data = json.dumps(query), content_type='application/json') assert 'OK' in response.status assert json.loads(response.data)['data'] == expected @pytest.fixture def iris_server(): iris = CSV(example('iris.csv')) server = Server(datasets={'iris': iris}) return server.app.test_client() iris = CSV(example('iris.csv')) def test_compute_with_variable_in_namespace(iris_server): test = iris_server t = Symbol('t', iris.dshape) pl = Symbol('pl', 'float32') expr = t[t.petal_length > pl].species tree = to_tree(expr, {pl: 'pl'}) blob = json.dumps({'expr': tree, 'namespace': {'pl': 5}}) resp = test.post('/compute/iris.json', data=blob, content_type='application/json') assert 'OK' in resp.status result = json.loads(resp.data)['data'] expected = list(compute(expr._subs({pl: 5}), {t: iris})) assert result == expected def test_compute_by_with_summary(iris_server): test = iris_server t = Symbol('t', iris.dshape) expr = by(t.species, 
max=t.petal_length.max(), sum=t.petal_width.sum()) tree = to_tree(expr) blob = json.dumps({'expr': tree}) resp = test.post('/compute/iris.json', data=blob, content_type='application/json') assert 'OK' in resp.status result = json.loads(resp.data)['data'] expected = compute(expr, iris) assert result == list(map(list, expected)) def test_compute_column_wise(iris_server): test = iris_server t = Symbol('t', iris.dshape) subexpr = ((t.petal_width / 2 > 0.5) & (t.petal_length / 2 > 0.5)) expr = t[subexpr] tree = to_tree(expr) blob = json.dumps({'expr': tree}) resp = test.post('/compute/iris.json', data=blob, content_type='application/json') assert 'OK' in resp.status result = json.loads(resp.data)['data'] expected = compute(expr, iris) assert list(map(tuple, result)) == list(map(tuple, expected)) def test_multi_expression_compute(): a = Symbol('accounts', discover(accounts)) c = Symbol('cities', discover(cities)) expr = join(a, c) resp = test.post('/compute.json', data=json.dumps({'expr': to_tree(expr)}), content_type='application/json') assert 'OK' in resp.status result = json.loads(resp.data)['data'] expected = compute(expr, {a: accounts, c: cities}) assert list(map(tuple, result))== into(list, expected)
vitan/blaze
blaze/server/tests/test_server.py
blaze/json.py
""" Eigenvalue solver using iterative methods. Find k eigenvectors and eigenvalues of a matrix A using the Arnoldi/Lanczos iterative methods from ARPACK [1]_,[2]_. These methods are most useful for large sparse matrices. - eigs(A,k) - eigsh(A,k) References ---------- .. [1] ARPACK Software, http://www.caam.rice.edu/software/ARPACK/ .. [2] R. B. Lehoucq, D. C. Sorensen, and C. Yang, ARPACK USERS GUIDE: Solution of Large Scale Eigenvalue Problems by Implicitly Restarted Arnoldi Methods. SIAM, Philadelphia, PA, 1998. """ from .arpack import *
__usage__ = """ To run tests locally: python tests/test_arpack.py [-l<int>] [-v<int>] """ import threading import itertools import numpy as np from numpy.testing import assert_allclose, assert_equal, suppress_warnings from pytest import raises as assert_raises import pytest from numpy import dot, conj, random from scipy.linalg import eig, eigh from scipy.sparse import csc_matrix, csr_matrix, diags, rand from scipy.sparse.linalg import LinearOperator, aslinearoperator from scipy.sparse.linalg.eigen.arpack import (eigs, eigsh, arpack, ArpackNoConvergence) from scipy._lib._gcutils import assert_deallocated, IS_PYPY # precision for tests _ndigits = {'f': 3, 'd': 11, 'F': 3, 'D': 11} def _get_test_tolerance(type_char, mattype=None): """ Return tolerance values suitable for a given test: Parameters ---------- type_char : {'f', 'd', 'F', 'D'} Data type in ARPACK eigenvalue problem mattype : {csr_matrix, aslinearoperator, asarray}, optional Linear operator type Returns ------- tol Tolerance to pass to the ARPACK routine rtol Relative tolerance for outputs atol Absolute tolerance for outputs """ rtol = {'f': 3000 * np.finfo(np.float32).eps, 'F': 3000 * np.finfo(np.float32).eps, 'd': 2000 * np.finfo(np.float64).eps, 'D': 2000 * np.finfo(np.float64).eps}[type_char] atol = rtol tol = 0 if mattype is aslinearoperator and type_char in ('f', 'F'): # iterative methods in single precision: worse errors # also: bump ARPACK tolerance so that the iterative method converges tol = 30 * np.finfo(np.float32).eps rtol *= 5 if mattype is csr_matrix and type_char in ('f', 'F'): # sparse in single precision: worse errors rtol *= 5 return tol, rtol, atol def generate_matrix(N, complex_=False, hermitian=False, pos_definite=False, sparse=False): M = np.random.random((N, N)) if complex_: M = M + 1j * np.random.random((N, N)) if hermitian: if pos_definite: if sparse: i = np.arange(N) j = np.random.randint(N, size=N-2) i, j = np.meshgrid(i, j) M[i, j] = 0 M = np.dot(M.conj(), M.T) else: M = np.dot(M.conj(), M.T) if sparse: i = np.random.randint(N, size=N * N // 4) j = np.random.randint(N, size=N * N // 4) ind = np.nonzero(i == j) j[ind] = (j[ind] + 1) % N M[i, j] = 0 M[j, i] = 0 else: if sparse: i = np.random.randint(N, size=N * N // 2) j = np.random.randint(N, size=N * N // 2) M[i, j] = 0 return M def generate_matrix_symmetric(N, pos_definite=False, sparse=False): M = np.random.random((N, N)) M = 0.5 * (M + M.T) # Make M symmetric if pos_definite: Id = N * np.eye(N) if sparse: M = csr_matrix(M) M += Id else: if sparse: M = csr_matrix(M) return M def _aslinearoperator_with_dtype(m): m = aslinearoperator(m) if not hasattr(m, 'dtype'): x = np.zeros(m.shape[1]) m.dtype = (m * x).dtype return m def assert_allclose_cc(actual, desired, **kw): """Almost equal or complex conjugates almost equal""" try: assert_allclose(actual, desired, **kw) except AssertionError: assert_allclose(actual, conj(desired), **kw) def argsort_which(eigenvalues, typ, k, which, sigma=None, OPpart=None, mode=None): """Return sorted indices of eigenvalues using the "which" keyword from eigs and eigsh""" if sigma is None: reval = np.round(eigenvalues, decimals=_ndigits[typ]) else: if mode is None or mode == 'normal': if OPpart is None: reval = 1. / (eigenvalues - sigma) elif OPpart == 'r': reval = 0.5 * (1. / (eigenvalues - sigma) + 1. / (eigenvalues - np.conj(sigma))) elif OPpart == 'i': reval = -0.5j * (1. / (eigenvalues - sigma) - 1. 
/ (eigenvalues - np.conj(sigma))) elif mode == 'cayley': reval = (eigenvalues + sigma) / (eigenvalues - sigma) elif mode == 'buckling': reval = eigenvalues / (eigenvalues - sigma) else: raise ValueError("mode='%s' not recognized" % mode) reval = np.round(reval, decimals=_ndigits[typ]) if which in ['LM', 'SM']: ind = np.argsort(abs(reval)) elif which in ['LR', 'SR', 'LA', 'SA', 'BE']: ind = np.argsort(np.real(reval)) elif which in ['LI', 'SI']: # for LI,SI ARPACK returns largest,smallest abs(imaginary) why? if typ.islower(): ind = np.argsort(abs(np.imag(reval))) else: ind = np.argsort(np.imag(reval)) else: raise ValueError("which='%s' is unrecognized" % which) if which in ['LM', 'LA', 'LR', 'LI']: return ind[-k:] elif which in ['SM', 'SA', 'SR', 'SI']: return ind[:k] elif which == 'BE': return np.concatenate((ind[:k//2], ind[k//2-k:])) def eval_evec(symmetric, d, typ, k, which, v0=None, sigma=None, mattype=np.asarray, OPpart=None, mode='normal'): general = ('bmat' in d) if symmetric: eigs_func = eigsh else: eigs_func = eigs if general: err = ("error for %s:general, typ=%s, which=%s, sigma=%s, " "mattype=%s, OPpart=%s, mode=%s" % (eigs_func.__name__, typ, which, sigma, mattype.__name__, OPpart, mode)) else: err = ("error for %s:standard, typ=%s, which=%s, sigma=%s, " "mattype=%s, OPpart=%s, mode=%s" % (eigs_func.__name__, typ, which, sigma, mattype.__name__, OPpart, mode)) a = d['mat'].astype(typ) ac = mattype(a) if general: b = d['bmat'].astype(typ) bc = mattype(b) # get exact eigenvalues exact_eval = d['eval'].astype(typ.upper()) ind = argsort_which(exact_eval, typ, k, which, sigma, OPpart, mode) exact_eval = exact_eval[ind] # compute arpack eigenvalues kwargs = dict(which=which, v0=v0, sigma=sigma) if eigs_func is eigsh: kwargs['mode'] = mode else: kwargs['OPpart'] = OPpart # compute suitable tolerances kwargs['tol'], rtol, atol = _get_test_tolerance(typ, mattype) # on rare occasions, ARPACK routines return results that are proper # eigenvalues and -vectors, but not necessarily the ones requested in # the parameter which. This is inherent to the Krylov methods, and # should not be treated as a failure. If such a rare situation # occurs, the calculation is tried again (but at most a few times). 
ntries = 0 while ntries < 5: # solve if general: try: eigenvalues, evec = eigs_func(ac, k, bc, **kwargs) except ArpackNoConvergence: kwargs['maxiter'] = 20*a.shape[0] eigenvalues, evec = eigs_func(ac, k, bc, **kwargs) else: try: eigenvalues, evec = eigs_func(ac, k, **kwargs) except ArpackNoConvergence: kwargs['maxiter'] = 20*a.shape[0] eigenvalues, evec = eigs_func(ac, k, **kwargs) ind = argsort_which(eigenvalues, typ, k, which, sigma, OPpart, mode) eigenvalues = eigenvalues[ind] evec = evec[:, ind] # check eigenvectors LHS = np.dot(a, evec) if general: RHS = eigenvalues * np.dot(b, evec) else: RHS = eigenvalues * evec assert_allclose(LHS, RHS, rtol=rtol, atol=atol, err_msg=err) try: # check eigenvalues assert_allclose_cc(eigenvalues, exact_eval, rtol=rtol, atol=atol, err_msg=err) break except AssertionError: ntries += 1 # check eigenvalues assert_allclose_cc(eigenvalues, exact_eval, rtol=rtol, atol=atol, err_msg=err) class DictWithRepr(dict): def __init__(self, name): self.name = name def __repr__(self): return "<%s>" % self.name class SymmetricParams: def __init__(self): self.eigs = eigsh self.which = ['LM', 'SM', 'LA', 'SA', 'BE'] self.mattypes = [csr_matrix, aslinearoperator, np.asarray] self.sigmas_modes = {None: ['normal'], 0.5: ['normal', 'buckling', 'cayley']} # generate matrices # these should all be float32 so that the eigenvalues # are the same in float32 and float64 N = 6 np.random.seed(2300) Ar = generate_matrix(N, hermitian=True, pos_definite=True).astype('f').astype('d') M = generate_matrix(N, hermitian=True, pos_definite=True).astype('f').astype('d') Ac = generate_matrix(N, hermitian=True, pos_definite=True, complex_=True).astype('F').astype('D') Mc = generate_matrix(N, hermitian=True, pos_definite=True, complex_=True).astype('F').astype('D') v0 = np.random.random(N) # standard symmetric problem SS = DictWithRepr("std-symmetric") SS['mat'] = Ar SS['v0'] = v0 SS['eval'] = eigh(SS['mat'], eigvals_only=True) # general symmetric problem GS = DictWithRepr("gen-symmetric") GS['mat'] = Ar GS['bmat'] = M GS['v0'] = v0 GS['eval'] = eigh(GS['mat'], GS['bmat'], eigvals_only=True) # standard hermitian problem SH = DictWithRepr("std-hermitian") SH['mat'] = Ac SH['v0'] = v0 SH['eval'] = eigh(SH['mat'], eigvals_only=True) # general hermitian problem GH = DictWithRepr("gen-hermitian") GH['mat'] = Ac GH['bmat'] = M GH['v0'] = v0 GH['eval'] = eigh(GH['mat'], GH['bmat'], eigvals_only=True) # general hermitian problem with hermitian M GHc = DictWithRepr("gen-hermitian-Mc") GHc['mat'] = Ac GHc['bmat'] = Mc GHc['v0'] = v0 GHc['eval'] = eigh(GHc['mat'], GHc['bmat'], eigvals_only=True) self.real_test_cases = [SS, GS] self.complex_test_cases = [SH, GH, GHc] class NonSymmetricParams: def __init__(self): self.eigs = eigs self.which = ['LM', 'LR', 'LI'] # , 'SM', 'LR', 'SR', 'LI', 'SI'] self.mattypes = [csr_matrix, aslinearoperator, np.asarray] self.sigmas_OPparts = {None: [None], 0.1: ['r'], 0.1 + 0.1j: ['r', 'i']} # generate matrices # these should all be float32 so that the eigenvalues # are the same in float32 and float64 N = 6 np.random.seed(2300) Ar = generate_matrix(N).astype('f').astype('d') M = generate_matrix(N, hermitian=True, pos_definite=True).astype('f').astype('d') Ac = generate_matrix(N, complex_=True).astype('F').astype('D') v0 = np.random.random(N) # standard real nonsymmetric problem SNR = DictWithRepr("std-real-nonsym") SNR['mat'] = Ar SNR['v0'] = v0 SNR['eval'] = eig(SNR['mat'], left=False, right=False) # general real nonsymmetric problem GNR = DictWithRepr("gen-real-nonsym") 
GNR['mat'] = Ar GNR['bmat'] = M GNR['v0'] = v0 GNR['eval'] = eig(GNR['mat'], GNR['bmat'], left=False, right=False) # standard complex nonsymmetric problem SNC = DictWithRepr("std-cmplx-nonsym") SNC['mat'] = Ac SNC['v0'] = v0 SNC['eval'] = eig(SNC['mat'], left=False, right=False) # general complex nonsymmetric problem GNC = DictWithRepr("gen-cmplx-nonsym") GNC['mat'] = Ac GNC['bmat'] = M GNC['v0'] = v0 GNC['eval'] = eig(GNC['mat'], GNC['bmat'], left=False, right=False) self.real_test_cases = [SNR, GNR] self.complex_test_cases = [SNC, GNC] def test_symmetric_modes(): params = SymmetricParams() k = 2 symmetric = True for D in params.real_test_cases: for typ in 'fd': for which in params.which: for mattype in params.mattypes: for (sigma, modes) in params.sigmas_modes.items(): for mode in modes: eval_evec(symmetric, D, typ, k, which, None, sigma, mattype, None, mode) def test_hermitian_modes(): params = SymmetricParams() k = 2 symmetric = True for D in params.complex_test_cases: for typ in 'FD': for which in params.which: if which == 'BE': continue # BE invalid for complex for mattype in params.mattypes: for sigma in params.sigmas_modes: eval_evec(symmetric, D, typ, k, which, None, sigma, mattype) def test_symmetric_starting_vector(): params = SymmetricParams() symmetric = True for k in [1, 2, 3, 4, 5]: for D in params.real_test_cases: for typ in 'fd': v0 = random.rand(len(D['v0'])).astype(typ) eval_evec(symmetric, D, typ, k, 'LM', v0) def test_symmetric_no_convergence(): np.random.seed(1234) m = generate_matrix(30, hermitian=True, pos_definite=True) tol, rtol, atol = _get_test_tolerance('d') try: w, v = eigsh(m, 4, which='LM', v0=m[:, 0], maxiter=5, tol=tol, ncv=9) raise AssertionError("Spurious no-error exit") except ArpackNoConvergence as err: k = len(err.eigenvalues) if k <= 0: raise AssertionError("Spurious no-eigenvalues-found case") from err w, v = err.eigenvalues, err.eigenvectors assert_allclose(dot(m, v), w * v, rtol=rtol, atol=atol) def test_real_nonsymmetric_modes(): params = NonSymmetricParams() k = 2 symmetric = False for D in params.real_test_cases: for typ in 'fd': for which in params.which: for mattype in params.mattypes: for sigma, OPparts in params.sigmas_OPparts.items(): for OPpart in OPparts: eval_evec(symmetric, D, typ, k, which, None, sigma, mattype, OPpart) def test_complex_nonsymmetric_modes(): params = NonSymmetricParams() k = 2 symmetric = False for D in params.complex_test_cases: for typ in 'DF': for which in params.which: for mattype in params.mattypes: for sigma in params.sigmas_OPparts: eval_evec(symmetric, D, typ, k, which, None, sigma, mattype) def test_standard_nonsymmetric_starting_vector(): params = NonSymmetricParams() sigma = None symmetric = False for k in [1, 2, 3, 4]: for d in params.complex_test_cases: for typ in 'FD': A = d['mat'] n = A.shape[0] v0 = random.rand(n).astype(typ) eval_evec(symmetric, d, typ, k, "LM", v0, sigma) def test_general_nonsymmetric_starting_vector(): params = NonSymmetricParams() sigma = None symmetric = False for k in [1, 2, 3, 4]: for d in params.complex_test_cases: for typ in 'FD': A = d['mat'] n = A.shape[0] v0 = random.rand(n).astype(typ) eval_evec(symmetric, d, typ, k, "LM", v0, sigma) def test_standard_nonsymmetric_no_convergence(): np.random.seed(1234) m = generate_matrix(30, complex_=True) tol, rtol, atol = _get_test_tolerance('d') try: w, v = eigs(m, 4, which='LM', v0=m[:, 0], maxiter=5, tol=tol) raise AssertionError("Spurious no-error exit") except ArpackNoConvergence as err: k = len(err.eigenvalues) if k <= 0: raise 
AssertionError("Spurious no-eigenvalues-found case") from err w, v = err.eigenvalues, err.eigenvectors for ww, vv in zip(w, v.T): assert_allclose(dot(m, vv), ww * vv, rtol=rtol, atol=atol) def test_eigen_bad_shapes(): # A is not square. A = csc_matrix(np.zeros((2, 3))) assert_raises(ValueError, eigs, A) def test_eigen_bad_kwargs(): # Test eigen on wrong keyword argument A = csc_matrix(np.zeros((8, 8))) assert_raises(ValueError, eigs, A, which='XX') def test_ticket_1459_arpack_crash(): for dtype in [np.float32, np.float64]: # This test does not seem to catch the issue for float32, # but we made the same fix there, just to be sure N = 6 k = 2 np.random.seed(2301) A = np.random.random((N, N)).astype(dtype) v0 = np.array([-0.71063568258907849895, -0.83185111795729227424, -0.34365925382227402451, 0.46122533684552280420, -0.58001341115969040629, -0.78844877570084292984e-01], dtype=dtype) # Should not crash: evals, evecs = eigs(A, k, v0=v0) @pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy") def test_linearoperator_deallocation(): # Check that the linear operators used by the Arpack wrappers are # deallocatable by reference counting -- they are big objects, so # Python's cyclic GC may not collect them fast enough before # running out of memory if eigs/eigsh are called in a tight loop. M_d = np.eye(10) M_s = csc_matrix(M_d) M_o = aslinearoperator(M_d) with assert_deallocated(lambda: arpack.SpLuInv(M_s)): pass with assert_deallocated(lambda: arpack.LuInv(M_d)): pass with assert_deallocated(lambda: arpack.IterInv(M_s)): pass with assert_deallocated(lambda: arpack.IterOpInv(M_o, None, 0.3)): pass with assert_deallocated(lambda: arpack.IterOpInv(M_o, M_o, 0.3)): pass def test_parallel_threads(): results = [] v0 = np.random.rand(50) def worker(): x = diags([1, -2, 1], [-1, 0, 1], shape=(50, 50)) w, v = eigs(x, k=3, v0=v0) results.append(w) w, v = eigsh(x, k=3, v0=v0) results.append(w) threads = [threading.Thread(target=worker) for k in range(10)] for t in threads: t.start() for t in threads: t.join() worker() for r in results: assert_allclose(r, results[-1]) def test_reentering(): # Just some linear operator that calls eigs recursively def A_matvec(x): x = diags([1, -2, 1], [-1, 0, 1], shape=(50, 50)) w, v = eigs(x, k=1) return v / w[0] A = LinearOperator(matvec=A_matvec, dtype=float, shape=(50, 50)) # The Fortran code is not reentrant, so this fails (gracefully, not crashing) assert_raises(RuntimeError, eigs, A, k=1) assert_raises(RuntimeError, eigsh, A, k=1) def test_regression_arpackng_1315(): # Check that issue arpack-ng/#1315 is not present. # Adapted from arpack-ng/TESTS/bug_1315_single.c # If this fails, then the installed ARPACK library is faulty. for dtype in [np.float32, np.float64]: np.random.seed(1234) w0 = np.arange(1, 1000+1).astype(dtype) A = diags([w0], [0], shape=(1000, 1000)) v0 = np.random.rand(1000).astype(dtype) w, v = eigs(A, k=9, ncv=2*9+1, which="LM", v0=v0) assert_allclose(np.sort(w), np.sort(w0[-9:]), rtol=1e-4) def test_eigs_for_k_greater(): # Test eigs() for k beyond limits. 
A_sparse = diags([1, -2, 1], [-1, 0, 1], shape=(4, 4)) # sparse A = generate_matrix(4, sparse=False) M_dense = np.random.random((4, 4)) M_sparse = generate_matrix(4, sparse=True) M_linop = aslinearoperator(M_dense) eig_tuple1 = eig(A, b=M_dense) eig_tuple2 = eig(A, b=M_sparse) with suppress_warnings() as sup: sup.filter(RuntimeWarning) assert_equal(eigs(A, M=M_dense, k=3), eig_tuple1) assert_equal(eigs(A, M=M_dense, k=4), eig_tuple1) assert_equal(eigs(A, M=M_dense, k=5), eig_tuple1) assert_equal(eigs(A, M=M_sparse, k=5), eig_tuple2) # M as LinearOperator assert_raises(TypeError, eigs, A, M=M_linop, k=3) # Test 'A' for different types assert_raises(TypeError, eigs, aslinearoperator(A), k=3) assert_raises(TypeError, eigs, A_sparse, k=3) def test_eigsh_for_k_greater(): # Test eigsh() for k beyond limits. A_sparse = diags([1, -2, 1], [-1, 0, 1], shape=(4, 4)) # sparse A = generate_matrix(4, sparse=False) M_dense = generate_matrix_symmetric(4, pos_definite=True) M_sparse = generate_matrix_symmetric(4, pos_definite=True, sparse=True) M_linop = aslinearoperator(M_dense) eig_tuple1 = eigh(A, b=M_dense) eig_tuple2 = eigh(A, b=M_sparse) with suppress_warnings() as sup: sup.filter(RuntimeWarning) assert_equal(eigsh(A, M=M_dense, k=4), eig_tuple1) assert_equal(eigsh(A, M=M_dense, k=5), eig_tuple1) assert_equal(eigsh(A, M=M_sparse, k=5), eig_tuple2) # M as LinearOperator assert_raises(TypeError, eigsh, A, M=M_linop, k=4) # Test 'A' for different types assert_raises(TypeError, eigsh, aslinearoperator(A), k=4) assert_raises(TypeError, eigsh, A_sparse, M=M_dense, k=4) def test_real_eigs_real_k_subset(): np.random.seed(1) n = 10 A = rand(n, n, density=0.5) A.data *= 2 A.data -= 1 v0 = np.ones(n) whichs = ['LM', 'SM', 'LR', 'SR', 'LI', 'SI'] dtypes = [np.float32, np.float64] for which, sigma, dtype in itertools.product(whichs, [None, 0, 5], dtypes): prev_w = np.array([], dtype=dtype) eps = np.finfo(dtype).eps for k in range(1, 9): w, z = eigs(A.astype(dtype), k=k, which=which, sigma=sigma, v0=v0.astype(dtype), tol=0) assert_allclose(np.linalg.norm(A.dot(z) - z * w), 0, atol=np.sqrt(eps)) # Check that the set of eigenvalues for `k` is a subset of that for `k+1` dist = abs(prev_w[:,None] - w).min(axis=1) assert_allclose(dist, 0, atol=np.sqrt(eps)) prev_w = w # Check sort order if sigma is None: d = w else: d = 1 / (w - sigma) if which == 'LM': # ARPACK is systematic for 'LM', but sort order # appears not well defined for other modes assert np.all(np.diff(abs(d)) <= 1e-6)
endolith/scipy
scipy/sparse/linalg/eigen/arpack/tests/test_arpack.py
scipy/sparse/linalg/eigen/arpack/__init__.py
""" Sparse linear algebra (:mod:`scipy.sparse.linalg`) ================================================== .. currentmodule:: scipy.sparse.linalg Abstract linear operators ------------------------- .. autosummary:: :toctree: generated/ LinearOperator -- abstract representation of a linear operator aslinearoperator -- convert an object to an abstract linear operator Matrix Operations ----------------- .. autosummary:: :toctree: generated/ inv -- compute the sparse matrix inverse expm -- compute the sparse matrix exponential expm_multiply -- compute the product of a matrix exponential and a matrix Matrix norms ------------ .. autosummary:: :toctree: generated/ norm -- Norm of a sparse matrix onenormest -- Estimate the 1-norm of a sparse matrix Solving linear problems ----------------------- Direct methods for linear equation systems: .. autosummary:: :toctree: generated/ spsolve -- Solve the sparse linear system Ax=b spsolve_triangular -- Solve the sparse linear system Ax=b for a triangular matrix factorized -- Pre-factorize matrix to a function solving a linear system MatrixRankWarning -- Warning on exactly singular matrices use_solver -- Select direct solver to use Iterative methods for linear equation systems: .. autosummary:: :toctree: generated/ bicg -- Use BIConjugate Gradient iteration to solve A x = b bicgstab -- Use BIConjugate Gradient STABilized iteration to solve A x = b cg -- Use Conjugate Gradient iteration to solve A x = b cgs -- Use Conjugate Gradient Squared iteration to solve A x = b gmres -- Use Generalized Minimal RESidual iteration to solve A x = b lgmres -- Solve a matrix equation using the LGMRES algorithm minres -- Use MINimum RESidual iteration to solve Ax = b qmr -- Use Quasi-Minimal Residual iteration to solve A x = b gcrotmk -- Solve a matrix equation using the GCROT(m,k) algorithm Iterative methods for least-squares problems: .. autosummary:: :toctree: generated/ lsqr -- Find the least-squares solution to a sparse linear equation system lsmr -- Find the least-squares solution to a sparse linear equation system Matrix factorizations --------------------- Eigenvalue problems: .. autosummary:: :toctree: generated/ eigs -- Find k eigenvalues and eigenvectors of the square matrix A eigsh -- Find k eigenvalues and eigenvectors of a symmetric matrix lobpcg -- Solve symmetric partial eigenproblems with optional preconditioning Singular values problems: .. autosummary:: :toctree: generated/ svds -- Compute k singular values/vectors for a sparse matrix The `svds` function supports the following solvers: .. toctree:: sparse.linalg.svds-arpack sparse.linalg.svds-lobpcg Complete or incomplete LU factorizations .. autosummary:: :toctree: generated/ splu -- Compute a LU decomposition for a sparse matrix spilu -- Compute an incomplete LU decomposition for a sparse matrix SuperLU -- Object representing an LU factorization Exceptions ---------- .. autosummary:: :toctree: generated/ ArpackNoConvergence ArpackError """ from .isolve import * from .dsolve import * from .interface import * from .eigen import * from .matfuncs import * from ._onenormest import * from ._norm import * from ._expm_multiply import * __all__ = [s for s in dir() if not s.startswith('_')] from scipy._lib._testutils import PytestTester test = PytestTester(__name__) del PytestTester
__usage__ = """ To run tests locally: python tests/test_arpack.py [-l<int>] [-v<int>] """ import threading import itertools import numpy as np from numpy.testing import assert_allclose, assert_equal, suppress_warnings from pytest import raises as assert_raises import pytest from numpy import dot, conj, random from scipy.linalg import eig, eigh from scipy.sparse import csc_matrix, csr_matrix, diags, rand from scipy.sparse.linalg import LinearOperator, aslinearoperator from scipy.sparse.linalg.eigen.arpack import (eigs, eigsh, arpack, ArpackNoConvergence) from scipy._lib._gcutils import assert_deallocated, IS_PYPY # precision for tests _ndigits = {'f': 3, 'd': 11, 'F': 3, 'D': 11} def _get_test_tolerance(type_char, mattype=None): """ Return tolerance values suitable for a given test: Parameters ---------- type_char : {'f', 'd', 'F', 'D'} Data type in ARPACK eigenvalue problem mattype : {csr_matrix, aslinearoperator, asarray}, optional Linear operator type Returns ------- tol Tolerance to pass to the ARPACK routine rtol Relative tolerance for outputs atol Absolute tolerance for outputs """ rtol = {'f': 3000 * np.finfo(np.float32).eps, 'F': 3000 * np.finfo(np.float32).eps, 'd': 2000 * np.finfo(np.float64).eps, 'D': 2000 * np.finfo(np.float64).eps}[type_char] atol = rtol tol = 0 if mattype is aslinearoperator and type_char in ('f', 'F'): # iterative methods in single precision: worse errors # also: bump ARPACK tolerance so that the iterative method converges tol = 30 * np.finfo(np.float32).eps rtol *= 5 if mattype is csr_matrix and type_char in ('f', 'F'): # sparse in single precision: worse errors rtol *= 5 return tol, rtol, atol def generate_matrix(N, complex_=False, hermitian=False, pos_definite=False, sparse=False): M = np.random.random((N, N)) if complex_: M = M + 1j * np.random.random((N, N)) if hermitian: if pos_definite: if sparse: i = np.arange(N) j = np.random.randint(N, size=N-2) i, j = np.meshgrid(i, j) M[i, j] = 0 M = np.dot(M.conj(), M.T) else: M = np.dot(M.conj(), M.T) if sparse: i = np.random.randint(N, size=N * N // 4) j = np.random.randint(N, size=N * N // 4) ind = np.nonzero(i == j) j[ind] = (j[ind] + 1) % N M[i, j] = 0 M[j, i] = 0 else: if sparse: i = np.random.randint(N, size=N * N // 2) j = np.random.randint(N, size=N * N // 2) M[i, j] = 0 return M def generate_matrix_symmetric(N, pos_definite=False, sparse=False): M = np.random.random((N, N)) M = 0.5 * (M + M.T) # Make M symmetric if pos_definite: Id = N * np.eye(N) if sparse: M = csr_matrix(M) M += Id else: if sparse: M = csr_matrix(M) return M def _aslinearoperator_with_dtype(m): m = aslinearoperator(m) if not hasattr(m, 'dtype'): x = np.zeros(m.shape[1]) m.dtype = (m * x).dtype return m def assert_allclose_cc(actual, desired, **kw): """Almost equal or complex conjugates almost equal""" try: assert_allclose(actual, desired, **kw) except AssertionError: assert_allclose(actual, conj(desired), **kw) def argsort_which(eigenvalues, typ, k, which, sigma=None, OPpart=None, mode=None): """Return sorted indices of eigenvalues using the "which" keyword from eigs and eigsh""" if sigma is None: reval = np.round(eigenvalues, decimals=_ndigits[typ]) else: if mode is None or mode == 'normal': if OPpart is None: reval = 1. / (eigenvalues - sigma) elif OPpart == 'r': reval = 0.5 * (1. / (eigenvalues - sigma) + 1. / (eigenvalues - np.conj(sigma))) elif OPpart == 'i': reval = -0.5j * (1. / (eigenvalues - sigma) - 1. 
/ (eigenvalues - np.conj(sigma))) elif mode == 'cayley': reval = (eigenvalues + sigma) / (eigenvalues - sigma) elif mode == 'buckling': reval = eigenvalues / (eigenvalues - sigma) else: raise ValueError("mode='%s' not recognized" % mode) reval = np.round(reval, decimals=_ndigits[typ]) if which in ['LM', 'SM']: ind = np.argsort(abs(reval)) elif which in ['LR', 'SR', 'LA', 'SA', 'BE']: ind = np.argsort(np.real(reval)) elif which in ['LI', 'SI']: # for LI,SI ARPACK returns largest,smallest abs(imaginary) why? if typ.islower(): ind = np.argsort(abs(np.imag(reval))) else: ind = np.argsort(np.imag(reval)) else: raise ValueError("which='%s' is unrecognized" % which) if which in ['LM', 'LA', 'LR', 'LI']: return ind[-k:] elif which in ['SM', 'SA', 'SR', 'SI']: return ind[:k] elif which == 'BE': return np.concatenate((ind[:k//2], ind[k//2-k:])) def eval_evec(symmetric, d, typ, k, which, v0=None, sigma=None, mattype=np.asarray, OPpart=None, mode='normal'): general = ('bmat' in d) if symmetric: eigs_func = eigsh else: eigs_func = eigs if general: err = ("error for %s:general, typ=%s, which=%s, sigma=%s, " "mattype=%s, OPpart=%s, mode=%s" % (eigs_func.__name__, typ, which, sigma, mattype.__name__, OPpart, mode)) else: err = ("error for %s:standard, typ=%s, which=%s, sigma=%s, " "mattype=%s, OPpart=%s, mode=%s" % (eigs_func.__name__, typ, which, sigma, mattype.__name__, OPpart, mode)) a = d['mat'].astype(typ) ac = mattype(a) if general: b = d['bmat'].astype(typ) bc = mattype(b) # get exact eigenvalues exact_eval = d['eval'].astype(typ.upper()) ind = argsort_which(exact_eval, typ, k, which, sigma, OPpart, mode) exact_eval = exact_eval[ind] # compute arpack eigenvalues kwargs = dict(which=which, v0=v0, sigma=sigma) if eigs_func is eigsh: kwargs['mode'] = mode else: kwargs['OPpart'] = OPpart # compute suitable tolerances kwargs['tol'], rtol, atol = _get_test_tolerance(typ, mattype) # on rare occasions, ARPACK routines return results that are proper # eigenvalues and -vectors, but not necessarily the ones requested in # the parameter which. This is inherent to the Krylov methods, and # should not be treated as a failure. If such a rare situation # occurs, the calculation is tried again (but at most a few times). 
ntries = 0 while ntries < 5: # solve if general: try: eigenvalues, evec = eigs_func(ac, k, bc, **kwargs) except ArpackNoConvergence: kwargs['maxiter'] = 20*a.shape[0] eigenvalues, evec = eigs_func(ac, k, bc, **kwargs) else: try: eigenvalues, evec = eigs_func(ac, k, **kwargs) except ArpackNoConvergence: kwargs['maxiter'] = 20*a.shape[0] eigenvalues, evec = eigs_func(ac, k, **kwargs) ind = argsort_which(eigenvalues, typ, k, which, sigma, OPpart, mode) eigenvalues = eigenvalues[ind] evec = evec[:, ind] # check eigenvectors LHS = np.dot(a, evec) if general: RHS = eigenvalues * np.dot(b, evec) else: RHS = eigenvalues * evec assert_allclose(LHS, RHS, rtol=rtol, atol=atol, err_msg=err) try: # check eigenvalues assert_allclose_cc(eigenvalues, exact_eval, rtol=rtol, atol=atol, err_msg=err) break except AssertionError: ntries += 1 # check eigenvalues assert_allclose_cc(eigenvalues, exact_eval, rtol=rtol, atol=atol, err_msg=err) class DictWithRepr(dict): def __init__(self, name): self.name = name def __repr__(self): return "<%s>" % self.name class SymmetricParams: def __init__(self): self.eigs = eigsh self.which = ['LM', 'SM', 'LA', 'SA', 'BE'] self.mattypes = [csr_matrix, aslinearoperator, np.asarray] self.sigmas_modes = {None: ['normal'], 0.5: ['normal', 'buckling', 'cayley']} # generate matrices # these should all be float32 so that the eigenvalues # are the same in float32 and float64 N = 6 np.random.seed(2300) Ar = generate_matrix(N, hermitian=True, pos_definite=True).astype('f').astype('d') M = generate_matrix(N, hermitian=True, pos_definite=True).astype('f').astype('d') Ac = generate_matrix(N, hermitian=True, pos_definite=True, complex_=True).astype('F').astype('D') Mc = generate_matrix(N, hermitian=True, pos_definite=True, complex_=True).astype('F').astype('D') v0 = np.random.random(N) # standard symmetric problem SS = DictWithRepr("std-symmetric") SS['mat'] = Ar SS['v0'] = v0 SS['eval'] = eigh(SS['mat'], eigvals_only=True) # general symmetric problem GS = DictWithRepr("gen-symmetric") GS['mat'] = Ar GS['bmat'] = M GS['v0'] = v0 GS['eval'] = eigh(GS['mat'], GS['bmat'], eigvals_only=True) # standard hermitian problem SH = DictWithRepr("std-hermitian") SH['mat'] = Ac SH['v0'] = v0 SH['eval'] = eigh(SH['mat'], eigvals_only=True) # general hermitian problem GH = DictWithRepr("gen-hermitian") GH['mat'] = Ac GH['bmat'] = M GH['v0'] = v0 GH['eval'] = eigh(GH['mat'], GH['bmat'], eigvals_only=True) # general hermitian problem with hermitian M GHc = DictWithRepr("gen-hermitian-Mc") GHc['mat'] = Ac GHc['bmat'] = Mc GHc['v0'] = v0 GHc['eval'] = eigh(GHc['mat'], GHc['bmat'], eigvals_only=True) self.real_test_cases = [SS, GS] self.complex_test_cases = [SH, GH, GHc] class NonSymmetricParams: def __init__(self): self.eigs = eigs self.which = ['LM', 'LR', 'LI'] # , 'SM', 'LR', 'SR', 'LI', 'SI'] self.mattypes = [csr_matrix, aslinearoperator, np.asarray] self.sigmas_OPparts = {None: [None], 0.1: ['r'], 0.1 + 0.1j: ['r', 'i']} # generate matrices # these should all be float32 so that the eigenvalues # are the same in float32 and float64 N = 6 np.random.seed(2300) Ar = generate_matrix(N).astype('f').astype('d') M = generate_matrix(N, hermitian=True, pos_definite=True).astype('f').astype('d') Ac = generate_matrix(N, complex_=True).astype('F').astype('D') v0 = np.random.random(N) # standard real nonsymmetric problem SNR = DictWithRepr("std-real-nonsym") SNR['mat'] = Ar SNR['v0'] = v0 SNR['eval'] = eig(SNR['mat'], left=False, right=False) # general real nonsymmetric problem GNR = DictWithRepr("gen-real-nonsym") 
GNR['mat'] = Ar GNR['bmat'] = M GNR['v0'] = v0 GNR['eval'] = eig(GNR['mat'], GNR['bmat'], left=False, right=False) # standard complex nonsymmetric problem SNC = DictWithRepr("std-cmplx-nonsym") SNC['mat'] = Ac SNC['v0'] = v0 SNC['eval'] = eig(SNC['mat'], left=False, right=False) # general complex nonsymmetric problem GNC = DictWithRepr("gen-cmplx-nonsym") GNC['mat'] = Ac GNC['bmat'] = M GNC['v0'] = v0 GNC['eval'] = eig(GNC['mat'], GNC['bmat'], left=False, right=False) self.real_test_cases = [SNR, GNR] self.complex_test_cases = [SNC, GNC] def test_symmetric_modes(): params = SymmetricParams() k = 2 symmetric = True for D in params.real_test_cases: for typ in 'fd': for which in params.which: for mattype in params.mattypes: for (sigma, modes) in params.sigmas_modes.items(): for mode in modes: eval_evec(symmetric, D, typ, k, which, None, sigma, mattype, None, mode) def test_hermitian_modes(): params = SymmetricParams() k = 2 symmetric = True for D in params.complex_test_cases: for typ in 'FD': for which in params.which: if which == 'BE': continue # BE invalid for complex for mattype in params.mattypes: for sigma in params.sigmas_modes: eval_evec(symmetric, D, typ, k, which, None, sigma, mattype) def test_symmetric_starting_vector(): params = SymmetricParams() symmetric = True for k in [1, 2, 3, 4, 5]: for D in params.real_test_cases: for typ in 'fd': v0 = random.rand(len(D['v0'])).astype(typ) eval_evec(symmetric, D, typ, k, 'LM', v0) def test_symmetric_no_convergence(): np.random.seed(1234) m = generate_matrix(30, hermitian=True, pos_definite=True) tol, rtol, atol = _get_test_tolerance('d') try: w, v = eigsh(m, 4, which='LM', v0=m[:, 0], maxiter=5, tol=tol, ncv=9) raise AssertionError("Spurious no-error exit") except ArpackNoConvergence as err: k = len(err.eigenvalues) if k <= 0: raise AssertionError("Spurious no-eigenvalues-found case") from err w, v = err.eigenvalues, err.eigenvectors assert_allclose(dot(m, v), w * v, rtol=rtol, atol=atol) def test_real_nonsymmetric_modes(): params = NonSymmetricParams() k = 2 symmetric = False for D in params.real_test_cases: for typ in 'fd': for which in params.which: for mattype in params.mattypes: for sigma, OPparts in params.sigmas_OPparts.items(): for OPpart in OPparts: eval_evec(symmetric, D, typ, k, which, None, sigma, mattype, OPpart) def test_complex_nonsymmetric_modes(): params = NonSymmetricParams() k = 2 symmetric = False for D in params.complex_test_cases: for typ in 'DF': for which in params.which: for mattype in params.mattypes: for sigma in params.sigmas_OPparts: eval_evec(symmetric, D, typ, k, which, None, sigma, mattype) def test_standard_nonsymmetric_starting_vector(): params = NonSymmetricParams() sigma = None symmetric = False for k in [1, 2, 3, 4]: for d in params.complex_test_cases: for typ in 'FD': A = d['mat'] n = A.shape[0] v0 = random.rand(n).astype(typ) eval_evec(symmetric, d, typ, k, "LM", v0, sigma) def test_general_nonsymmetric_starting_vector(): params = NonSymmetricParams() sigma = None symmetric = False for k in [1, 2, 3, 4]: for d in params.complex_test_cases: for typ in 'FD': A = d['mat'] n = A.shape[0] v0 = random.rand(n).astype(typ) eval_evec(symmetric, d, typ, k, "LM", v0, sigma) def test_standard_nonsymmetric_no_convergence(): np.random.seed(1234) m = generate_matrix(30, complex_=True) tol, rtol, atol = _get_test_tolerance('d') try: w, v = eigs(m, 4, which='LM', v0=m[:, 0], maxiter=5, tol=tol) raise AssertionError("Spurious no-error exit") except ArpackNoConvergence as err: k = len(err.eigenvalues) if k <= 0: raise 
AssertionError("Spurious no-eigenvalues-found case") from err w, v = err.eigenvalues, err.eigenvectors for ww, vv in zip(w, v.T): assert_allclose(dot(m, vv), ww * vv, rtol=rtol, atol=atol) def test_eigen_bad_shapes(): # A is not square. A = csc_matrix(np.zeros((2, 3))) assert_raises(ValueError, eigs, A) def test_eigen_bad_kwargs(): # Test eigen on wrong keyword argument A = csc_matrix(np.zeros((8, 8))) assert_raises(ValueError, eigs, A, which='XX') def test_ticket_1459_arpack_crash(): for dtype in [np.float32, np.float64]: # This test does not seem to catch the issue for float32, # but we made the same fix there, just to be sure N = 6 k = 2 np.random.seed(2301) A = np.random.random((N, N)).astype(dtype) v0 = np.array([-0.71063568258907849895, -0.83185111795729227424, -0.34365925382227402451, 0.46122533684552280420, -0.58001341115969040629, -0.78844877570084292984e-01], dtype=dtype) # Should not crash: evals, evecs = eigs(A, k, v0=v0) @pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy") def test_linearoperator_deallocation(): # Check that the linear operators used by the Arpack wrappers are # deallocatable by reference counting -- they are big objects, so # Python's cyclic GC may not collect them fast enough before # running out of memory if eigs/eigsh are called in a tight loop. M_d = np.eye(10) M_s = csc_matrix(M_d) M_o = aslinearoperator(M_d) with assert_deallocated(lambda: arpack.SpLuInv(M_s)): pass with assert_deallocated(lambda: arpack.LuInv(M_d)): pass with assert_deallocated(lambda: arpack.IterInv(M_s)): pass with assert_deallocated(lambda: arpack.IterOpInv(M_o, None, 0.3)): pass with assert_deallocated(lambda: arpack.IterOpInv(M_o, M_o, 0.3)): pass def test_parallel_threads(): results = [] v0 = np.random.rand(50) def worker(): x = diags([1, -2, 1], [-1, 0, 1], shape=(50, 50)) w, v = eigs(x, k=3, v0=v0) results.append(w) w, v = eigsh(x, k=3, v0=v0) results.append(w) threads = [threading.Thread(target=worker) for k in range(10)] for t in threads: t.start() for t in threads: t.join() worker() for r in results: assert_allclose(r, results[-1]) def test_reentering(): # Just some linear operator that calls eigs recursively def A_matvec(x): x = diags([1, -2, 1], [-1, 0, 1], shape=(50, 50)) w, v = eigs(x, k=1) return v / w[0] A = LinearOperator(matvec=A_matvec, dtype=float, shape=(50, 50)) # The Fortran code is not reentrant, so this fails (gracefully, not crashing) assert_raises(RuntimeError, eigs, A, k=1) assert_raises(RuntimeError, eigsh, A, k=1) def test_regression_arpackng_1315(): # Check that issue arpack-ng/#1315 is not present. # Adapted from arpack-ng/TESTS/bug_1315_single.c # If this fails, then the installed ARPACK library is faulty. for dtype in [np.float32, np.float64]: np.random.seed(1234) w0 = np.arange(1, 1000+1).astype(dtype) A = diags([w0], [0], shape=(1000, 1000)) v0 = np.random.rand(1000).astype(dtype) w, v = eigs(A, k=9, ncv=2*9+1, which="LM", v0=v0) assert_allclose(np.sort(w), np.sort(w0[-9:]), rtol=1e-4) def test_eigs_for_k_greater(): # Test eigs() for k beyond limits. 
A_sparse = diags([1, -2, 1], [-1, 0, 1], shape=(4, 4)) # sparse A = generate_matrix(4, sparse=False) M_dense = np.random.random((4, 4)) M_sparse = generate_matrix(4, sparse=True) M_linop = aslinearoperator(M_dense) eig_tuple1 = eig(A, b=M_dense) eig_tuple2 = eig(A, b=M_sparse) with suppress_warnings() as sup: sup.filter(RuntimeWarning) assert_equal(eigs(A, M=M_dense, k=3), eig_tuple1) assert_equal(eigs(A, M=M_dense, k=4), eig_tuple1) assert_equal(eigs(A, M=M_dense, k=5), eig_tuple1) assert_equal(eigs(A, M=M_sparse, k=5), eig_tuple2) # M as LinearOperator assert_raises(TypeError, eigs, A, M=M_linop, k=3) # Test 'A' for different types assert_raises(TypeError, eigs, aslinearoperator(A), k=3) assert_raises(TypeError, eigs, A_sparse, k=3) def test_eigsh_for_k_greater(): # Test eigsh() for k beyond limits. A_sparse = diags([1, -2, 1], [-1, 0, 1], shape=(4, 4)) # sparse A = generate_matrix(4, sparse=False) M_dense = generate_matrix_symmetric(4, pos_definite=True) M_sparse = generate_matrix_symmetric(4, pos_definite=True, sparse=True) M_linop = aslinearoperator(M_dense) eig_tuple1 = eigh(A, b=M_dense) eig_tuple2 = eigh(A, b=M_sparse) with suppress_warnings() as sup: sup.filter(RuntimeWarning) assert_equal(eigsh(A, M=M_dense, k=4), eig_tuple1) assert_equal(eigsh(A, M=M_dense, k=5), eig_tuple1) assert_equal(eigsh(A, M=M_sparse, k=5), eig_tuple2) # M as LinearOperator assert_raises(TypeError, eigsh, A, M=M_linop, k=4) # Test 'A' for different types assert_raises(TypeError, eigsh, aslinearoperator(A), k=4) assert_raises(TypeError, eigsh, A_sparse, M=M_dense, k=4) def test_real_eigs_real_k_subset(): np.random.seed(1) n = 10 A = rand(n, n, density=0.5) A.data *= 2 A.data -= 1 v0 = np.ones(n) whichs = ['LM', 'SM', 'LR', 'SR', 'LI', 'SI'] dtypes = [np.float32, np.float64] for which, sigma, dtype in itertools.product(whichs, [None, 0, 5], dtypes): prev_w = np.array([], dtype=dtype) eps = np.finfo(dtype).eps for k in range(1, 9): w, z = eigs(A.astype(dtype), k=k, which=which, sigma=sigma, v0=v0.astype(dtype), tol=0) assert_allclose(np.linalg.norm(A.dot(z) - z * w), 0, atol=np.sqrt(eps)) # Check that the set of eigenvalues for `k` is a subset of that for `k+1` dist = abs(prev_w[:,None] - w).min(axis=1) assert_allclose(dist, 0, atol=np.sqrt(eps)) prev_w = w # Check sort order if sigma is None: d = w else: d = 1 / (w - sigma) if which == 'LM': # ARPACK is systematic for 'LM', but sort order # appears not well defined for other modes assert np.all(np.diff(abs(d)) <= 1e-6)
endolith/scipy
scipy/sparse/linalg/eigen/arpack/tests/test_arpack.py
scipy/sparse/linalg/__init__.py
# -*- coding: utf-8 -*- import attr from navmazing import NavigateToAttribute from navmazing import NavigateToSibling from widgetastic.widget import NoSuchElementException from widgetastic.widget import Text from widgetastic.widget import View from widgetastic_patternfly import BootstrapNav from widgetastic_patternfly import BreadCrumb from widgetastic_patternfly import Button from widgetastic_patternfly import Dropdown from cfme.base.ui import BaseLoggedInPage from cfme.common import Taggable from cfme.common import TagPageView from cfme.exceptions import ItemNotFound from cfme.modeling.base import BaseCollection from cfme.modeling.base import BaseEntity from cfme.utils.appliance.implementations.ui import CFMENavigateStep from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.appliance.implementations.ui import navigator from cfme.utils.log import logger from cfme.utils.providers import get_crud_by_name from cfme.utils.wait import wait_for from widgetastic_manageiq import Accordion from widgetastic_manageiq import BaseEntitiesView from widgetastic_manageiq import ItemsToolBarViewSelector from widgetastic_manageiq import ManageIQTree from widgetastic_manageiq import Search from widgetastic_manageiq import SummaryTable class VolumeSnapshotToolbar(View): """The toolbar on the Volume Snapshot page""" policy = Dropdown('Policy') download = Dropdown('Download') view_selector = View.nested(ItemsToolBarViewSelector) class VolumeSnapshotDetailsToolbar(View): """The toolbar on the Volume Snapshot detail page""" configuration = Dropdown('Configuration') policy = Dropdown('Policy') download = Button('Print or export summary') class VolumeSnapshotDetailsEntities(View): """The entities on the Volume Snapshot detail page""" breadcrumb = BreadCrumb() title = Text('.//div[@id="center_div" or @id="main-content"]//h1') properties = SummaryTable('Properties') relationships = SummaryTable('Relationships') smart_management = SummaryTable('Smart Management') class VolumeSnapshotDetailSidebar(View): """The accordion on the Volume Snapshot details page""" @View.nested class properties(Accordion): # noqa tree = ManageIQTree() @View.nested class relationships(Accordion): # noqa tree = ManageIQTree() class VolumeSnapshotView(BaseLoggedInPage): """A base view for all the Volume Snapshot pages""" @property def in_volume_snapshots(self): return ( self.logged_in_as_current_user and self.navigation.currently_selected == ['Storage', 'Block Storage', 'Volume Snapshots'] ) @property def is_displayed(self): return self.in_volume_snapshots class VolumeSnapshotAllView(VolumeSnapshotView): """The all Volume Snapshot page""" toolbar = View.nested(VolumeSnapshotToolbar) search = View.nested(Search) including_entities = View.include(BaseEntitiesView, use_parent=True) @property def is_displayed(self): return ( self.in_volume_snapshots and self.entities.title.text == 'Cloud Volume Snapshots') @View.nested class my_filters(Accordion): # noqa ACCORDION_NAME = "My Filters" navigation = BootstrapNav('.//div/ul') tree = ManageIQTree() class VolumeSnapshotDetailsView(VolumeSnapshotView): """The detail Volume Snapshot page""" @property def is_displayed(self): obj = self.context['object'] return ( self.in_volume_snapshots and self.entities.title.text == obj.expected_details_title and self.entities.breadcrumb.active_location == obj.expected_details_breadcrumb ) toolbar = View.nested(VolumeSnapshotDetailsToolbar) sidebar = View.nested(VolumeSnapshotDetailSidebar) entities = View.nested(VolumeSnapshotDetailsEntities) 
@attr.s class VolumeSnapshot(BaseEntity, Taggable): """ Model of an Storage Volume Snapshots in cfme Args: name: name of the snapshot provider: provider """ name = attr.ib() provider = attr.ib() def refresh(self): self.provider.refresh_provider_relationships() self.browser.refresh() @property def exists(self): """ check for snapshot exist on UI. Returns: :py:class:`bool` """ view = navigate_to(self.parent, 'All') return self.name in view.entities.all_entity_names @property def status(self): """ status of cloud volume snapshot. Returns: :py:class:`str` Status of volume snapshot. """ view = navigate_to(self.parent, 'All') view.toolbar.view_selector.select("List View") try: ent = view.entities.get_entity(name=self.name, surf_pages=True) return ent.data["status"] except ItemNotFound: return False @property def size(self): """ size of cloud volume snapshot. Returns: :py:class:`int` size of volume snapshot in GB. """ view = navigate_to(self, 'Details') return int(view.entities.properties.get_text_of('Size').split()[0]) @property def volume_name(self): """ volume name of snapshot. Returns: :py:class:`str` respective volume name. """ view = navigate_to(self, 'Details') return view.entities.relationships.get_text_of('Cloud Volume') @property def tenant_name(self): """ Tenant name of snapshot. Returns: :py:class:`str` respective tenant name for snapshot. """ view = navigate_to(self, 'Details') return view.entities.relationships.get_text_of('Cloud Tenants') def delete(self, wait=True): """Delete snapshot """ view = navigate_to(self, 'Details') view.toolbar.configuration.item_select('Delete Cloud Volume Snapshot') view.flash.assert_success_message('Delete initiated for 1 Cloud Volume Snapshot.') if wait: wait_for( lambda: not self.exists, message="Wait snapshot to disappear", delay=20, timeout=800, fail_func=self.refresh ) @attr.s class VolumeSnapshotCollection(BaseCollection): """Collection object for :py:class:`cfme.storage.volume_snapshots.VolumeSnapshot`""" ENTITY = VolumeSnapshot def all(self): """returning all Snapshot objects for respective storage manager type""" view = navigate_to(self, 'All') view.toolbar.view_selector.select("List View") snapshots = [] try: if 'provider' in self.filters: for item in view.entities.elements.read(): if self.filters.get('provider').name in item['Storage Manager']: snapshots.append(self.instantiate(name=item['Name'], provider=self.filters.get('provider'))) else: for item in view.entities.elements.read(): provider_name = item['Storage Manager'].split()[0] provider = get_crud_by_name(provider_name) snapshots.append(self.instantiate(name=item['Name'], provider=provider)) except NoSuchElementException: if snapshots: logger.error('VolumeSnapshotCollection: ' 'NoSuchElementException in the middle of entities read') else: logger.warning('The snapshot table is probably not present or empty') return snapshots @navigator.register(VolumeSnapshotCollection, 'All') class All(CFMENavigateStep): VIEW = VolumeSnapshotAllView prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn') def step(self, *args, **kwargs): self.prerequisite_view.navigation.select('Storage', 'Block Storage', 'Volume Snapshots') @navigator.register(VolumeSnapshot, 'Details') class Details(CFMENavigateStep): VIEW = VolumeSnapshotDetailsView prerequisite = NavigateToAttribute('parent', 'All') def step(self, *args, **kwargs): try: self.prerequisite_view.entities.get_entity(name=self.obj.name, surf_pages=True).click() except ItemNotFound: raise ItemNotFound( 'Could not locate volume snapshot 
{}'.format(self.obj.name) ) @navigator.register(VolumeSnapshot, 'EditTagsFromDetails') class SnapshotDetailEditTag(CFMENavigateStep): """ This navigation destination help to Taggable""" VIEW = TagPageView prerequisite = NavigateToSibling('Details') def step(self, *args, **kwargs): self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
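A rough usage sketch of the collection and entity defined above (the `volume_snapshots` attribute name on the appliance is an assumption; the registration itself is not shown in this module) might be:

# Hypothetical: assumes the collection is registered as `volume_snapshots`.
snapshots = appliance.collections.volume_snapshots.filter({"provider": provider})

for snapshot in snapshots.all():
    if snapshot.exists and snapshot.status == "available":
        # Navigates to the Details page and removes the snapshot, waiting for it
        # to disappear from the All page (see delete() above).
        snapshot.delete(wait=True)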
import re import fauxfactory import pytest from widgetastic_patternfly import Dropdown from cfme import test_requirements from cfme.cloud.provider.openstack import OpenStackProvider from cfme.markers.env_markers.provider import ONE_PER_TYPE from cfme.tests.automate.custom_button import log_request_check from cfme.tests.automate.custom_button import OBJ_TYPE_59 from cfme.tests.automate.custom_button import TextInputDialogView from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.blockers import BZ from cfme.utils.log import logger from cfme.utils.wait import TimedOutError from cfme.utils.wait import wait_for pytestmark = [ pytest.mark.tier(2), test_requirements.custom_button, pytest.mark.usefixtures("setup_provider"), pytest.mark.provider([OpenStackProvider], selector=ONE_PER_TYPE), ] CLOUD_OBJECTS = [ "PROVIDER", "VM_INSTANCE", "TEMPLATE_IMAGE", "AZONE", "CLOUD_NETWORK", "CLOUD_SUBNET", "SECURITY_GROUP", "ROUTER", "CLOUD_OBJECT_STORE_CONTAINER", ] DISPLAY_NAV = { "Single entity": ["Details"], "List": ["All"], "Single and list": ["All", "Details"], } SUBMIT = ["Submit all", "One by one"] @pytest.fixture( params=CLOUD_OBJECTS, ids=[obj.capitalize() for obj in CLOUD_OBJECTS], scope="module" ) def button_group(appliance, request): collection = appliance.collections.button_groups button_gp = collection.create( text=fauxfactory.gen_alphanumeric(), hover=fauxfactory.gen_alphanumeric(), type=getattr(collection, request.param), ) yield button_gp, request.param button_gp.delete_if_exists() @pytest.fixture() def setup_objs(button_group, provider): """ Setup object for specific custom button object type.""" obj_type = button_group[1] if obj_type == "PROVIDER": # Note: For the custom button provider object points # provider, storage managers, network managers block_coll = provider.appliance.collections.block_managers.filter({"provider": provider}) block_manager = block_coll.all()[0] object_coll = provider.appliance.collections.object_managers.filter({"provider": provider}) object_manager = object_coll.all()[0] network_manager = provider.appliance.collections.network_providers.all()[0] obj = [provider, network_manager, block_manager, object_manager] elif obj_type == "VM_INSTANCE": obj = [provider.appliance.provider_based_collection(provider).all()[0]] elif obj_type == "TEMPLATE_IMAGE": obj = [provider.appliance.collections.cloud_images.all()[0]] elif obj_type == "AZONE": obj = [ provider.appliance.collections.cloud_av_zones.filter({"provider": provider}).all()[0] ] elif obj_type == "CLOUD_SUBNET": obj = [provider.appliance.collections.network_subnets.all()[0]] elif obj_type == "SECURITY_GROUP": obj = [provider.appliance.collections.network_security_groups.all()[0]] elif obj_type == "ROUTER": obj = [provider.appliance.collections.network_routers.all()[0]] elif obj_type == "CLOUD_OBJECT_STORE_CONTAINER": obj = [ provider.appliance.collections.object_store_containers.filter( {"provider": provider} ).all()[0] ] elif obj_type == "CLOUD_NETWORK": obj = [provider.appliance.collections.cloud_networks.all()[0]] else: logger.error("No object collected for custom button object type '{}'".format(obj_type)) return obj @pytest.mark.tier(1) @pytest.mark.uncollectif( lambda appliance, button_group: not bool([obj for obj in OBJ_TYPE_59 if obj in button_group]) and appliance.version < "5.10" ) @pytest.mark.parametrize( "display", list(DISPLAY_NAV.keys()), ids=["_".join(item.split()) for item in DISPLAY_NAV.keys()] ) def test_custom_button_display_cloud_obj(appliance, request, display, 
setup_objs, button_group): """ Test custom button display on a targeted page Polarion: assignee: ndhandre initialEstimate: 1/4h caseimportance: critical caseposneg: positive testtype: functional startsin: 5.8 casecomponent: CustomButton tags: custom_button testSteps: 1. Create custom button group with the Object type 2. Create a custom button with specific display 3. Navigate to object type page as per display selected 4. Single entity: Details page of the entity 5. List: All page of the entity 6. Single and list: Both All and Details page of the entity 7. Check for button group and button """ group, obj_type = button_group button = group.buttons.create( text=fauxfactory.gen_alphanumeric(), hover=fauxfactory.gen_alphanumeric(), display_for=display, system="Request", request="InspectMe", ) request.addfinalizer(button.delete_if_exists) for setup_obj in setup_objs: for destination in DISPLAY_NAV[display]: obj = setup_obj.parent if destination == "All" else setup_obj view = navigate_to(obj, destination) custom_button_group = Dropdown(view, group.hover) assert custom_button_group.is_displayed assert custom_button_group.has_item(button.text) @pytest.mark.meta(automates=[1635797, 1574403, 1640592, 1710350, 1732436]) @pytest.mark.uncollectif( lambda appliance, button_group: not bool([obj for obj in OBJ_TYPE_59 if obj in button_group]) and appliance.version < "5.10" ) def test_custom_button_dialog_cloud_obj(appliance, dialog, request, setup_objs, button_group): """ Test custom button with dialog and InspectMe method Polarion: assignee: ndhandre initialEstimate: 1/4h caseimportance: high caseposneg: positive testtype: functional startsin: 5.9 casecomponent: CustomButton tags: custom_button testSteps: 1. Simple TextInput service dialog 2. Create custom button group with the Object type 3. Create a custom button with service dialog 4. Navigate to object Details page 5. Check for button group and button 6. Select/execute button from group dropdown for selected entities 7. Fill dialog and submit 8. 
Check for the proper flash message related to button execution Bugzilla: 1635797 1555331 1574403 1640592 1710350 1732436 """ group, obj_type = button_group # Note: No need to set display_for dialog only work with Single entity button = group.buttons.create( text=fauxfactory.gen_alphanumeric(), hover=fauxfactory.gen_alphanumeric(), dialog=dialog, system="Request", request="InspectMe", ) request.addfinalizer(button.delete_if_exists) for setup_obj in setup_objs: view = navigate_to(setup_obj, "Details") custom_button_group = Dropdown(view, group.hover) assert custom_button_group.has_item(button.text) custom_button_group.item_select(button.text) dialog_view = view.browser.create_view(TextInputDialogView, wait="10s") dialog_view.service_name.fill("Custom Button Execute") # Clear the automation log assert appliance.ssh_client.run_command( 'echo -n "" > /var/www/miq/vmdb/log/automation.log' ) # Submit order request dialog_view.submit.click() if not (BZ(1732436, forced_streams=["5.10", "5.11"]).blocks and obj_type == "PROVIDER"): view.flash.assert_message("Order Request was Submitted") # Check for request in automation log try: wait_for( log_request_check, [appliance, 1], timeout=300, message="Check for expected request count", delay=20, ) except TimedOutError: assert False, "Expected 1 requests not found in automation log" @pytest.mark.meta(automates=[1628224]) @pytest.mark.uncollectif( lambda appliance, button_group: not bool([obj for obj in OBJ_TYPE_59 if obj in button_group]) and appliance.version < "5.10" ) @pytest.mark.parametrize("submit", SUBMIT, ids=[item.replace(" ", "_") for item in SUBMIT]) def test_custom_button_automate_cloud_obj(appliance, request, submit, setup_objs, button_group): """ Test custom button for automate and requests count as per submit Polarion: assignee: ndhandre initialEstimate: 1/4h caseimportance: high caseposneg: positive testtype: functional startsin: 5.9 casecomponent: CustomButton tags: custom_button testSteps: 1. Create custom button group with the Object type 2. Create a custom button with specific submit option and Single and list display 3. Navigate to object type pages (All and Details) 4. Check for button group and button 5. Select/execute button from group dropdown for selected entities 6. Check for the proper flash message related to button execution 7. Check automation log requests. Submitted as per selected submit option or not. 8. Submit all: single request for all entities execution 9 One by one: separate requests for all entities execution Bugzilla: 1628224 1642147 """ group, obj_type = button_group button = group.buttons.create( text=fauxfactory.gen_alphanumeric(), hover=fauxfactory.gen_alphanumeric(), display_for="Single and list", submit=submit, system="Request", request="InspectMe", ) request.addfinalizer(button.delete_if_exists) for setup_obj in setup_objs: for destination in ["All", "Details"]: obj = setup_obj.parent if destination == "All" else setup_obj view = navigate_to(obj, destination) custom_button_group = Dropdown(view, group.hover) assert custom_button_group.has_item(button.text) # Entity count depends on the destination for `All` available entities and # `Details` means a single entity. 
# To-Do: remove Manager check as BZ-1642147 fix if destination == "All": try: paginator = view.paginator except AttributeError: paginator = view.entities.paginator entity_count = min(paginator.items_amount, paginator.items_per_page) # Work around for BZ-1642147 try: if "Manager" in setup_obj.name: entity_count = 1 except AttributeError: pass paginator.check_all() else: entity_count = 1 # Clear the automation log assert appliance.ssh_client.run_command( 'echo -n "" > ' "/var/www/miq/vmdb/log/automation.log" ) custom_button_group.item_select(button.text) diff = "executed" if appliance.version < "5.10" else "launched" view.flash.assert_message('"{btn}" was {diff}'.format(btn=button.text, diff=diff)) # Submit all: single request for all entity execution # One by one: separate requests for all entity execution expected_count = 1 if submit == "Submit all" else entity_count try: wait_for( log_request_check, [appliance, expected_count], timeout=300, message="Check for expected request count", delay=10, ) except TimedOutError: assert False, "Expected {} requests not found in automation log".format( str(expected_count) ) @pytest.mark.uncollectif( lambda appliance, button_group: not bool([obj for obj in OBJ_TYPE_59 if obj in button_group]) and appliance.version < "5.10" ) @pytest.mark.parametrize("expression", ["enablement", "visibility"]) def test_custom_button_expression_cloud_obj( appliance, request, setup_objs, button_group, expression ): """ Test custom button as per expression enablement/visibility. Polarion: assignee: ndhandre initialEstimate: 1/4h caseimportance: medium caseposneg: positive testtype: functional startsin: 5.9 casecomponent: CustomButton tags: custom_button testSteps: 1. Create custom button group with the Object type 2. Create a custom button with expression (Tag) a. Enablement Expression b. Visibility Expression 3. Navigate to object Detail page 4. Check: button should not enable/visible without tag 5. 
Check: button should enable/visible with tag """ group, obj_type = button_group exp = {expression: {"tag": "My Company Tags : Department", "value": "Engineering"}} disabled_txt = "Tag - My Company Tags : Department : Engineering" button = group.buttons.create( text=fauxfactory.gen_alphanumeric(), hover=fauxfactory.gen_alphanumeric(), display_for="Single entity", system="Request", request="InspectMe", **exp ) request.addfinalizer(button.delete_if_exists) tag_cat = appliance.collections.categories.instantiate( name="department", display_name="Department" ) tag = tag_cat.collections.tags.instantiate(name="engineering", display_name="Engineering") for setup_obj in setup_objs: view = navigate_to(setup_obj, "Details") custom_button_group = Dropdown(view, group.text) if tag in setup_obj.get_tags(): if expression == "enablement": assert custom_button_group.item_enabled(button.text) setup_obj.remove_tag(tag) assert not custom_button_group.is_enabled assert re.search(disabled_txt, custom_button_group.hover) elif expression == "visibility": assert button.text in custom_button_group.items setup_obj.remove_tag(tag) assert not custom_button_group.is_displayed else: if expression == "enablement": assert not custom_button_group.is_enabled assert re.search(disabled_txt, custom_button_group.hover) setup_obj.add_tag(tag) assert custom_button_group.item_enabled(button.text) elif expression == "visibility": assert not custom_button_group.is_displayed setup_obj.add_tag(tag) assert button.text in custom_button_group.items @pytest.mark.meta( blockers=[BZ(1680525, unblock=lambda button_group: "CLOUD_NETWORK" not in button_group)] ) @pytest.mark.parametrize("btn_dialog", [False, True], ids=["simple", "dialog"]) def test_custom_button_events_cloud_obj(request, dialog, setup_objs, button_group, btn_dialog): """Test custom button events Polarion: assignee: ndhandre caseimportance: medium initialEstimate: 1/4h caseposneg: positive testtype: functional startsin: 5.10 casecomponent: CustomButton tags: custom_button testSteps: 1. Create a Button Group 2. Create custom button [with dialog/ without dialog] 2. Execute button from respective location 3. Assert event count Bugzilla: 1668023 1702490 1680525 """ group, obj_type = button_group dialog_ = dialog if btn_dialog else None button = group.buttons.create( text="btn_{}".format(fauxfactory.gen_alphanumeric(3)), hover="btn_hover{}".format(fauxfactory.gen_alphanumeric(3)), dialog=dialog_, system="Request", request="InspectMe", ) request.addfinalizer(button.delete_if_exists) for setup_obj in setup_objs: initial_count = len(setup_obj.get_button_events()) view = navigate_to(setup_obj, "Details") custom_button_group = Dropdown(view, group.hover) custom_button_group.item_select(button.text) if btn_dialog: dialog_view = view.browser.create_view(TextInputDialogView, wait="10s") dialog_view.submit.click() view.browser.refresh() current_count = len(setup_obj.get_button_events()) assert current_count == (initial_count + 1)
apagac/cfme_tests
cfme/tests/automate/custom_button/test_cloud_objects.py
cfme/storage/volume_snapshot.py
""" NOT TESTED YET """ import re from cfme.utils.conf import cfme_data from cfme.utils.log import logger from cfme.utils.template.base import log_wrap from cfme.utils.template.base import ProviderTemplateUpload from cfme.utils.template.base import TemplateUploadException class RHEVMTemplateUpload(ProviderTemplateUpload): provider_type = 'rhevm' log_name = 'RHEVM' image_pattern = re.compile( r'<a href="?\'?([^"\']*(?:(?:rhevm|ovirt)[^"\']*\.(?:qcow2|qc2))[^"\'>]*)') @log_wrap('add glance to rhevm provider') def add_glance_to_provider(self): """Add glance as an external provider if needed""" glance_data = cfme_data.template_upload.get(self.glance_key) if self.mgmt.does_glance_server_exist(self.glance_key): logger.info('RHEVM provider already has glance added, skipping step') else: self.mgmt.add_glance_server(name=self.glance_key, description=self.glance_key, url=glance_data.url, requires_authentication=False) return True @log_wrap("import template from Glance server") def import_template_from_glance(self): """Import the template from glance to local rhevm datastore, sucks.""" self.mgmt.import_glance_image( source_storage_domain_name=self.glance_key, target_cluster_name=self.provider_data.template_upload.cluster, source_template_name=self.image_name, target_template_name=self.temp_template_name, target_storage_domain_name=self.provider_data.template_upload.storage_domain) mgmt_network = self.provider_data.template_upload.get('management_network') rv_tmpl = self.mgmt.get_template(self.temp_template_name) if mgmt_network: # network devices, qcow template doesn't have any temp_nics = rv_tmpl.get_nics() nic_args = dict(network_name=mgmt_network, nic_name='eth0') if 'eth0' not in [n.name for n in temp_nics]: rv_tmpl.add_nic(**nic_args) else: rv_tmpl.update_nic(**nic_args) return True @log_wrap('Deploy template to vm - before templatizing') def deploy_vm_from_template(self): """Deploy a VM from the raw template with resource limits set from yaml""" stream_hardware = cfme_data.template_upload.hardware[self.stream] self.mgmt.get_template(self.temp_template_name).deploy( vm_name=self.temp_vm_name, cluster=self.provider_data.template_upload.cluster, storage_domain=self.provider_data.template_upload.storage_domain, cpu=stream_hardware.cores, sockets=stream_hardware.sockets, ram=int(stream_hardware.memory) * 2**30) # GB -> B # check, if the vm is really there if not self.mgmt.does_vm_exist(self.temp_vm_name): raise TemplateUploadException('Failed to deploy VM from imported template') return True @log_wrap('Add db disk to temp vm') def add_disk_to_vm(self): """Add a disk with specs from cfme_data.template_upload Generally for database disk """ temp_vm = self.mgmt.get_vm(self.temp_vm_name) if temp_vm.get_disks_count() > 1: logger.warning('%s Warning: found more than one disk in existing VM (%s).', self.provider_key, self.temp_vm_name) return rhevm_specs = cfme_data.template_upload.template_upload_rhevm disk_kwargs = dict(storage_domain=self.provider_data.template_upload.storage_domain, size=rhevm_specs.get('disk_size', 5000000000), interface=rhevm_specs.get('disk_interface', 'virtio'), format=rhevm_specs.get('disk_format', 'cow'), name=rhevm_specs.get('disk_name')) temp_vm.add_disk(**disk_kwargs) # check, if there are two disks if temp_vm.get_disks_count() < 2: raise TemplateUploadException('%s disk failed to add with specs: %r', self.provider_key, disk_kwargs) logger.info('%s:%s Successfully added disk', self.provider_key, self.temp_vm_name) return True @log_wrap('templatize temp vm with disk') def 
templatize_vm(self): """Templatizes temporary VM. Result is template with two disks. """ self.mgmt.get_vm(self.temp_vm_name).mark_as_template( template_name=self.template_name, cluster_name=self.provider_data.template_upload.cluster, storage_domain_name=self.provider_data.template_upload.get('template_domain', None), delete=False # leave vm in place in case it fails, for debug ) # check, if template is really there if not self.mgmt.does_template_exist(self.template_name): raise TemplateUploadException('%s templatizing %s to %s FAILED', self.provider_key, self.temp_vm_name, self.template_name) logger.info(":%s successfully templatized %s to %s", self.provider_key, self.temp_vm_name, self.template_name) return True @log_wrap('cleanup temp resources') def teardown(self): """Cleans up all the mess that the previous functions left behind.""" logger.info('%s Deleting temp_vm "%s"', self.provider_key, self.temp_vm_name) if self.mgmt.does_vm_exist(self.temp_vm_name): self.mgmt.get_vm(self.temp_vm_name).cleanup() logger.info('%s Deleting temp_template "%s"on storage domain', self.provider_key, self.temp_template_name) if self.mgmt.does_template_exist(self.temp_template_name): self.mgmt.get_template(self.temp_template_name).cleanup() return True def run(self): """call methods for individual steps of CFME templatization of qcow2 image from glance""" try: self.glance_upload() self.add_glance_to_provider() self.import_template_from_glance() self.deploy_vm_from_template() if self.stream == 'upstream': self.manageiq_cleanup() self.add_disk_to_vm() self.templatize_vm() return True except Exception: logger.exception('template creation failed for provider {}'.format( self.provider_data.name)) return False
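Note on the two `raise TemplateUploadException(...)` calls above: they pass printf-style arguments (`'%s ... %r', self.provider_key, ...`), which a plain exception constructor does not interpolate the way `logger` calls do, so the resulting message carries the raw format string plus the extra args. A formatted variant would look like this (sketch only, not the repository's code):

    raise TemplateUploadException(
        '{} disk failed to add with specs: {!r}'.format(self.provider_key, disk_kwargs))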
apagac/cfme_tests
cfme/tests/automate/custom_button/test_cloud_objects.py
cfme/utils/template/rhevm.py
from pandas.util._decorators import cache_readonly import pandas.util.testing as tm import pandas as pd _ts = tm.makeTimeSeries() class TestData(object): @cache_readonly def ts(self): ts = _ts.copy() ts.name = 'ts' return ts @cache_readonly def series(self): series = tm.makeStringSeries() series.name = 'series' return series @cache_readonly def objSeries(self): objSeries = tm.makeObjectSeries() objSeries.name = 'objects' return objSeries @cache_readonly def empty(self): return pd.Series([], index=[])
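A quick illustrative sketch of what `cache_readonly` buys the fixture class above (assumes the `TestData` class exactly as defined; this snippet is not part of the file):

    td = TestData()

    # The decorated property is computed on first access and then cached per
    # instance, so repeated access returns the very same object.
    assert td.ts is td.ts
    assert td.series.name == 'series'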
# -*- coding: utf-8 -*- import pytest from datetime import datetime, timedelta from collections import defaultdict import pandas.util.testing as tm from pandas.core.dtypes.common import is_unsigned_integer_dtype from pandas.core.indexes.api import Index, MultiIndex from pandas.tests.indexes.common import Base from pandas.compat import (range, lrange, lzip, u, text_type, zip, PY3, PY36, PYPY) import operator import numpy as np from pandas import (period_range, date_range, Series, DataFrame, Float64Index, Int64Index, UInt64Index, CategoricalIndex, DatetimeIndex, TimedeltaIndex, PeriodIndex, isna) from pandas.core.index import _get_combined_index, _ensure_index_from_sequences from pandas.util.testing import assert_almost_equal from pandas.compat.numpy import np_datetime64_compat import pandas.core.config as cf from pandas.core.indexes.datetimes import _to_m8 import pandas as pd from pandas._libs.lib import Timestamp class TestIndex(Base): _holder = Index def setup_method(self, method): self.indices = dict(unicodeIndex=tm.makeUnicodeIndex(100), strIndex=tm.makeStringIndex(100), dateIndex=tm.makeDateIndex(100), periodIndex=tm.makePeriodIndex(100), tdIndex=tm.makeTimedeltaIndex(100), intIndex=tm.makeIntIndex(100), uintIndex=tm.makeUIntIndex(100), rangeIndex=tm.makeIntIndex(100), floatIndex=tm.makeFloatIndex(100), boolIndex=Index([True, False]), catIndex=tm.makeCategoricalIndex(100), empty=Index([]), tuples=MultiIndex.from_tuples(lzip( ['foo', 'bar', 'baz'], [1, 2, 3])), repeats=Index([0, 0, 1, 1, 2, 2])) self.setup_indices() def create_index(self): return Index(list('abcde')) def test_new_axis(self): new_index = self.dateIndex[None, :] assert new_index.ndim == 2 assert isinstance(new_index, np.ndarray) def test_copy_and_deepcopy(self, indices): super(TestIndex, self).test_copy_and_deepcopy(indices) new_copy2 = self.intIndex.copy(dtype=int) assert new_copy2.dtype.kind == 'i' def test_constructor(self): # regular instance creation tm.assert_contains_all(self.strIndex, self.strIndex) tm.assert_contains_all(self.dateIndex, self.dateIndex) # casting arr = np.array(self.strIndex) index = Index(arr) tm.assert_contains_all(arr, index) tm.assert_index_equal(self.strIndex, index) # copy arr = np.array(self.strIndex) index = Index(arr, copy=True, name='name') assert isinstance(index, Index) assert index.name == 'name' tm.assert_numpy_array_equal(arr, index.values) arr[0] = "SOMEBIGLONGSTRING" assert index[0] != "SOMEBIGLONGSTRING" # what to do here? # arr = np.array(5.) # pytest.raises(Exception, arr.view, Index) def test_constructor_corner(self): # corner case pytest.raises(TypeError, Index, 0) def test_construction_list_mixed_tuples(self): # see gh-10697: if we are constructing from a mixed list of tuples, # make sure that we are independent of the sorting order. 
idx1 = Index([('A', 1), 'B']) assert isinstance(idx1, Index) assert not isinstance(idx1, MultiIndex) idx2 = Index(['B', ('A', 1)]) assert isinstance(idx2, Index) assert not isinstance(idx2, MultiIndex) @pytest.mark.parametrize('na_value', [None, np.nan]) @pytest.mark.parametrize('vtype', [list, tuple, iter]) def test_construction_list_tuples_nan(self, na_value, vtype): # GH 18505 : valid tuples containing NaN values = [(1, 'two'), (3., na_value)] result = Index(vtype(values)) expected = MultiIndex.from_tuples(values) tm.assert_index_equal(result, expected) def test_constructor_from_index_datetimetz(self): idx = pd.date_range('2015-01-01 10:00', freq='D', periods=3, tz='US/Eastern') result = pd.Index(idx) tm.assert_index_equal(result, idx) assert result.tz == idx.tz result = pd.Index(idx.asobject) tm.assert_index_equal(result, idx) assert result.tz == idx.tz def test_constructor_from_index_timedelta(self): idx = pd.timedelta_range('1 days', freq='D', periods=3) result = pd.Index(idx) tm.assert_index_equal(result, idx) result = pd.Index(idx.asobject) tm.assert_index_equal(result, idx) def test_constructor_from_index_period(self): idx = pd.period_range('2015-01-01', freq='D', periods=3) result = pd.Index(idx) tm.assert_index_equal(result, idx) result = pd.Index(idx.asobject) tm.assert_index_equal(result, idx) def test_constructor_from_series_datetimetz(self): idx = pd.date_range('2015-01-01 10:00', freq='D', periods=3, tz='US/Eastern') result = pd.Index(pd.Series(idx)) tm.assert_index_equal(result, idx) assert result.tz == idx.tz def test_constructor_from_series_timedelta(self): idx = pd.timedelta_range('1 days', freq='D', periods=3) result = pd.Index(pd.Series(idx)) tm.assert_index_equal(result, idx) def test_constructor_from_series_period(self): idx = pd.period_range('2015-01-01', freq='D', periods=3) result = pd.Index(pd.Series(idx)) tm.assert_index_equal(result, idx) def test_constructor_from_series(self): expected = DatetimeIndex([Timestamp('20110101'), Timestamp('20120101'), Timestamp('20130101')]) s = Series([Timestamp('20110101'), Timestamp('20120101'), Timestamp('20130101')]) result = Index(s) tm.assert_index_equal(result, expected) result = DatetimeIndex(s) tm.assert_index_equal(result, expected) # GH 6273 # create from a series, passing a freq s = Series(pd.to_datetime(['1-1-1990', '2-1-1990', '3-1-1990', '4-1-1990', '5-1-1990'])) result = DatetimeIndex(s, freq='MS') expected = DatetimeIndex(['1-1-1990', '2-1-1990', '3-1-1990', '4-1-1990', '5-1-1990'], freq='MS') tm.assert_index_equal(result, expected) df = pd.DataFrame(np.random.rand(5, 3)) df['date'] = ['1-1-1990', '2-1-1990', '3-1-1990', '4-1-1990', '5-1-1990'] result = DatetimeIndex(df['date'], freq='MS') expected.name = 'date' tm.assert_index_equal(result, expected) assert df['date'].dtype == object exp = pd.Series(['1-1-1990', '2-1-1990', '3-1-1990', '4-1-1990', '5-1-1990'], name='date') tm.assert_series_equal(df['date'], exp) # GH 6274 # infer freq of same result = pd.infer_freq(df['date']) assert result == 'MS' def test_constructor_ndarray_like(self): # GH 5460#issuecomment-44474502 # it should be possible to convert any object that satisfies the numpy # ndarray interface directly into an Index class ArrayLike(object): def __init__(self, array): self.array = array def __array__(self, dtype=None): return self.array for array in [np.arange(5), np.array(['a', 'b', 'c']), date_range('2000-01-01', periods=3).values]: expected = pd.Index(array) result = pd.Index(ArrayLike(array)) tm.assert_index_equal(result, expected) 
@pytest.mark.parametrize('dtype', [ int, 'int64', 'int32', 'int16', 'int8', 'uint64', 'uint32', 'uint16', 'uint8']) def test_constructor_int_dtype_float(self, dtype): # GH 18400 if is_unsigned_integer_dtype(dtype): index_type = UInt64Index else: index_type = Int64Index expected = index_type([0, 1, 2, 3]) result = Index([0., 1., 2., 3.], dtype=dtype) tm.assert_index_equal(result, expected) def test_constructor_int_dtype_nan(self): # see gh-15187 data = [np.nan] msg = "cannot convert" with tm.assert_raises_regex(ValueError, msg): Index(data, dtype='int64') with tm.assert_raises_regex(ValueError, msg): Index(data, dtype='uint64') # This, however, should not break # because NaN is float. expected = Float64Index(data) result = Index(data, dtype='float') tm.assert_index_equal(result, expected) def test_index_ctor_infer_nan_nat(self): # GH 13467 exp = pd.Float64Index([np.nan, np.nan]) assert exp.dtype == np.float64 tm.assert_index_equal(Index([np.nan, np.nan]), exp) tm.assert_index_equal(Index(np.array([np.nan, np.nan])), exp) exp = pd.DatetimeIndex([pd.NaT, pd.NaT]) assert exp.dtype == 'datetime64[ns]' tm.assert_index_equal(Index([pd.NaT, pd.NaT]), exp) tm.assert_index_equal(Index(np.array([pd.NaT, pd.NaT])), exp) exp = pd.DatetimeIndex([pd.NaT, pd.NaT]) assert exp.dtype == 'datetime64[ns]' for data in [[pd.NaT, np.nan], [np.nan, pd.NaT], [np.nan, np.datetime64('nat')], [np.datetime64('nat'), np.nan]]: tm.assert_index_equal(Index(data), exp) tm.assert_index_equal(Index(np.array(data, dtype=object)), exp) exp = pd.TimedeltaIndex([pd.NaT, pd.NaT]) assert exp.dtype == 'timedelta64[ns]' for data in [[np.nan, np.timedelta64('nat')], [np.timedelta64('nat'), np.nan], [pd.NaT, np.timedelta64('nat')], [np.timedelta64('nat'), pd.NaT]]: tm.assert_index_equal(Index(data), exp) tm.assert_index_equal(Index(np.array(data, dtype=object)), exp) # mixed np.datetime64/timedelta64 nat results in object data = [np.datetime64('nat'), np.timedelta64('nat')] exp = pd.Index(data, dtype=object) tm.assert_index_equal(Index(data), exp) tm.assert_index_equal(Index(np.array(data, dtype=object)), exp) data = [np.timedelta64('nat'), np.datetime64('nat')] exp = pd.Index(data, dtype=object) tm.assert_index_equal(Index(data), exp) tm.assert_index_equal(Index(np.array(data, dtype=object)), exp) def test_index_ctor_infer_periodindex(self): xp = period_range('2012-1-1', freq='M', periods=3) rs = Index(xp) tm.assert_index_equal(rs, xp) assert isinstance(rs, PeriodIndex) def test_constructor_simple_new(self): idx = Index([1, 2, 3, 4, 5], name='int') result = idx._simple_new(idx, 'int') tm.assert_index_equal(result, idx) idx = Index([1.1, np.nan, 2.2, 3.0], name='float') result = idx._simple_new(idx, 'float') tm.assert_index_equal(result, idx) idx = Index(['A', 'B', 'C', np.nan], name='obj') result = idx._simple_new(idx, 'obj') tm.assert_index_equal(result, idx) def test_constructor_dtypes(self): for idx in [Index(np.array([1, 2, 3], dtype=int)), Index(np.array([1, 2, 3], dtype=int), dtype=int), Index([1, 2, 3], dtype=int)]: assert isinstance(idx, Int64Index) # These should coerce for idx in [Index(np.array([1., 2., 3.], dtype=float), dtype=int), Index([1., 2., 3.], dtype=int)]: assert isinstance(idx, Int64Index) for idx in [Index(np.array([1., 2., 3.], dtype=float)), Index(np.array([1, 2, 3], dtype=int), dtype=float), Index(np.array([1., 2., 3.], dtype=float), dtype=float), Index([1, 2, 3], dtype=float), Index([1., 2., 3.], dtype=float)]: assert isinstance(idx, Float64Index) for idx in [Index(np.array([True, False, True], 
dtype=bool)), Index([True, False, True]), Index(np.array([True, False, True], dtype=bool), dtype=bool), Index([True, False, True], dtype=bool)]: assert isinstance(idx, Index) assert idx.dtype == object for idx in [Index(np.array([1, 2, 3], dtype=int), dtype='category'), Index([1, 2, 3], dtype='category'), Index(np.array([np_datetime64_compat('2011-01-01'), np_datetime64_compat('2011-01-02')]), dtype='category'), Index([datetime(2011, 1, 1), datetime(2011, 1, 2)], dtype='category')]: assert isinstance(idx, CategoricalIndex) for idx in [Index(np.array([np_datetime64_compat('2011-01-01'), np_datetime64_compat('2011-01-02')])), Index([datetime(2011, 1, 1), datetime(2011, 1, 2)])]: assert isinstance(idx, DatetimeIndex) for idx in [Index(np.array([np_datetime64_compat('2011-01-01'), np_datetime64_compat('2011-01-02')]), dtype=object), Index([datetime(2011, 1, 1), datetime(2011, 1, 2)], dtype=object)]: assert not isinstance(idx, DatetimeIndex) assert isinstance(idx, Index) assert idx.dtype == object for idx in [Index(np.array([np.timedelta64(1, 'D'), np.timedelta64( 1, 'D')])), Index([timedelta(1), timedelta(1)])]: assert isinstance(idx, TimedeltaIndex) for idx in [Index(np.array([np.timedelta64(1, 'D'), np.timedelta64(1, 'D')]), dtype=object), Index([timedelta(1), timedelta(1)], dtype=object)]: assert not isinstance(idx, TimedeltaIndex) assert isinstance(idx, Index) assert idx.dtype == object def test_constructor_dtypes_datetime(self): for tz in [None, 'UTC', 'US/Eastern', 'Asia/Tokyo']: idx = pd.date_range('2011-01-01', periods=5, tz=tz) dtype = idx.dtype # pass values without timezone, as DatetimeIndex localizes it for values in [pd.date_range('2011-01-01', periods=5).values, pd.date_range('2011-01-01', periods=5).asi8]: for res in [pd.Index(values, tz=tz), pd.Index(values, dtype=dtype), pd.Index(list(values), tz=tz), pd.Index(list(values), dtype=dtype)]: tm.assert_index_equal(res, idx) # check compat with DatetimeIndex for res in [pd.DatetimeIndex(values, tz=tz), pd.DatetimeIndex(values, dtype=dtype), pd.DatetimeIndex(list(values), tz=tz), pd.DatetimeIndex(list(values), dtype=dtype)]: tm.assert_index_equal(res, idx) def test_constructor_dtypes_timedelta(self): idx = pd.timedelta_range('1 days', periods=5) dtype = idx.dtype for values in [idx.values, idx.asi8]: for res in [pd.Index(values, dtype=dtype), pd.Index(list(values), dtype=dtype)]: tm.assert_index_equal(res, idx) # check compat with TimedeltaIndex for res in [pd.TimedeltaIndex(values, dtype=dtype), pd.TimedeltaIndex(list(values), dtype=dtype)]: tm.assert_index_equal(res, idx) def test_view_with_args(self): restricted = ['unicodeIndex', 'strIndex', 'catIndex', 'boolIndex', 'empty'] for i in restricted: ind = self.indices[i] # with arguments pytest.raises(TypeError, lambda: ind.view('i8')) # these are ok for i in list(set(self.indices.keys()) - set(restricted)): ind = self.indices[i] # with arguments ind.view('i8') def test_astype(self): casted = self.intIndex.astype('i8') # it works! 
casted.get_loc(5) # pass on name self.intIndex.name = 'foobar' casted = self.intIndex.astype('i8') assert casted.name == 'foobar' def test_equals_object(self): # same assert Index(['a', 'b', 'c']).equals(Index(['a', 'b', 'c'])) # different length assert not Index(['a', 'b', 'c']).equals(Index(['a', 'b'])) # same length, different values assert not Index(['a', 'b', 'c']).equals(Index(['a', 'b', 'd'])) # Must also be an Index assert not Index(['a', 'b', 'c']).equals(['a', 'b', 'c']) def test_insert(self): # GH 7256 # validate neg/pos inserts result = Index(['b', 'c', 'd']) # test 0th element tm.assert_index_equal(Index(['a', 'b', 'c', 'd']), result.insert(0, 'a')) # test Nth element that follows Python list behavior tm.assert_index_equal(Index(['b', 'c', 'e', 'd']), result.insert(-1, 'e')) # test loc +/- neq (0, -1) tm.assert_index_equal(result.insert(1, 'z'), result.insert(-2, 'z')) # test empty null_index = Index([]) tm.assert_index_equal(Index(['a']), null_index.insert(0, 'a')) # GH 18295 (test missing) expected = Index(['a', np.nan, 'b', 'c']) for na in (np.nan, pd.NaT, None): result = Index(list('abc')).insert(1, na) tm.assert_index_equal(result, expected) def test_delete(self): idx = Index(['a', 'b', 'c', 'd'], name='idx') expected = Index(['b', 'c', 'd'], name='idx') result = idx.delete(0) tm.assert_index_equal(result, expected) assert result.name == expected.name expected = Index(['a', 'b', 'c'], name='idx') result = idx.delete(-1) tm.assert_index_equal(result, expected) assert result.name == expected.name with pytest.raises((IndexError, ValueError)): # either depending on numpy version result = idx.delete(5) def test_identical(self): # index i1 = Index(['a', 'b', 'c']) i2 = Index(['a', 'b', 'c']) assert i1.identical(i2) i1 = i1.rename('foo') assert i1.equals(i2) assert not i1.identical(i2) i2 = i2.rename('foo') assert i1.identical(i2) i3 = Index([('a', 'a'), ('a', 'b'), ('b', 'a')]) i4 = Index([('a', 'a'), ('a', 'b'), ('b', 'a')], tupleize_cols=False) assert not i3.identical(i4) def test_is_(self): ind = Index(range(10)) assert ind.is_(ind) assert ind.is_(ind.view().view().view().view()) assert not ind.is_(Index(range(10))) assert not ind.is_(ind.copy()) assert not ind.is_(ind.copy(deep=False)) assert not ind.is_(ind[:]) assert not ind.is_(ind.view(np.ndarray).view(Index)) assert not ind.is_(np.array(range(10))) # quasi-implementation dependent assert ind.is_(ind.view()) ind2 = ind.view() ind2.name = 'bob' assert ind.is_(ind2) assert ind2.is_(ind) # doesn't matter if Indices are *actually* views of underlying data, assert not ind.is_(Index(ind.values)) arr = np.array(range(1, 11)) ind1 = Index(arr, copy=False) ind2 = Index(arr, copy=False) assert not ind1.is_(ind2) def test_asof(self): d = self.dateIndex[0] assert self.dateIndex.asof(d) == d assert isna(self.dateIndex.asof(d - timedelta(1))) d = self.dateIndex[-1] assert self.dateIndex.asof(d + timedelta(1)) == d d = self.dateIndex[0].to_pydatetime() assert isinstance(self.dateIndex.asof(d), Timestamp) def test_asof_datetime_partial(self): idx = pd.date_range('2010-01-01', periods=2, freq='m') expected = Timestamp('2010-02-28') result = idx.asof('2010-02') assert result == expected assert not isinstance(result, Index) def test_nanosecond_index_access(self): s = Series([Timestamp('20130101')]).values.view('i8')[0] r = DatetimeIndex([s + 50 + i for i in range(100)]) x = Series(np.random.randn(100), index=r) first_value = x.asof(x.index[0]) # this does not yet work, as parsing strings is done via dateutil # assert first_value == 
x['2013-01-01 00:00:00.000000050+0000'] exp_ts = np_datetime64_compat('2013-01-01 00:00:00.000000050+0000', 'ns') assert first_value == x[Timestamp(exp_ts)] def test_comparators(self): index = self.dateIndex element = index[len(index) // 2] element = _to_m8(element) arr = np.array(index) def _check(op): arr_result = op(arr, element) index_result = op(index, element) assert isinstance(index_result, np.ndarray) tm.assert_numpy_array_equal(arr_result, index_result) _check(operator.eq) _check(operator.ne) _check(operator.gt) _check(operator.lt) _check(operator.ge) _check(operator.le) def test_booleanindex(self): boolIdx = np.repeat(True, len(self.strIndex)).astype(bool) boolIdx[5:30:2] = False subIndex = self.strIndex[boolIdx] for i, val in enumerate(subIndex): assert subIndex.get_loc(val) == i subIndex = self.strIndex[list(boolIdx)] for i, val in enumerate(subIndex): assert subIndex.get_loc(val) == i def test_fancy(self): sl = self.strIndex[[1, 2, 3]] for i in sl: assert i == sl[sl.get_loc(i)] def test_empty_fancy(self): empty_farr = np.array([], dtype=np.float_) empty_iarr = np.array([], dtype=np.int_) empty_barr = np.array([], dtype=np.bool_) # pd.DatetimeIndex is excluded, because it overrides getitem and should # be tested separately. for idx in [self.strIndex, self.intIndex, self.floatIndex]: empty_idx = idx.__class__([]) assert idx[[]].identical(empty_idx) assert idx[empty_iarr].identical(empty_idx) assert idx[empty_barr].identical(empty_idx) # np.ndarray only accepts ndarray of int & bool dtypes, so should # Index. pytest.raises(IndexError, idx.__getitem__, empty_farr) def test_getitem_error(self, indices): with pytest.raises(IndexError): indices[101] with pytest.raises(IndexError): indices['no_int'] def test_intersection(self): first = self.strIndex[:20] second = self.strIndex[:10] intersect = first.intersection(second) assert tm.equalContents(intersect, second) # Corner cases inter = first.intersection(first) assert inter is first idx1 = Index([1, 2, 3, 4, 5], name='idx') # if target has the same name, it is preserved idx2 = Index([3, 4, 5, 6, 7], name='idx') expected2 = Index([3, 4, 5], name='idx') result2 = idx1.intersection(idx2) tm.assert_index_equal(result2, expected2) assert result2.name == expected2.name # if target name is different, it will be reset idx3 = Index([3, 4, 5, 6, 7], name='other') expected3 = Index([3, 4, 5], name=None) result3 = idx1.intersection(idx3) tm.assert_index_equal(result3, expected3) assert result3.name == expected3.name # non monotonic idx1 = Index([5, 3, 2, 4, 1], name='idx') idx2 = Index([4, 7, 6, 5, 3], name='idx') expected = Index([5, 3, 4], name='idx') result = idx1.intersection(idx2) tm.assert_index_equal(result, expected) idx2 = Index([4, 7, 6, 5, 3], name='other') expected = Index([5, 3, 4], name=None) result = idx1.intersection(idx2) tm.assert_index_equal(result, expected) # non-monotonic non-unique idx1 = Index(['A', 'B', 'A', 'C']) idx2 = Index(['B', 'D']) expected = Index(['B'], dtype='object') result = idx1.intersection(idx2) tm.assert_index_equal(result, expected) idx2 = Index(['B', 'D', 'A']) expected = Index(['A', 'B', 'A'], dtype='object') result = idx1.intersection(idx2) tm.assert_index_equal(result, expected) # preserve names first = self.strIndex[5:20] second = self.strIndex[:10] first.name = 'A' second.name = 'A' intersect = first.intersection(second) assert intersect.name == 'A' second.name = 'B' intersect = first.intersection(second) assert intersect.name is None first.name = None second.name = 'B' intersect = 
first.intersection(second) assert intersect.name is None def test_intersect_str_dates(self): dt_dates = [datetime(2012, 2, 9), datetime(2012, 2, 22)] i1 = Index(dt_dates, dtype=object) i2 = Index(['aa'], dtype=object) res = i2.intersection(i1) assert len(res) == 0 def test_union(self): first = self.strIndex[5:20] second = self.strIndex[:10] everything = self.strIndex[:20] union = first.union(second) assert tm.equalContents(union, everything) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: result = first.union(case) assert tm.equalContents(result, everything) # Corner cases union = first.union(first) assert union is first union = first.union([]) assert union is first union = Index([]).union(first) assert union is first # preserve names first = Index(list('ab'), name='A') second = Index(list('ab'), name='B') union = first.union(second) expected = Index(list('ab'), name=None) tm.assert_index_equal(union, expected) first = Index(list('ab'), name='A') second = Index([], name='B') union = first.union(second) expected = Index(list('ab'), name=None) tm.assert_index_equal(union, expected) first = Index([], name='A') second = Index(list('ab'), name='B') union = first.union(second) expected = Index(list('ab'), name=None) tm.assert_index_equal(union, expected) first = Index(list('ab')) second = Index(list('ab'), name='B') union = first.union(second) expected = Index(list('ab'), name='B') tm.assert_index_equal(union, expected) first = Index([]) second = Index(list('ab'), name='B') union = first.union(second) expected = Index(list('ab'), name='B') tm.assert_index_equal(union, expected) first = Index(list('ab')) second = Index([], name='B') union = first.union(second) expected = Index(list('ab'), name='B') tm.assert_index_equal(union, expected) first = Index(list('ab'), name='A') second = Index(list('ab')) union = first.union(second) expected = Index(list('ab'), name='A') tm.assert_index_equal(union, expected) first = Index(list('ab'), name='A') second = Index([]) union = first.union(second) expected = Index(list('ab'), name='A') tm.assert_index_equal(union, expected) first = Index([], name='A') second = Index(list('ab')) union = first.union(second) expected = Index(list('ab'), name='A') tm.assert_index_equal(union, expected) with tm.assert_produces_warning(RuntimeWarning): firstCat = self.strIndex.union(self.dateIndex) secondCat = self.strIndex.union(self.strIndex) if self.dateIndex.dtype == np.object_: appended = np.append(self.strIndex, self.dateIndex) else: appended = np.append(self.strIndex, self.dateIndex.astype('O')) assert tm.equalContents(firstCat, appended) assert tm.equalContents(secondCat, self.strIndex) tm.assert_contains_all(self.strIndex, firstCat) tm.assert_contains_all(self.strIndex, secondCat) tm.assert_contains_all(self.dateIndex, firstCat) def test_add(self): idx = self.strIndex expected = Index(self.strIndex.values * 2) tm.assert_index_equal(idx + idx, expected) tm.assert_index_equal(idx + idx.tolist(), expected) tm.assert_index_equal(idx.tolist() + idx, expected) # test add and radd idx = Index(list('abc')) expected = Index(['a1', 'b1', 'c1']) tm.assert_index_equal(idx + '1', expected) expected = Index(['1a', '1b', '1c']) tm.assert_index_equal('1' + idx, expected) def test_sub(self): idx = self.strIndex pytest.raises(TypeError, lambda: idx - 'a') pytest.raises(TypeError, lambda: idx - idx) pytest.raises(TypeError, lambda: idx - idx.tolist()) pytest.raises(TypeError, lambda: idx.tolist() - idx) def test_map_identity_mapping(self): # 
GH 12766 for name, cur_index in self.indices.items(): tm.assert_index_equal(cur_index, cur_index.map(lambda x: x)) def test_map_with_tuples(self): # GH 12766 # Test that returning a single tuple from an Index # returns an Index. boolean_index = tm.makeIntIndex(3).map(lambda x: (x,)) expected = Index([(0,), (1,), (2,)]) tm.assert_index_equal(boolean_index, expected) # Test that returning a tuple from a map of a single index # returns a MultiIndex object. boolean_index = tm.makeIntIndex(3).map(lambda x: (x, x == 1)) expected = MultiIndex.from_tuples([(0, False), (1, True), (2, False)]) tm.assert_index_equal(boolean_index, expected) # Test that returning a single object from a MultiIndex # returns an Index. first_level = ['foo', 'bar', 'baz'] multi_index = MultiIndex.from_tuples(lzip(first_level, [1, 2, 3])) reduced_index = multi_index.map(lambda x: x[0]) tm.assert_index_equal(reduced_index, Index(first_level)) def test_map_tseries_indices_return_index(self): date_index = tm.makeDateIndex(10) exp = Index([1] * 10) tm.assert_index_equal(exp, date_index.map(lambda x: 1)) period_index = tm.makePeriodIndex(10) tm.assert_index_equal(exp, period_index.map(lambda x: 1)) tdelta_index = tm.makeTimedeltaIndex(10) tm.assert_index_equal(exp, tdelta_index.map(lambda x: 1)) date_index = tm.makeDateIndex(24, freq='h', name='hourly') exp = Index(range(24), name='hourly') tm.assert_index_equal(exp, date_index.map(lambda x: x.hour)) @pytest.mark.parametrize( "mapper", [ lambda values, index: {i: e for e, i in zip(values, index)}, lambda values, index: pd.Series(values, index)]) def test_map_dictlike(self, mapper): # GH 12756 expected = Index(['foo', 'bar', 'baz']) result = tm.makeIntIndex(3).map(mapper(expected.values, [0, 1, 2])) tm.assert_index_equal(result, expected) for name in self.indices.keys(): if name == 'catIndex': # Tested in test_categorical continue elif name == 'repeats': # Cannot map duplicated index continue index = self.indices[name] expected = Index(np.arange(len(index), 0, -1)) # to match proper result coercion for uints if name == 'uintIndex': expected = expected.astype('uint64') elif name == 'empty': expected = Index([]) result = index.map(mapper(expected, index)) tm.assert_index_equal(result, expected) def test_map_with_non_function_missing_values(self): # GH 12756 expected = Index([2., np.nan, 'foo']) input = Index([2, 1, 0]) mapper = Series(['foo', 2., 'baz'], index=[0, 2, -1]) tm.assert_index_equal(expected, input.map(mapper)) mapper = {0: 'foo', 2: 2.0, -1: 'baz'} tm.assert_index_equal(expected, input.map(mapper)) def test_map_na_exclusion(self): idx = Index([1.5, np.nan, 3, np.nan, 5]) result = idx.map(lambda x: x * 2, na_action='ignore') exp = idx * 2 tm.assert_index_equal(result, exp) def test_map_defaultdict(self): idx = Index([1, 2, 3]) default_dict = defaultdict(lambda: 'blank') default_dict[1] = 'stuff' result = idx.map(default_dict) expected = Index(['stuff', 'blank', 'blank']) tm.assert_index_equal(result, expected) def test_append_multiple(self): index = Index(['a', 'b', 'c', 'd', 'e', 'f']) foos = [index[:2], index[2:4], index[4:]] result = foos[0].append(foos[1:]) tm.assert_index_equal(result, index) # empty result = index.append([]) tm.assert_index_equal(result, index) def test_append_empty_preserve_name(self): left = Index([], name='foo') right = Index([1, 2, 3], name='foo') result = left.append(right) assert result.name == 'foo' left = Index([], name='foo') right = Index([1, 2, 3], name='bar') result = left.append(right) assert result.name is None def 
test_add_string(self): # from bug report index = Index(['a', 'b', 'c']) index2 = index + 'foo' assert 'a' not in index2 assert 'afoo' in index2 def test_iadd_string(self): index = pd.Index(['a', 'b', 'c']) # doesn't fail test unless there is a check before `+=` assert 'a' in index index += '_x' assert 'a_x' in index def test_difference(self): first = self.strIndex[5:20] second = self.strIndex[:10] answer = self.strIndex[10:20] first.name = 'name' # different names result = first.difference(second) assert tm.equalContents(result, answer) assert result.name is None # same names second.name = 'name' result = first.difference(second) assert result.name == 'name' # with empty result = first.difference([]) assert tm.equalContents(result, first) assert result.name == first.name # with everything result = first.difference(first) assert len(result) == 0 assert result.name == first.name def test_symmetric_difference(self): # smoke idx1 = Index([1, 2, 3, 4], name='idx1') idx2 = Index([2, 3, 4, 5]) result = idx1.symmetric_difference(idx2) expected = Index([1, 5]) assert tm.equalContents(result, expected) assert result.name is None # __xor__ syntax expected = idx1 ^ idx2 assert tm.equalContents(result, expected) assert result.name is None # multiIndex idx1 = MultiIndex.from_tuples(self.tuples) idx2 = MultiIndex.from_tuples([('foo', 1), ('bar', 3)]) result = idx1.symmetric_difference(idx2) expected = MultiIndex.from_tuples([('bar', 2), ('baz', 3), ('bar', 3)]) assert tm.equalContents(result, expected) # nans: # GH 13514 change: {nan} - {nan} == {} # (GH 6444, sorting of nans, is no longer an issue) idx1 = Index([1, np.nan, 2, 3]) idx2 = Index([0, 1, np.nan]) idx3 = Index([0, 1]) result = idx1.symmetric_difference(idx2) expected = Index([0.0, 2.0, 3.0]) tm.assert_index_equal(result, expected) result = idx1.symmetric_difference(idx3) expected = Index([0.0, 2.0, 3.0, np.nan]) tm.assert_index_equal(result, expected) # other not an Index: idx1 = Index([1, 2, 3, 4], name='idx1') idx2 = np.array([2, 3, 4, 5]) expected = Index([1, 5]) result = idx1.symmetric_difference(idx2) assert tm.equalContents(result, expected) assert result.name == 'idx1' result = idx1.symmetric_difference(idx2, result_name='new_name') assert tm.equalContents(result, expected) assert result.name == 'new_name' def test_is_numeric(self): assert not self.dateIndex.is_numeric() assert not self.strIndex.is_numeric() assert self.intIndex.is_numeric() assert self.floatIndex.is_numeric() assert not self.catIndex.is_numeric() def test_is_object(self): assert self.strIndex.is_object() assert self.boolIndex.is_object() assert not self.catIndex.is_object() assert not self.intIndex.is_object() assert not self.dateIndex.is_object() assert not self.floatIndex.is_object() def test_is_all_dates(self): assert self.dateIndex.is_all_dates assert not self.strIndex.is_all_dates assert not self.intIndex.is_all_dates def test_summary(self): self._check_method_works(Index.summary) # GH3869 ind = Index(['{other}%s', "~:{range}:0"], name='A') result = ind.summary() # shouldn't be formatted accidentally. 
assert '~:{range}:0' in result assert '{other}%s' in result def test_format(self): self._check_method_works(Index.format) # GH 14626 # windows has different precision on datetime.datetime.now (it doesn't # include us since the default for Timestamp shows these but Index # formating does not we are skipping) now = datetime.now() if not str(now).endswith("000"): index = Index([now]) formatted = index.format() expected = [str(index[0])] assert formatted == expected # 2845 index = Index([1, 2.0 + 3.0j, np.nan]) formatted = index.format() expected = [str(index[0]), str(index[1]), u('NaN')] assert formatted == expected # is this really allowed? index = Index([1, 2.0 + 3.0j, None]) formatted = index.format() expected = [str(index[0]), str(index[1]), u('NaN')] assert formatted == expected self.strIndex[:0].format() def test_format_with_name_time_info(self): # bug I fixed 12/20/2011 inc = timedelta(hours=4) dates = Index([dt + inc for dt in self.dateIndex], name='something') formatted = dates.format(name=True) assert formatted[0] == 'something' def test_format_datetime_with_time(self): t = Index([datetime(2012, 2, 7), datetime(2012, 2, 7, 23)]) result = t.format() expected = ['2012-02-07 00:00:00', '2012-02-07 23:00:00'] assert len(result) == 2 assert result == expected def test_format_none(self): values = ['a', 'b', 'c', None] idx = Index(values) idx.format() assert idx[3] is None def test_logical_compat(self): idx = self.create_index() assert idx.all() == idx.values.all() assert idx.any() == idx.values.any() def _check_method_works(self, method): method(self.empty) method(self.dateIndex) method(self.unicodeIndex) method(self.strIndex) method(self.intIndex) method(self.tuples) method(self.catIndex) def test_get_indexer(self): idx1 = Index([1, 2, 3, 4, 5]) idx2 = Index([2, 4, 6]) r1 = idx1.get_indexer(idx2) assert_almost_equal(r1, np.array([1, 3, -1], dtype=np.intp)) r1 = idx2.get_indexer(idx1, method='pad') e1 = np.array([-1, 0, 0, 1, 1], dtype=np.intp) assert_almost_equal(r1, e1) r2 = idx2.get_indexer(idx1[::-1], method='pad') assert_almost_equal(r2, e1[::-1]) rffill1 = idx2.get_indexer(idx1, method='ffill') assert_almost_equal(r1, rffill1) r1 = idx2.get_indexer(idx1, method='backfill') e1 = np.array([0, 0, 1, 1, 2], dtype=np.intp) assert_almost_equal(r1, e1) rbfill1 = idx2.get_indexer(idx1, method='bfill') assert_almost_equal(r1, rbfill1) r2 = idx2.get_indexer(idx1[::-1], method='backfill') assert_almost_equal(r2, e1[::-1]) def test_get_indexer_invalid(self): # GH10411 idx = Index(np.arange(10)) with tm.assert_raises_regex(ValueError, 'tolerance argument'): idx.get_indexer([1, 0], tolerance=1) with tm.assert_raises_regex(ValueError, 'limit argument'): idx.get_indexer([1, 0], limit=1) @pytest.mark.parametrize( 'method, tolerance, indexer, expected', [ ('pad', None, [0, 5, 9], [0, 5, 9]), ('backfill', None, [0, 5, 9], [0, 5, 9]), ('nearest', None, [0, 5, 9], [0, 5, 9]), ('pad', 0, [0, 5, 9], [0, 5, 9]), ('backfill', 0, [0, 5, 9], [0, 5, 9]), ('nearest', 0, [0, 5, 9], [0, 5, 9]), ('pad', None, [0.2, 1.8, 8.5], [0, 1, 8]), ('backfill', None, [0.2, 1.8, 8.5], [1, 2, 9]), ('nearest', None, [0.2, 1.8, 8.5], [0, 2, 9]), ('pad', 1, [0.2, 1.8, 8.5], [0, 1, 8]), ('backfill', 1, [0.2, 1.8, 8.5], [1, 2, 9]), ('nearest', 1, [0.2, 1.8, 8.5], [0, 2, 9]), ('pad', 0.2, [0.2, 1.8, 8.5], [0, -1, -1]), ('backfill', 0.2, [0.2, 1.8, 8.5], [-1, 2, -1]), ('nearest', 0.2, [0.2, 1.8, 8.5], [0, 2, -1])]) def test_get_indexer_nearest(self, method, tolerance, indexer, expected): idx = Index(np.arange(10)) actual = 
idx.get_indexer(indexer, method=method, tolerance=tolerance) tm.assert_numpy_array_equal(actual, np.array(expected, dtype=np.intp)) @pytest.mark.parametrize('listtype', [list, tuple, Series, np.array]) @pytest.mark.parametrize( 'tolerance, expected', list(zip([[0.3, 0.3, 0.1], [0.2, 0.1, 0.1], [0.1, 0.5, 0.5]], [[0, 2, -1], [0, -1, -1], [-1, 2, 9]]))) def test_get_indexer_nearest_listlike_tolerance(self, tolerance, expected, listtype): idx = Index(np.arange(10)) actual = idx.get_indexer([0.2, 1.8, 8.5], method='nearest', tolerance=listtype(tolerance)) tm.assert_numpy_array_equal(actual, np.array(expected, dtype=np.intp)) def test_get_indexer_nearest_error(self): idx = Index(np.arange(10)) with tm.assert_raises_regex(ValueError, 'limit argument'): idx.get_indexer([1, 0], method='nearest', limit=1) with pytest.raises(ValueError, match='tolerance size must match'): idx.get_indexer([1, 0], method='nearest', tolerance=[1, 2, 3]) def test_get_indexer_nearest_decreasing(self): idx = Index(np.arange(10))[::-1] all_methods = ['pad', 'backfill', 'nearest'] for method in all_methods: actual = idx.get_indexer([0, 5, 9], method=method) tm.assert_numpy_array_equal(actual, np.array([9, 4, 0], dtype=np.intp)) for method, expected in zip(all_methods, [[8, 7, 0], [9, 8, 1], [9, 7, 0]]): actual = idx.get_indexer([0.2, 1.8, 8.5], method=method) tm.assert_numpy_array_equal(actual, np.array(expected, dtype=np.intp)) def test_get_indexer_strings(self): idx = pd.Index(['b', 'c']) actual = idx.get_indexer(['a', 'b', 'c', 'd'], method='pad') expected = np.array([-1, 0, 1, 1], dtype=np.intp) tm.assert_numpy_array_equal(actual, expected) actual = idx.get_indexer(['a', 'b', 'c', 'd'], method='backfill') expected = np.array([0, 0, 1, -1], dtype=np.intp) tm.assert_numpy_array_equal(actual, expected) with pytest.raises(TypeError): idx.get_indexer(['a', 'b', 'c', 'd'], method='nearest') with pytest.raises(TypeError): idx.get_indexer(['a', 'b', 'c', 'd'], method='pad', tolerance=2) with pytest.raises(TypeError): idx.get_indexer(['a', 'b', 'c', 'd'], method='pad', tolerance=[2, 2, 2, 2]) def test_get_indexer_numeric_index_boolean_target(self): # GH 16877 numeric_idx = pd.Index(range(4)) result = numeric_idx.get_indexer([True, False, True]) expected = np.array([-1, -1, -1], dtype=np.intp) tm.assert_numpy_array_equal(result, expected) def test_get_loc(self): idx = pd.Index([0, 1, 2]) all_methods = [None, 'pad', 'backfill', 'nearest'] for method in all_methods: assert idx.get_loc(1, method=method) == 1 if method is not None: assert idx.get_loc(1, method=method, tolerance=0) == 1 with pytest.raises(TypeError): idx.get_loc([1, 2], method=method) for method, loc in [('pad', 1), ('backfill', 2), ('nearest', 1)]: assert idx.get_loc(1.1, method) == loc for method, loc in [('pad', 1), ('backfill', 2), ('nearest', 1)]: assert idx.get_loc(1.1, method, tolerance=1) == loc for method in ['pad', 'backfill', 'nearest']: with pytest.raises(KeyError): idx.get_loc(1.1, method, tolerance=0.05) with tm.assert_raises_regex(ValueError, 'must be numeric'): idx.get_loc(1.1, 'nearest', tolerance='invalid') with tm.assert_raises_regex(ValueError, 'tolerance .* valid if'): idx.get_loc(1.1, tolerance=1) with pytest.raises(ValueError, match='tolerance size must match'): idx.get_loc(1.1, 'nearest', tolerance=[1, 1]) idx = pd.Index(['a', 'c']) with pytest.raises(TypeError): idx.get_loc('a', method='nearest') with pytest.raises(TypeError): idx.get_loc('a', method='pad', tolerance='invalid') def test_slice_locs(self): for dtype in [int, float]: idx = 
Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=dtype)) n = len(idx) assert idx.slice_locs(start=2) == (2, n) assert idx.slice_locs(start=3) == (3, n) assert idx.slice_locs(3, 8) == (3, 6) assert idx.slice_locs(5, 10) == (3, n) assert idx.slice_locs(end=8) == (0, 6) assert idx.slice_locs(end=9) == (0, 7) # reversed idx2 = idx[::-1] assert idx2.slice_locs(8, 2) == (2, 6) assert idx2.slice_locs(7, 3) == (2, 5) # float slicing idx = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=float)) n = len(idx) assert idx.slice_locs(5.0, 10.0) == (3, n) assert idx.slice_locs(4.5, 10.5) == (3, 8) idx2 = idx[::-1] assert idx2.slice_locs(8.5, 1.5) == (2, 6) assert idx2.slice_locs(10.5, -1) == (0, n) # int slicing with floats # GH 4892, these are all TypeErrors idx = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=int)) pytest.raises(TypeError, lambda: idx.slice_locs(5.0, 10.0), (3, n)) pytest.raises(TypeError, lambda: idx.slice_locs(4.5, 10.5), (3, 8)) idx2 = idx[::-1] pytest.raises(TypeError, lambda: idx2.slice_locs(8.5, 1.5), (2, 6)) pytest.raises(TypeError, lambda: idx2.slice_locs(10.5, -1), (0, n)) def test_slice_locs_dup(self): idx = Index(['a', 'a', 'b', 'c', 'd', 'd']) assert idx.slice_locs('a', 'd') == (0, 6) assert idx.slice_locs(end='d') == (0, 6) assert idx.slice_locs('a', 'c') == (0, 4) assert idx.slice_locs('b', 'd') == (2, 6) idx2 = idx[::-1] assert idx2.slice_locs('d', 'a') == (0, 6) assert idx2.slice_locs(end='a') == (0, 6) assert idx2.slice_locs('d', 'b') == (0, 4) assert idx2.slice_locs('c', 'a') == (2, 6) for dtype in [int, float]: idx = Index(np.array([10, 12, 12, 14], dtype=dtype)) assert idx.slice_locs(12, 12) == (1, 3) assert idx.slice_locs(11, 13) == (1, 3) idx2 = idx[::-1] assert idx2.slice_locs(12, 12) == (1, 3) assert idx2.slice_locs(13, 11) == (1, 3) def test_slice_locs_na(self): idx = Index([np.nan, 1, 2]) pytest.raises(KeyError, idx.slice_locs, start=1.5) pytest.raises(KeyError, idx.slice_locs, end=1.5) assert idx.slice_locs(1) == (1, 3) assert idx.slice_locs(np.nan) == (0, 3) idx = Index([0, np.nan, np.nan, 1, 2]) assert idx.slice_locs(np.nan) == (1, 5) def test_slice_locs_negative_step(self): idx = Index(list('bcdxy')) SLC = pd.IndexSlice def check_slice(in_slice, expected): s_start, s_stop = idx.slice_locs(in_slice.start, in_slice.stop, in_slice.step) result = idx[s_start:s_stop:in_slice.step] expected = pd.Index(list(expected)) tm.assert_index_equal(result, expected) for in_slice, expected in [ (SLC[::-1], 'yxdcb'), (SLC['b':'y':-1], ''), (SLC['b'::-1], 'b'), (SLC[:'b':-1], 'yxdcb'), (SLC[:'y':-1], 'y'), (SLC['y'::-1], 'yxdcb'), (SLC['y'::-4], 'yb'), # absent labels (SLC[:'a':-1], 'yxdcb'), (SLC[:'a':-2], 'ydb'), (SLC['z'::-1], 'yxdcb'), (SLC['z'::-3], 'yc'), (SLC['m'::-1], 'dcb'), (SLC[:'m':-1], 'yx'), (SLC['a':'a':-1], ''), (SLC['z':'z':-1], ''), (SLC['m':'m':-1], '') ]: check_slice(in_slice, expected) def test_drop(self): n = len(self.strIndex) drop = self.strIndex[lrange(5, 10)] dropped = self.strIndex.drop(drop) expected = self.strIndex[lrange(5) + lrange(10, n)] tm.assert_index_equal(dropped, expected) pytest.raises(ValueError, self.strIndex.drop, ['foo', 'bar']) pytest.raises(ValueError, self.strIndex.drop, ['1', 'bar']) # errors='ignore' mixed = drop.tolist() + ['foo'] dropped = self.strIndex.drop(mixed, errors='ignore') expected = self.strIndex[lrange(5) + lrange(10, n)] tm.assert_index_equal(dropped, expected) dropped = self.strIndex.drop(['foo', 'bar'], errors='ignore') expected = self.strIndex[lrange(n)] tm.assert_index_equal(dropped, expected) dropped = 
self.strIndex.drop(self.strIndex[0]) expected = self.strIndex[1:] tm.assert_index_equal(dropped, expected) ser = Index([1, 2, 3]) dropped = ser.drop(1) expected = Index([2, 3]) tm.assert_index_equal(dropped, expected) # errors='ignore' pytest.raises(ValueError, ser.drop, [3, 4]) dropped = ser.drop(4, errors='ignore') expected = Index([1, 2, 3]) tm.assert_index_equal(dropped, expected) dropped = ser.drop([3, 4, 5], errors='ignore') expected = Index([1, 2]) tm.assert_index_equal(dropped, expected) def test_tuple_union_bug(self): import pandas import numpy as np aidx1 = np.array([(1, 'A'), (2, 'A'), (1, 'B'), (2, 'B')], dtype=[('num', int), ('let', 'a1')]) aidx2 = np.array([(1, 'A'), (2, 'A'), (1, 'B'), (2, 'B'), (1, 'C'), (2, 'C')], dtype=[('num', int), ('let', 'a1')]) idx1 = pandas.Index(aidx1) idx2 = pandas.Index(aidx2) # intersection broken? int_idx = idx1.intersection(idx2) # needs to be 1d like idx1 and idx2 expected = idx1[:4] # pandas.Index(sorted(set(idx1) & set(idx2))) assert int_idx.ndim == 1 tm.assert_index_equal(int_idx, expected) # union broken union_idx = idx1.union(idx2) expected = idx2 assert union_idx.ndim == 1 tm.assert_index_equal(union_idx, expected) def test_is_monotonic_incomparable(self): index = Index([5, datetime.now(), 7]) assert not index.is_monotonic_increasing assert not index.is_monotonic_decreasing assert not index._is_strictly_monotonic_increasing assert not index._is_strictly_monotonic_decreasing def test_get_set_value(self): values = np.random.randn(100) date = self.dateIndex[67] assert_almost_equal(self.dateIndex.get_value(values, date), values[67]) self.dateIndex.set_value(values, date, 10) assert values[67] == 10 def test_isin(self): values = ['foo', 'bar', 'quux'] idx = Index(['qux', 'baz', 'foo', 'bar']) result = idx.isin(values) expected = np.array([False, False, True, True]) tm.assert_numpy_array_equal(result, expected) # set result = idx.isin(set(values)) tm.assert_numpy_array_equal(result, expected) # empty, return dtype bool idx = Index([]) result = idx.isin(values) assert len(result) == 0 assert result.dtype == np.bool_ @pytest.mark.skipif(PYPY, reason="np.nan is float('nan') on PyPy") def test_isin_nan_not_pypy(self): tm.assert_numpy_array_equal(Index(['a', np.nan]).isin([float('nan')]), np.array([False, False])) @pytest.mark.skipif(not PYPY, reason="np.nan is float('nan') on PyPy") def test_isin_nan_pypy(self): tm.assert_numpy_array_equal(Index(['a', np.nan]).isin([float('nan')]), np.array([False, True])) def test_isin_nan_common(self): tm.assert_numpy_array_equal(Index(['a', np.nan]).isin([np.nan]), np.array([False, True])) tm.assert_numpy_array_equal(Index(['a', pd.NaT]).isin([pd.NaT]), np.array([False, True])) tm.assert_numpy_array_equal(Index(['a', np.nan]).isin([pd.NaT]), np.array([False, False])) # Float64Index overrides isin, so must be checked separately tm.assert_numpy_array_equal(Float64Index([1.0, np.nan]).isin([np.nan]), np.array([False, True])) tm.assert_numpy_array_equal( Float64Index([1.0, np.nan]).isin([float('nan')]), np.array([False, True])) # we cannot compare NaT with NaN tm.assert_numpy_array_equal(Float64Index([1.0, np.nan]).isin([pd.NaT]), np.array([False, False])) def test_isin_level_kwarg(self): def check_idx(idx): values = idx.tolist()[-2:] + ['nonexisting'] expected = np.array([False, False, True, True]) tm.assert_numpy_array_equal(expected, idx.isin(values, level=0)) tm.assert_numpy_array_equal(expected, idx.isin(values, level=-1)) pytest.raises(IndexError, idx.isin, values, level=1) pytest.raises(IndexError, 
idx.isin, values, level=10) pytest.raises(IndexError, idx.isin, values, level=-2) pytest.raises(KeyError, idx.isin, values, level=1.0) pytest.raises(KeyError, idx.isin, values, level='foobar') idx.name = 'foobar' tm.assert_numpy_array_equal(expected, idx.isin(values, level='foobar')) pytest.raises(KeyError, idx.isin, values, level='xyzzy') pytest.raises(KeyError, idx.isin, values, level=np.nan) check_idx(Index(['qux', 'baz', 'foo', 'bar'])) # Float64Index overrides isin, so must be checked separately check_idx(Float64Index([1.0, 2.0, 3.0, 4.0])) @pytest.mark.parametrize("empty", [[], Series(), np.array([])]) def test_isin_empty(self, empty): # see gh-16991 idx = Index(["a", "b"]) expected = np.array([False, False]) result = idx.isin(empty) tm.assert_numpy_array_equal(expected, result) def test_boolean_cmp(self): values = [1, 2, 3, 4] idx = Index(values) res = (idx == values) tm.assert_numpy_array_equal(res, np.array( [True, True, True, True], dtype=bool)) def test_get_level_values(self): result = self.strIndex.get_level_values(0) tm.assert_index_equal(result, self.strIndex) # test for name (GH 17414) index_with_name = self.strIndex.copy() index_with_name.name = 'a' result = index_with_name.get_level_values('a') tm.assert_index_equal(result, index_with_name) def test_slice_keep_name(self): idx = Index(['a', 'b'], name='asdf') assert idx.name == idx[1:].name def test_join_self(self): # instance attributes of the form self.<name>Index indices = 'unicode', 'str', 'date', 'int', 'float' kinds = 'outer', 'inner', 'left', 'right' for index_kind in indices: res = getattr(self, '{0}Index'.format(index_kind)) for kind in kinds: joined = res.join(res, how=kind) assert res is joined def test_str_attribute(self): # GH9068 methods = ['strip', 'rstrip', 'lstrip'] idx = Index([' jack', 'jill ', ' jesse ', 'frank']) for method in methods: expected = Index([getattr(str, method)(x) for x in idx.values]) tm.assert_index_equal( getattr(Index.str, method)(idx.str), expected) # create a few instances that are not able to use .str accessor indices = [Index(range(5)), tm.makeDateIndex(10), MultiIndex.from_tuples([('foo', '1'), ('bar', '3')]), PeriodIndex(start='2000', end='2010', freq='A')] for idx in indices: with tm.assert_raises_regex(AttributeError, 'only use .str accessor'): idx.str.repeat(2) idx = Index(['a b c', 'd e', 'f']) expected = Index([['a', 'b', 'c'], ['d', 'e'], ['f']]) tm.assert_index_equal(idx.str.split(), expected) tm.assert_index_equal(idx.str.split(expand=False), expected) expected = MultiIndex.from_tuples([('a', 'b', 'c'), ('d', 'e', np.nan), ('f', np.nan, np.nan)]) tm.assert_index_equal(idx.str.split(expand=True), expected) # test boolean case, should return np.array instead of boolean Index idx = Index(['a1', 'a2', 'b1', 'b2']) expected = np.array([True, True, False, False]) tm.assert_numpy_array_equal(idx.str.startswith('a'), expected) assert isinstance(idx.str.startswith('a'), np.ndarray) s = Series(range(4), index=idx) expected = Series(range(2), index=['a1', 'a2']) tm.assert_series_equal(s[s.index.str.startswith('a')], expected) def test_tab_completion(self): # GH 9910 idx = Index(list('abcd')) assert 'str' in dir(idx) idx = Index(range(4)) assert 'str' not in dir(idx) def test_indexing_doesnt_change_class(self): idx = Index([1, 2, 3, 'a', 'b', 'c']) assert idx[1:3].identical(pd.Index([2, 3], dtype=np.object_)) assert idx[[0, 1]].identical(pd.Index([1, 2], dtype=np.object_)) def test_outer_join_sort(self): left_idx = Index(np.random.permutation(15)) right_idx = tm.makeDateIndex(10) with 
tm.assert_produces_warning(RuntimeWarning): joined = left_idx.join(right_idx, how='outer') # right_idx in this case because DatetimeIndex has join precedence over # Int64Index with tm.assert_produces_warning(RuntimeWarning): expected = right_idx.astype(object).union(left_idx.astype(object)) tm.assert_index_equal(joined, expected) def test_nan_first_take_datetime(self): idx = Index([pd.NaT, Timestamp('20130101'), Timestamp('20130102')]) res = idx.take([-1, 0, 1]) exp = Index([idx[-1], idx[0], idx[1]]) tm.assert_index_equal(res, exp) def test_take_fill_value(self): # GH 12631 idx = pd.Index(list('ABC'), name='xxx') result = idx.take(np.array([1, 0, -1])) expected = pd.Index(list('BAC'), name='xxx') tm.assert_index_equal(result, expected) # fill_value result = idx.take(np.array([1, 0, -1]), fill_value=True) expected = pd.Index(['B', 'A', np.nan], name='xxx') tm.assert_index_equal(result, expected) # allow_fill=False result = idx.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True) expected = pd.Index(['B', 'A', 'C'], name='xxx') tm.assert_index_equal(result, expected) msg = ('When allow_fill=True and fill_value is not None, ' 'all indices must be >= -1') with tm.assert_raises_regex(ValueError, msg): idx.take(np.array([1, 0, -2]), fill_value=True) with tm.assert_raises_regex(ValueError, msg): idx.take(np.array([1, 0, -5]), fill_value=True) with pytest.raises(IndexError): idx.take(np.array([1, -5])) def test_reshape_raise(self): msg = "reshaping is not supported" idx = pd.Index([0, 1, 2]) tm.assert_raises_regex(NotImplementedError, msg, idx.reshape, idx.shape) def test_reindex_preserves_name_if_target_is_list_or_ndarray(self): # GH6552 idx = pd.Index([0, 1, 2]) dt_idx = pd.date_range('20130101', periods=3) idx.name = None assert idx.reindex([])[0].name is None assert idx.reindex(np.array([]))[0].name is None assert idx.reindex(idx.tolist())[0].name is None assert idx.reindex(idx.tolist()[:-1])[0].name is None assert idx.reindex(idx.values)[0].name is None assert idx.reindex(idx.values[:-1])[0].name is None # Must preserve name even if dtype changes. assert idx.reindex(dt_idx.values)[0].name is None assert idx.reindex(dt_idx.tolist())[0].name is None idx.name = 'foobar' assert idx.reindex([])[0].name == 'foobar' assert idx.reindex(np.array([]))[0].name == 'foobar' assert idx.reindex(idx.tolist())[0].name == 'foobar' assert idx.reindex(idx.tolist()[:-1])[0].name == 'foobar' assert idx.reindex(idx.values)[0].name == 'foobar' assert idx.reindex(idx.values[:-1])[0].name == 'foobar' # Must preserve name even if dtype changes. 
assert idx.reindex(dt_idx.values)[0].name == 'foobar' assert idx.reindex(dt_idx.tolist())[0].name == 'foobar' def test_reindex_preserves_type_if_target_is_empty_list_or_array(self): # GH7774 idx = pd.Index(list('abc')) def get_reindex_type(target): return idx.reindex(target)[0].dtype.type assert get_reindex_type([]) == np.object_ assert get_reindex_type(np.array([])) == np.object_ assert get_reindex_type(np.array([], dtype=np.int64)) == np.object_ def test_reindex_doesnt_preserve_type_if_target_is_empty_index(self): # GH7774 idx = pd.Index(list('abc')) def get_reindex_type(target): return idx.reindex(target)[0].dtype.type assert get_reindex_type(pd.Int64Index([])) == np.int64 assert get_reindex_type(pd.Float64Index([])) == np.float64 assert get_reindex_type(pd.DatetimeIndex([])) == np.datetime64 reindexed = idx.reindex(pd.MultiIndex( [pd.Int64Index([]), pd.Float64Index([])], [[], []]))[0] assert reindexed.levels[0].dtype.type == np.int64 assert reindexed.levels[1].dtype.type == np.float64 def test_groupby(self): idx = Index(range(5)) groups = idx.groupby(np.array([1, 1, 2, 2, 2])) exp = {1: pd.Index([0, 1]), 2: pd.Index([2, 3, 4])} tm.assert_dict_equal(groups, exp) def test_equals_op_multiindex(self): # GH9785 # test comparisons of multiindex from pandas.compat import StringIO df = pd.read_csv(StringIO('a,b,c\n1,2,3\n4,5,6'), index_col=[0, 1]) tm.assert_numpy_array_equal(df.index == df.index, np.array([True, True])) mi1 = MultiIndex.from_tuples([(1, 2), (4, 5)]) tm.assert_numpy_array_equal(df.index == mi1, np.array([True, True])) mi2 = MultiIndex.from_tuples([(1, 2), (4, 6)]) tm.assert_numpy_array_equal(df.index == mi2, np.array([True, False])) mi3 = MultiIndex.from_tuples([(1, 2), (4, 5), (8, 9)]) with tm.assert_raises_regex(ValueError, "Lengths must match"): df.index == mi3 index_a = Index(['foo', 'bar', 'baz']) with tm.assert_raises_regex(ValueError, "Lengths must match"): df.index == index_a tm.assert_numpy_array_equal(index_a == mi3, np.array([False, False, False])) def test_conversion_preserves_name(self): # GH 10875 i = pd.Index(['01:02:03', '01:02:04'], name='label') assert i.name == pd.to_datetime(i).name assert i.name == pd.to_timedelta(i).name def test_string_index_repr(self): # py3/py2 repr can differ because of "u" prefix # which also affects to displayed element size if PY3: coerce = lambda x: x else: coerce = unicode # noqa # short idx = pd.Index(['a', 'bb', 'ccc']) if PY3: expected = u"""Index(['a', 'bb', 'ccc'], dtype='object')""" assert repr(idx) == expected else: expected = u"""Index([u'a', u'bb', u'ccc'], dtype='object')""" assert coerce(idx) == expected # multiple lines idx = pd.Index(['a', 'bb', 'ccc'] * 10) if PY3: expected = u"""\ Index(['a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc'], dtype='object')""" assert repr(idx) == expected else: expected = u"""\ Index([u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc'], dtype='object')""" assert coerce(idx) == expected # truncated idx = pd.Index(['a', 'bb', 'ccc'] * 100) if PY3: expected = u"""\ Index(['a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', ... 
'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc'], dtype='object', length=300)""" assert repr(idx) == expected else: expected = u"""\ Index([u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', ... u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc'], dtype='object', length=300)""" assert coerce(idx) == expected # short idx = pd.Index([u'あ', u'いい', u'ううう']) if PY3: expected = u"""Index(['あ', 'いい', 'ううう'], dtype='object')""" assert repr(idx) == expected else: expected = u"""Index([u'あ', u'いい', u'ううう'], dtype='object')""" assert coerce(idx) == expected # multiple lines idx = pd.Index([u'あ', u'いい', u'ううう'] * 10) if PY3: expected = (u"Index(['あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', " u"'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう',\n" u" 'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', " u"'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう',\n" u" 'あ', 'いい', 'ううう', 'あ', 'いい', " u"'ううう'],\n" u" dtype='object')") assert repr(idx) == expected else: expected = (u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', " u"u'ううう', u'あ', u'いい', u'ううう', u'あ',\n" u" u'いい', u'ううう', u'あ', u'いい', u'ううう', " u"u'あ', u'いい', u'ううう', u'あ', u'いい',\n" u" u'ううう', u'あ', u'いい', u'ううう', u'あ', " u"u'いい', u'ううう', u'あ', u'いい', u'ううう'],\n" u" dtype='object')") assert coerce(idx) == expected # truncated idx = pd.Index([u'あ', u'いい', u'ううう'] * 100) if PY3: expected = (u"Index(['あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', " u"'あ', 'いい', 'ううう', 'あ',\n" u" ...\n" u" 'ううう', 'あ', 'いい', 'ううう', 'あ', 'いい', " u"'ううう', 'あ', 'いい', 'ううう'],\n" u" dtype='object', length=300)") assert repr(idx) == expected else: expected = (u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', " u"u'ううう', u'あ', u'いい', u'ううう', u'あ',\n" u" ...\n" u" u'ううう', u'あ', u'いい', u'ううう', u'あ', " u"u'いい', u'ううう', u'あ', u'いい', u'ううう'],\n" u" dtype='object', length=300)") assert coerce(idx) == expected # Emable Unicode option ----------------------------------------- with cf.option_context('display.unicode.east_asian_width', True): # short idx = pd.Index([u'あ', u'いい', u'ううう']) if PY3: expected = (u"Index(['あ', 'いい', 'ううう'], " u"dtype='object')") assert repr(idx) == expected else: expected = (u"Index([u'あ', u'いい', u'ううう'], " u"dtype='object')") assert coerce(idx) == expected # multiple lines idx = pd.Index([u'あ', u'いい', u'ううう'] * 10) if PY3: expected = (u"Index(['あ', 'いい', 'ううう', 'あ', 'いい', " u"'ううう', 'あ', 'いい', 'ううう',\n" u" 'あ', 'いい', 'ううう', 'あ', 'いい', " u"'ううう', 'あ', 'いい', 'ううう',\n" u" 'あ', 'いい', 'ううう', 'あ', 'いい', " u"'ううう', 'あ', 'いい', 'ううう',\n" u" 'あ', 'いい', 'ううう'],\n" u" dtype='object')""") assert repr(idx) == expected else: expected = (u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', " u"u'ううう', u'あ', u'いい',\n" u" u'ううう', u'あ', u'いい', u'ううう', " u"u'あ', u'いい', u'ううう', u'あ',\n" u" u'いい', u'ううう', u'あ', u'いい', " u"u'ううう', u'あ', u'いい',\n" u" u'ううう', u'あ', u'いい', u'ううう', " u"u'あ', u'いい', u'ううう'],\n" u" dtype='object')") assert coerce(idx) == expected # truncated idx = pd.Index([u'あ', u'いい', u'ううう'] * 100) if PY3: expected = (u"Index(['あ', 'いい', 'ううう', 'あ', 'いい', " u"'ううう', 'あ', 'いい', 'ううう',\n" u" 'あ',\n" u" ...\n" u" 'ううう', 'あ', 'いい', 'ううう', 'あ', " u"'いい', 'ううう', 'あ', 'いい',\n" u" 'ううう'],\n" u" dtype='object', length=300)") assert repr(idx) == expected else: expected = (u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', " u"u'ううう', u'あ', u'いい',\n" u" u'ううう', u'あ',\n" u" ...\n" u" u'ううう', u'あ', u'いい', u'ううう', " u"u'あ', u'いい', u'ううう', u'あ',\n" u" u'いい', u'ううう'],\n" u" dtype='object', length=300)") assert coerce(idx) == expected @pytest.mark.parametrize('dtype', [np.int64, np.float64]) @pytest.mark.parametrize('delta', [1, 
0, -1]) def test_addsub_arithmetic(self, dtype, delta): # GH 8142 delta = dtype(delta) idx = pd.Index([10, 11, 12], dtype=dtype) result = idx + delta expected = pd.Index(idx.values + delta, dtype=dtype) tm.assert_index_equal(result, expected) # this subtraction used to fail result = idx - delta expected = pd.Index(idx.values - delta, dtype=dtype) tm.assert_index_equal(result, expected) tm.assert_index_equal(idx + idx, 2 * idx) tm.assert_index_equal(idx - idx, 0 * idx) assert not (idx - idx).empty class TestMixedIntIndex(Base): # Mostly the tests from common.py for which the results differ # in py2 and py3 because ints and strings are uncomparable in py3 # (GH 13514) _holder = Index def setup_method(self, method): self.indices = dict(mixedIndex=Index([0, 'a', 1, 'b', 2, 'c'])) self.setup_indices() def create_index(self): return self.mixedIndex def test_argsort(self): idx = self.create_index() if PY36: with tm.assert_raises_regex(TypeError, "'>|<' not supported"): result = idx.argsort() elif PY3: with tm.assert_raises_regex(TypeError, "unorderable types"): result = idx.argsort() else: result = idx.argsort() expected = np.array(idx).argsort() tm.assert_numpy_array_equal(result, expected, check_dtype=False) def test_numpy_argsort(self): idx = self.create_index() if PY36: with tm.assert_raises_regex(TypeError, "'>|<' not supported"): result = np.argsort(idx) elif PY3: with tm.assert_raises_regex(TypeError, "unorderable types"): result = np.argsort(idx) else: result = np.argsort(idx) expected = idx.argsort() tm.assert_numpy_array_equal(result, expected) def test_copy_name(self): # Check that "name" argument passed at initialization is honoured # GH12309 idx = self.create_index() first = idx.__class__(idx, copy=True, name='mario') second = first.__class__(first, copy=False) # Even though "copy=False", we want a new object. 
assert first is not second # Not using tm.assert_index_equal() since names differ: assert idx.equals(first) assert first.name == 'mario' assert second.name == 'mario' s1 = Series(2, index=first) s2 = Series(3, index=second[:-1]) warning_type = RuntimeWarning if PY3 else None with tm.assert_produces_warning(warning_type): # Python 3: Unorderable types s3 = s1 * s2 assert s3.index.name == 'mario' def test_copy_name2(self): # Check that adding a "name" parameter to the copy is honored # GH14302 idx = pd.Index([1, 2], name='MyName') idx1 = idx.copy() assert idx.equals(idx1) assert idx.name == 'MyName' assert idx1.name == 'MyName' idx2 = idx.copy(name='NewName') assert idx.equals(idx2) assert idx.name == 'MyName' assert idx2.name == 'NewName' idx3 = idx.copy(names=['NewName']) assert idx.equals(idx3) assert idx.name == 'MyName' assert idx.names == ['MyName'] assert idx3.name == 'NewName' assert idx3.names == ['NewName'] def test_union_base(self): idx = self.create_index() first = idx[3:] second = idx[:5] if PY3: with tm.assert_produces_warning(RuntimeWarning): # unorderable types result = first.union(second) expected = Index(['b', 2, 'c', 0, 'a', 1]) tm.assert_index_equal(result, expected) else: result = first.union(second) expected = Index(['b', 2, 'c', 0, 'a', 1]) tm.assert_index_equal(result, expected) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: if PY3: with tm.assert_produces_warning(RuntimeWarning): # unorderable types result = first.union(case) assert tm.equalContents(result, idx) else: result = first.union(case) assert tm.equalContents(result, idx) def test_intersection_base(self): # (same results for py2 and py3 but sortedness not tested elsewhere) idx = self.create_index() first = idx[:5] second = idx[:3] result = first.intersection(second) expected = Index([0, 'a', 1]) tm.assert_index_equal(result, expected) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: result = first.intersection(case) assert tm.equalContents(result, second) def test_difference_base(self): # (same results for py2 and py3 but sortedness not tested elsewhere) idx = self.create_index() first = idx[:4] second = idx[3:] result = first.difference(second) expected = Index([0, 1, 'a']) tm.assert_index_equal(result, expected) def test_symmetric_difference(self): # (same results for py2 and py3 but sortedness not tested elsewhere) idx = self.create_index() first = idx[:4] second = idx[3:] result = first.symmetric_difference(second) expected = Index([0, 1, 2, 'a', 'c']) tm.assert_index_equal(result, expected) def test_logical_compat(self): idx = self.create_index() assert idx.all() == idx.values.all() assert idx.any() == idx.values.any() def test_dropna(self): # GH 6194 for dtype in [None, object, 'category']: idx = pd.Index([1, 2, 3], dtype=dtype) tm.assert_index_equal(idx.dropna(), idx) idx = pd.Index([1., 2., 3.], dtype=dtype) tm.assert_index_equal(idx.dropna(), idx) nanidx = pd.Index([1., 2., np.nan, 3.], dtype=dtype) tm.assert_index_equal(nanidx.dropna(), idx) idx = pd.Index(['A', 'B', 'C'], dtype=dtype) tm.assert_index_equal(idx.dropna(), idx) nanidx = pd.Index(['A', np.nan, 'B', 'C'], dtype=dtype) tm.assert_index_equal(nanidx.dropna(), idx) tm.assert_index_equal(nanidx.dropna(how='any'), idx) tm.assert_index_equal(nanidx.dropna(how='all'), idx) idx = pd.DatetimeIndex(['2011-01-01', '2011-01-02', '2011-01-03']) tm.assert_index_equal(idx.dropna(), idx) nanidx = pd.DatetimeIndex(['2011-01-01', '2011-01-02', 
'2011-01-03', pd.NaT]) tm.assert_index_equal(nanidx.dropna(), idx) idx = pd.TimedeltaIndex(['1 days', '2 days', '3 days']) tm.assert_index_equal(idx.dropna(), idx) nanidx = pd.TimedeltaIndex([pd.NaT, '1 days', '2 days', '3 days', pd.NaT]) tm.assert_index_equal(nanidx.dropna(), idx) idx = pd.PeriodIndex(['2012-02', '2012-04', '2012-05'], freq='M') tm.assert_index_equal(idx.dropna(), idx) nanidx = pd.PeriodIndex(['2012-02', '2012-04', 'NaT', '2012-05'], freq='M') tm.assert_index_equal(nanidx.dropna(), idx) msg = "invalid how option: xxx" with tm.assert_raises_regex(ValueError, msg): pd.Index([1, 2, 3]).dropna(how='xxx') def test_get_combined_index(self): result = _get_combined_index([]) tm.assert_index_equal(result, Index([])) def test_repeat(self): repeats = 2 idx = pd.Index([1, 2, 3]) expected = pd.Index([1, 1, 2, 2, 3, 3]) result = idx.repeat(repeats) tm.assert_index_equal(result, expected) with tm.assert_produces_warning(FutureWarning): result = idx.repeat(n=repeats) tm.assert_index_equal(result, expected) def test_is_monotonic_na(self): examples = [pd.Index([np.nan]), pd.Index([np.nan, 1]), pd.Index([1, 2, np.nan]), pd.Index(['a', 'b', np.nan]), pd.to_datetime(['NaT']), pd.to_datetime(['NaT', '2000-01-01']), pd.to_datetime(['2000-01-01', 'NaT', '2000-01-02']), pd.to_timedelta(['1 day', 'NaT']), ] for index in examples: assert not index.is_monotonic_increasing assert not index.is_monotonic_decreasing assert not index._is_strictly_monotonic_increasing assert not index._is_strictly_monotonic_decreasing def test_repr_summary(self): with cf.option_context('display.max_seq_items', 10): r = repr(pd.Index(np.arange(1000))) assert len(r) < 200 assert "..." in r def test_int_name_format(self): index = Index(['a', 'b', 'c'], name=0) s = Series(lrange(3), index) df = DataFrame(lrange(3), index=index) repr(s) repr(df) def test_print_unicode_columns(self): df = pd.DataFrame({u("\u05d0"): [1, 2, 3], "\u05d1": [4, 5, 6], "c": [7, 8, 9]}) repr(df.columns) # should not raise UnicodeDecodeError def test_unicode_string_with_unicode(self): idx = Index(lrange(1000)) if PY3: str(idx) else: text_type(idx) def test_bytestring_with_unicode(self): idx = Index(lrange(1000)) if PY3: bytes(idx) else: str(idx) def test_intersect_str_dates(self): dt_dates = [datetime(2012, 2, 9), datetime(2012, 2, 22)] i1 = Index(dt_dates, dtype=object) i2 = Index(['aa'], dtype=object) res = i2.intersection(i1) assert len(res) == 0 class TestIndexUtils(object): @pytest.mark.parametrize('data, names, expected', [ ([[1, 2, 3]], None, Index([1, 2, 3])), ([[1, 2, 3]], ['name'], Index([1, 2, 3], name='name')), ([['a', 'a'], ['c', 'd']], None, MultiIndex([['a'], ['c', 'd']], [[0, 0], [0, 1]])), ([['a', 'a'], ['c', 'd']], ['L1', 'L2'], MultiIndex([['a'], ['c', 'd']], [[0, 0], [0, 1]], names=['L1', 'L2'])), ]) def test_ensure_index_from_sequences(self, data, names, expected): result = _ensure_index_from_sequences(data, names) tm.assert_index_equal(result, expected) @pytest.mark.parametrize('opname', ['eq', 'ne', 'le', 'lt', 'ge', 'gt']) def test_generated_op_names(opname, indices): index = indices opname = '__{name}__'.format(name=opname) method = getattr(index, opname) assert method.__name__ == opname
repo_name: winklerand/pandas
test_path: pandas/tests/indexes/test_base.py
code_path: pandas/tests/series/common.py
from toolz import concat, unique, count

from collections import Mapping


class ShareDict(Mapping):
    """ A Mapping composed of other Mappings

    This is a union of other disjoint mappings. It allows the combination of
    many dicts into a single dict-like object without creating copies of the
    underlying dicts. It provides cheap ``update``, ``len`` and ``__iter__``
    operations as well as a fairly cheap ``__getitem__`` operation (linear in
    the number of constituent mappings).

    This class is optimized for Dask's use, and may not be generally useful.
    Users may want to consider the standard ``collections.ChainMap`` data
    structure.

    This class makes the following assumptions:

    1. Constituent mappings are disjoint. No key is in more than one mapping.
    2. Constituent mappings will not be modified

    Note that ShareDict does not enforce these assumptions. It is up to the
    user to guarantee them.

    Examples
    --------
    >>> a = {'x': 1, 'y': 2}
    >>> b = {'z': 3}

    >>> s = ShareDict()
    >>> s.update(a)
    >>> s.update(b)

    >>> dict(s)  # doctest: +SKIP
    {'x': 1, 'y': 2, 'z': 3}

    These dictionaries are stored within an internal dictionary of
    dictionaries

    >>> list(s.dicts.values())  # doctest: +SKIP
    [{'x': 1, 'y': 2}, {'z': 3}]

    By default these are named by their object id. However, you can also
    provide explicit names.

    >>> s = ShareDict()
    >>> s.update_with_key(a, key='a')
    >>> s.update_with_key(b, key='b')
    >>> s.dicts  # doctest: +SKIP
    {'a': {'x': 1, 'y': 2}, 'b': {'z': 3}}
    """
    def __init__(self):
        self.dicts = dict()

    def update_with_key(self, arg, key=None):
        if type(arg) is ShareDict:
            assert key is None
            self.dicts.update(arg.dicts)
            return

        if key is None:
            key = id(arg)

        assert isinstance(arg, dict)
        if arg:
            self.dicts[key] = arg

    def update(self, arg):
        self.update_with_key(arg)

    def __getitem__(self, key):
        for d in self.dicts.values():
            if key in d:
                return d[key]
        raise KeyError(key)

    def __len__(self):
        return count(iter(self))

    def items(self):
        seen = set()
        for d in self.dicts.values():
            for key in d:
                if key not in seen:
                    seen.add(key)
                    yield (key, d[key])

    def __iter__(self):
        return unique(concat(self.dicts.values()))


def merge(*dicts):
    result = ShareDict()
    for d in dicts:
        if isinstance(d, tuple):
            key, d = d
            result.update_with_key(d, key=key)
        else:
            result.update_with_key(d)
    return result
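A brief usage sketch for the ``merge`` helper above, which has no docstring of its own. This is not part of the original dask file; the dict contents and the key name ``graph_a`` below are illustrative assumptions only.

# Hypothetical usage of merge(); the values and the 'graph_a' key are made up.
a = {'x': 1, 'y': 2}
b = {'z': 3}
s = merge(('graph_a', a), b)      # a (key, dict) tuple supplies an explicit key; b is keyed by id(b)
assert s['z'] == 3                # __getitem__ scans each constituent dict in turn
assert set(s) == {'x', 'y', 'z'}  # __iter__ yields the union of keys
assert s.dicts['graph_a'] is a    # the original dict is shared, not copied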
import numpy as np import pandas as pd import pandas.util.testing as tm import sys import os import dask import pytest from time import sleep import dask.dataframe as dd from dask.utils import tmpfile, tmpdir, dependency_depth from dask.dataframe.utils import assert_eq def test_to_hdf(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data') out = pd.read_hdf(fn, '/data') tm.assert_frame_equal(df, out[:]) with tmpfile('h5') as fn: a.x.to_hdf(fn, '/data') out = pd.read_hdf(fn, '/data') tm.assert_series_equal(df.x, out[:]) a = dd.from_pandas(df, 1) with tmpfile('h5') as fn: a.to_hdf(fn, '/data') out = pd.read_hdf(fn, '/data') tm.assert_frame_equal(df, out[:]) # test compute = False with tmpfile('h5') as fn: r = a.to_hdf(fn, '/data', compute=False) r.compute() out = pd.read_hdf(fn, '/data') tm.assert_frame_equal(df, out[:]) def test_to_hdf_multiple_nodes(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 2) df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) b = dd.from_pandas(df16, 16) # saving to multiple nodes with tmpfile('h5') as fn: a.to_hdf(fn, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df, out) # saving to multiple nodes making sure order is kept with tmpfile('h5') as fn: b.to_hdf(fn, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df16, out) # saving to multiple datasets with custom name_function with tmpfile('h5') as fn: a.to_hdf(fn, '/data_*', name_function=lambda i: 'a' * (i + 1)) out = dd.read_hdf(fn, '/data_*') assert_eq(df, out) out = pd.read_hdf(fn, '/data_a') tm.assert_frame_equal(out, df.iloc[:2]) out = pd.read_hdf(fn, '/data_aa') tm.assert_frame_equal(out, df.iloc[2:]) # test multiple nodes with hdf object with tmpfile('h5') as fn: with pd.HDFStore(fn) as hdf: b.to_hdf(hdf, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df16, out) def test_to_hdf_multiple_files(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 2) df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) b = dd.from_pandas(df16, 16) # saving to multiple files with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data') out = dd.read_hdf(fn, '/data') assert_eq(df, out) # saving to multiple files making sure order is kept with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') b.to_hdf(fn, '/data') out = dd.read_hdf(fn, '/data') assert_eq(df16, out) # saving to multiple files with custom name_function with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data', name_function=lambda i: 'a' * (i + 1)) out = dd.read_hdf(fn, '/data') assert_eq(df, out) out = pd.read_hdf(os.path.join(dn, 'data_a.h5'), '/data') tm.assert_frame_equal(out, df.iloc[:2]) out = pd.read_hdf(os.path.join(dn, 'data_aa.h5'), '/data') tm.assert_frame_equal(out, df.iloc[2:]) # test hdf object with tmpfile('h5') as fn: with pd.HDFStore(fn) as hdf: a.to_hdf(hdf, '/data*') out = 
dd.read_hdf(fn, '/data*') assert_eq(df, out) def test_to_hdf_modes_multiple_nodes(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) # appending a single partition to existing data a = dd.from_pandas(df, 1) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='a') out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) # overwriting a file with a single partition a = dd.from_pandas(df, 1) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='w') out = dd.read_hdf(fn, '/data*') assert_eq(df, out) # appending two partitions to existing data a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='a') out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) # overwriting a file with two partitions a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='w') out = dd.read_hdf(fn, '/data*') assert_eq(df, out) # overwriting a single partition, keeping other partitions a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data1') a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='a', append=False) out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) def test_to_hdf_modes_multiple_files(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) # appending a single partition to existing data a = dd.from_pandas(df, 1) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data2'), '/data') a.to_hdf(fn, '/data', mode='a') out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) # appending two partitions to existing data a = dd.from_pandas(df, 2) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data2'), '/data') a.to_hdf(fn, '/data', mode='a') out = dd.read_hdf(fn, '/data') assert_eq(df.append(df), out) # overwriting a file with two partitions a = dd.from_pandas(df, 2) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data1'), '/data') a.to_hdf(fn, '/data', mode='w') out = dd.read_hdf(fn, '/data') assert_eq(df, out) # overwriting a single partition, keeping other partitions a = dd.from_pandas(df, 2) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data1'), '/data') a.to_hdf(fn, '/data', mode='a', append=False) out = dd.read_hdf(fn, '/data') assert_eq(df.append(df), out) def test_to_hdf_link_optimizations(): """testing dask link levels is correct by calculating the depth of the dask graph""" pytest.importorskip('tables') df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df16, 16) # saving to multiple hdf files, no links are needed # expected layers: from_pandas, to_hdf, list = depth of 3 with tmpdir() as dn: fn = os.path.join(dn, 'data*') d = a.to_hdf(fn, '/data', compute=False) assert dependency_depth(d.dask) == 3 # saving to a single hdf file with multiple nodes # all subsequent nodes depend on the first # expected layers: from_pandas, first to_hdf(creates file+node), subsequent to_hdfs, list = 4 with tmpfile() as fn: d = a.to_hdf(fn, '/data*', compute=False) assert dependency_depth(d.dask) == 4 # saving to a single hdf file with a single node # every node 
depends on the previous node # expected layers: from_pandas, to_hdf times npartitions(15), list = 2 + npartitions = 17 with tmpfile() as fn: d = a.to_hdf(fn, '/data', compute=False) assert dependency_depth(d.dask) == 2 + a.npartitions @pytest.mark.slow def test_to_hdf_lock_delays(): pytest.importorskip('tables') df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df16, 16) # adding artifichial delays to make sure last tasks finish first # that's a way to simulate last tasks finishing last def delayed_nop(i): if i[1] < 10: sleep(0.1 * (10 - i[1])) return i # saving to multiple hdf nodes with tmpfile() as fn: a = a.apply(delayed_nop, axis=1, meta=a) a.to_hdf(fn, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df16, out) # saving to multiple hdf files # adding artifichial delays to make sure last tasks finish first with tmpdir() as dn: fn = os.path.join(dn, 'data*') a = a.apply(delayed_nop, axis=1, meta=a) a.to_hdf(fn, '/data') out = dd.read_hdf(fn, '/data') assert_eq(df16, out) def test_to_hdf_exceptions(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 1) # triggering too many asterisks error with tmpdir() as dn: with pytest.raises(ValueError): fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data_*') # triggering too many asterisks error with tmpfile() as fn: with pd.HDFStore(fn) as hdf: with pytest.raises(ValueError): a.to_hdf(hdf, '/data_*_*') @pytest.mark.parametrize('get', [dask.get, dask.threaded.get, dask.multiprocessing.get]) @pytest.mark.parametrize('npartitions', [1, 4, 10]) def test_to_hdf_schedulers(get, npartitions): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df, npartitions=npartitions) # test single file single node with tmpfile('h5') as fn: a.to_hdf(fn, '/data', get=get) out = pd.read_hdf(fn, '/data') assert_eq(df, out) # test multiple files single node with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data', get=get) out = dd.read_hdf(fn, '/data') assert_eq(df, out) # test single file multiple nodes with tmpfile('h5') as fn: a.to_hdf(fn, '/data*', get=get) out = dd.read_hdf(fn, '/data*') assert_eq(df, out) def test_to_hdf_kwargs(): pytest.importorskip('tables') df = pd.DataFrame({'A': ['a', 'aaaa']}) ddf = dd.from_pandas(df, npartitions=2) with tmpfile('h5') as fn: ddf.to_hdf(fn, 'foo4', format='table', min_itemsize=4) df2 = pd.read_hdf(fn, 'foo4') tm.assert_frame_equal(df, df2) @pytest.mark.skipif(sys.version_info[:2] == (3, 3), reason="Python3.3 uses pytest2.7.2, w/o warns method") def test_to_fmt_warns(): pytest.importorskip('tables') df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df16, 16) # testing warning when breaking order with tmpfile('h5') as fn: with pytest.warns(None): a.to_hdf(fn, '/data*', name_function=str) # testing warning when breaking order with tmpdir() 
as dn: with pytest.warns(None): fn = os.path.join(dn, "data_*.csv") a.to_csv(fn, name_function=str) @pytest.mark.parametrize('data, compare', [ (pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]), tm.assert_frame_equal), (pd.Series([1, 2, 3, 4], name='a'), tm.assert_series_equal), ]) def test_read_hdf(data, compare): pytest.importorskip('tables') with tmpfile('h5') as fn: data.to_hdf(fn, '/data') try: dd.read_hdf(fn, 'data', chunksize=2, mode='r') assert False except TypeError as e: assert "format='table'" in str(e) with tmpfile('h5') as fn: data.to_hdf(fn, '/data', format='table') a = dd.read_hdf(fn, '/data', chunksize=2, mode='r') assert a.npartitions == 2 compare(a.compute(), data) compare(dd.read_hdf(fn, '/data', chunksize=2, start=1, stop=3, mode='r').compute(), pd.read_hdf(fn, '/data', start=1, stop=3)) assert (sorted(dd.read_hdf(fn, '/data', mode='r').dask) == sorted(dd.read_hdf(fn, '/data', mode='r').dask)) def test_read_hdf_multiply_open(): """Test that we can read from a file that's already opened elsewhere in read-only mode.""" pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) with tmpfile('h5') as fn: df.to_hdf(fn, '/data', format='table') with pd.HDFStore(fn, mode='r'): dd.read_hdf(fn, '/data', chunksize=2, mode='r') def test_read_hdf_multiple(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df, 16) with tmpfile('h5') as fn: a.to_hdf(fn, '/data*') r = dd.read_hdf(fn, '/data*', sorted_index=True) assert a.npartitions == r.npartitions assert a.divisions == r.divisions assert_eq(a, r) def test_read_hdf_start_stop_values(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) with tmpfile('h5') as fn: df.to_hdf(fn, '/data', format='table') with pytest.raises(ValueError) as e: dd.read_hdf(fn, '/data', stop=10) assert 'number of rows' in str(e) with pytest.raises(ValueError) as e: dd.read_hdf(fn, '/data', start=10) assert 'is above or equal to' in str(e) with pytest.raises(ValueError) as e: dd.read_hdf(fn, '/data', chunksize=-1) assert 'positive integer' in str(e) def test_hdf_globbing(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) with tmpdir() as tdir: df.to_hdf(os.path.join(tdir, 'one.h5'), '/foo/data', format='table') df.to_hdf(os.path.join(tdir, 'two.h5'), '/bar/data', format='table') df.to_hdf(os.path.join(tdir, 'two.h5'), '/foo/data', format='table') with dask.set_options(get=dask.get): res = dd.read_hdf(os.path.join(tdir, 'one.h5'), '/*/data', chunksize=2) assert res.npartitions == 2 tm.assert_frame_equal(res.compute(), df) res = dd.read_hdf(os.path.join(tdir, 'one.h5'), '/*/data', chunksize=2, start=1, stop=3) expected = pd.read_hdf(os.path.join(tdir, 'one.h5'), '/foo/data', start=1, stop=3) tm.assert_frame_equal(res.compute(), expected) res = dd.read_hdf(os.path.join(tdir, 'two.h5'), '/*/data', chunksize=2) assert res.npartitions == 2 + 2 tm.assert_frame_equal(res.compute(), pd.concat([df] * 2)) res = dd.read_hdf(os.path.join(tdir, '*.h5'), '/foo/data', chunksize=2) assert res.npartitions == 2 + 2 tm.assert_frame_equal(res.compute(), pd.concat([df] * 2)) res = 
dd.read_hdf(os.path.join(tdir, '*.h5'), '/*/data', chunksize=2) assert res.npartitions == 2 + 2 + 2 tm.assert_frame_equal(res.compute(), pd.concat([df] * 3)) def test_read_hdf_doesnt_segfault(): pytest.importorskip('tables') with tmpfile('h5') as fn: N = 40 df = pd.DataFrame(np.random.randn(N, 3)) with pd.HDFStore(fn, mode='w') as store: store.append('/x', df) ddf = dd.read_hdf(fn, '/x', chunksize=2) assert len(ddf) == N
repo_name: mraspaud/dask
test_path: dask/dataframe/io/tests/test_hdf.py
code_path: dask/sharedict.py
from itertools import chain from dask.order import child_max, ndependents, order from dask.core import get_deps from dask.utils_test import add, inc def issorted(L, reverse=False): return sorted(L, reverse=reverse) == L def f(*args): pass def test_ordering_keeps_groups_together(): a, b, c = 'abc' d = dict(((a, i), (f,)) for i in range(4)) d.update({(b, 0): (f, (a, 0), (a, 1)), (b, 1): (f, (a, 2), (a, 3))}) o = order(d) assert abs(o[(a, 0)] - o[(a, 1)]) == 1 assert abs(o[(a, 2)] - o[(a, 3)]) == 1 d = dict(((a, i), (f,)) for i in range(4)) d.update({(b, 0): (f, (a, 0), (a, 2)), (b, 1): (f, (a, 1), (a, 3))}) o = order(d) assert abs(o[(a, 0)] - o[(a, 2)]) == 1 assert abs(o[(a, 1)] - o[(a, 3)]) == 1 def test_prefer_broker_nodes(): """ b0 b1 b2 | \ / a0 a1 a1 should be run before a0 """ a, b, c = 'abc' dsk = {(a, 0): (f,), (a, 1): (f,), (b, 0): (f, (a, 0)), (b, 1): (f, (a, 1)), (b, 2): (f, (a, 1))} o = order(dsk) assert o[(a, 1)] < o[(a, 0)] # Switch name of 0, 1 to ensure that this isn't due to string comparison dsk = {(a, 0): (f,), (a, 1): (f,), (b, 0): (f, (a, 0)), (b, 1): (f, (a, 1)), (b, 2): (f, (a, 0))} o = order(dsk) assert o[(a, 1)] > o[(a, 0)] def test_base_of_reduce_preferred(): """ a3 /| a2 | /| | a1 | | /| | | a0 | | | | | | | b0 b1 b2 b3 \ \ / / c We really want to run b0 quickly """ dsk = dict((('a', i), (f, ('a', i - 1), ('b', i))) for i in [1, 2, 3]) dsk[('a', 0)] = (f, ('b', 0)) dsk.update(dict((('b', i), (f, 'c', 1)) for i in [0, 1, 2, 3])) dsk['c'] = 1 o = order(dsk) assert o == {('a', 3): 0, ('a', 2): 1, ('a', 1): 2, ('a', 0): 3, ('b', 0): 4, 'c': 5, ('b', 1): 6, ('b', 2): 7, ('b', 3): 8} # ('b', 0) is the most important out of ('b', i) assert min([('b', i) for i in [0, 1, 2, 3]], key=o.get) == ('b', 0) def test_deep_bases_win_over_dependents(): """ d should come before e and probably before one of b and c a / | \ . b c | / \ | / e d """ dsk = {'a': (f, 'b', 'c', 'd'), 'b': (f, 'd', 'e'), 'c': (f, 'd'), 'd': 1, 'e': 2} o = order(dsk) assert o['d'] < o['e'] assert o['d'] < o['b'] or o['d'] < o['c'] def test_prefer_deep(): """ c | y b | | x a Prefer longer chains first so we should start with c """ dsk = {'a': 1, 'b': (f, 'a'), 'c': (f, 'b'), 'x': 1, 'y': (f, 'x')} o = order(dsk) assert o == {'c': 0, 'b': 1, 'a': 2, 'y': 3, 'x': 4} def test_stacklimit(): dsk = dict(('x%s' % (i + 1), (inc, 'x%s' % i)) for i in range(10000)) dependencies, dependents = get_deps(dsk) scores = dict.fromkeys(dsk, 1) child_max(dependencies, dependents, scores) ndependents(dependencies, dependents) def test_ndependents(): a, b, c = 'abc' dsk = dict(chain((((a, i), i * 2) for i in range(5)), (((b, i), (add, i, (a, i))) for i in range(5)), (((c, i), (add, i, (b, i))) for i in range(5)))) result = ndependents(*get_deps(dsk)) expected = dict(chain((((a, i), 3) for i in range(5)), (((b, i), 2) for i in range(5)), (((c, i), 1) for i in range(5)))) assert result == expected dsk = {a: 1, b: 1} deps = get_deps(dsk) assert ndependents(*deps) == dsk dsk = {a: 1, b: (add, a, 1), c: (add, b, a)} assert ndependents(*get_deps(dsk)) == {a: 4, b: 2, c: 1} dsk = {a: 1, b: a, c: b} deps = get_deps(dsk) assert ndependents(*deps) == {a: 3, b: 2, c: 1} def test_break_ties_by_str(): dsk = {('x', i): (inc, i) for i in range(10)} x_keys = sorted(dsk) dsk['y'] = list(x_keys) o = order(dsk) expected = {'y': 0} expected.update({k: i + 1 for i, k in enumerate(x_keys)}) assert o == expected def test_order_doesnt_fail_on_mixed_type_keys(): order({'x': (inc, 1), ('y', 0): (inc, 2), 'z': (add, 'x', ('y', 0))})
import numpy as np import pandas as pd import pandas.util.testing as tm import sys import os import dask import pytest from time import sleep import dask.dataframe as dd from dask.utils import tmpfile, tmpdir, dependency_depth from dask.dataframe.utils import assert_eq def test_to_hdf(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data') out = pd.read_hdf(fn, '/data') tm.assert_frame_equal(df, out[:]) with tmpfile('h5') as fn: a.x.to_hdf(fn, '/data') out = pd.read_hdf(fn, '/data') tm.assert_series_equal(df.x, out[:]) a = dd.from_pandas(df, 1) with tmpfile('h5') as fn: a.to_hdf(fn, '/data') out = pd.read_hdf(fn, '/data') tm.assert_frame_equal(df, out[:]) # test compute = False with tmpfile('h5') as fn: r = a.to_hdf(fn, '/data', compute=False) r.compute() out = pd.read_hdf(fn, '/data') tm.assert_frame_equal(df, out[:]) def test_to_hdf_multiple_nodes(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 2) df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) b = dd.from_pandas(df16, 16) # saving to multiple nodes with tmpfile('h5') as fn: a.to_hdf(fn, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df, out) # saving to multiple nodes making sure order is kept with tmpfile('h5') as fn: b.to_hdf(fn, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df16, out) # saving to multiple datasets with custom name_function with tmpfile('h5') as fn: a.to_hdf(fn, '/data_*', name_function=lambda i: 'a' * (i + 1)) out = dd.read_hdf(fn, '/data_*') assert_eq(df, out) out = pd.read_hdf(fn, '/data_a') tm.assert_frame_equal(out, df.iloc[:2]) out = pd.read_hdf(fn, '/data_aa') tm.assert_frame_equal(out, df.iloc[2:]) # test multiple nodes with hdf object with tmpfile('h5') as fn: with pd.HDFStore(fn) as hdf: b.to_hdf(hdf, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df16, out) def test_to_hdf_multiple_files(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 2) df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) b = dd.from_pandas(df16, 16) # saving to multiple files with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data') out = dd.read_hdf(fn, '/data') assert_eq(df, out) # saving to multiple files making sure order is kept with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') b.to_hdf(fn, '/data') out = dd.read_hdf(fn, '/data') assert_eq(df16, out) # saving to multiple files with custom name_function with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data', name_function=lambda i: 'a' * (i + 1)) out = dd.read_hdf(fn, '/data') assert_eq(df, out) out = pd.read_hdf(os.path.join(dn, 'data_a.h5'), '/data') tm.assert_frame_equal(out, df.iloc[:2]) out = pd.read_hdf(os.path.join(dn, 'data_aa.h5'), '/data') tm.assert_frame_equal(out, df.iloc[2:]) # test hdf object with tmpfile('h5') as fn: with pd.HDFStore(fn) as hdf: a.to_hdf(hdf, '/data*') out = 
dd.read_hdf(fn, '/data*') assert_eq(df, out) def test_to_hdf_modes_multiple_nodes(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) # appending a single partition to existing data a = dd.from_pandas(df, 1) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='a') out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) # overwriting a file with a single partition a = dd.from_pandas(df, 1) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='w') out = dd.read_hdf(fn, '/data*') assert_eq(df, out) # appending two partitions to existing data a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='a') out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) # overwriting a file with two partitions a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='w') out = dd.read_hdf(fn, '/data*') assert_eq(df, out) # overwriting a single partition, keeping other partitions a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data1') a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='a', append=False) out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) def test_to_hdf_modes_multiple_files(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) # appending a single partition to existing data a = dd.from_pandas(df, 1) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data2'), '/data') a.to_hdf(fn, '/data', mode='a') out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) # appending two partitions to existing data a = dd.from_pandas(df, 2) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data2'), '/data') a.to_hdf(fn, '/data', mode='a') out = dd.read_hdf(fn, '/data') assert_eq(df.append(df), out) # overwriting a file with two partitions a = dd.from_pandas(df, 2) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data1'), '/data') a.to_hdf(fn, '/data', mode='w') out = dd.read_hdf(fn, '/data') assert_eq(df, out) # overwriting a single partition, keeping other partitions a = dd.from_pandas(df, 2) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data1'), '/data') a.to_hdf(fn, '/data', mode='a', append=False) out = dd.read_hdf(fn, '/data') assert_eq(df.append(df), out) def test_to_hdf_link_optimizations(): """testing dask link levels is correct by calculating the depth of the dask graph""" pytest.importorskip('tables') df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df16, 16) # saving to multiple hdf files, no links are needed # expected layers: from_pandas, to_hdf, list = depth of 3 with tmpdir() as dn: fn = os.path.join(dn, 'data*') d = a.to_hdf(fn, '/data', compute=False) assert dependency_depth(d.dask) == 3 # saving to a single hdf file with multiple nodes # all subsequent nodes depend on the first # expected layers: from_pandas, first to_hdf(creates file+node), subsequent to_hdfs, list = 4 with tmpfile() as fn: d = a.to_hdf(fn, '/data*', compute=False) assert dependency_depth(d.dask) == 4 # saving to a single hdf file with a single node # every node 
depends on the previous node # expected layers: from_pandas, to_hdf times npartitions(15), list = 2 + npartitions = 17 with tmpfile() as fn: d = a.to_hdf(fn, '/data', compute=False) assert dependency_depth(d.dask) == 2 + a.npartitions @pytest.mark.slow def test_to_hdf_lock_delays(): pytest.importorskip('tables') df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df16, 16) # adding artifichial delays to make sure last tasks finish first # that's a way to simulate last tasks finishing last def delayed_nop(i): if i[1] < 10: sleep(0.1 * (10 - i[1])) return i # saving to multiple hdf nodes with tmpfile() as fn: a = a.apply(delayed_nop, axis=1, meta=a) a.to_hdf(fn, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df16, out) # saving to multiple hdf files # adding artifichial delays to make sure last tasks finish first with tmpdir() as dn: fn = os.path.join(dn, 'data*') a = a.apply(delayed_nop, axis=1, meta=a) a.to_hdf(fn, '/data') out = dd.read_hdf(fn, '/data') assert_eq(df16, out) def test_to_hdf_exceptions(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 1) # triggering too many asterisks error with tmpdir() as dn: with pytest.raises(ValueError): fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data_*') # triggering too many asterisks error with tmpfile() as fn: with pd.HDFStore(fn) as hdf: with pytest.raises(ValueError): a.to_hdf(hdf, '/data_*_*') @pytest.mark.parametrize('get', [dask.get, dask.threaded.get, dask.multiprocessing.get]) @pytest.mark.parametrize('npartitions', [1, 4, 10]) def test_to_hdf_schedulers(get, npartitions): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df, npartitions=npartitions) # test single file single node with tmpfile('h5') as fn: a.to_hdf(fn, '/data', get=get) out = pd.read_hdf(fn, '/data') assert_eq(df, out) # test multiple files single node with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data', get=get) out = dd.read_hdf(fn, '/data') assert_eq(df, out) # test single file multiple nodes with tmpfile('h5') as fn: a.to_hdf(fn, '/data*', get=get) out = dd.read_hdf(fn, '/data*') assert_eq(df, out) def test_to_hdf_kwargs(): pytest.importorskip('tables') df = pd.DataFrame({'A': ['a', 'aaaa']}) ddf = dd.from_pandas(df, npartitions=2) with tmpfile('h5') as fn: ddf.to_hdf(fn, 'foo4', format='table', min_itemsize=4) df2 = pd.read_hdf(fn, 'foo4') tm.assert_frame_equal(df, df2) @pytest.mark.skipif(sys.version_info[:2] == (3, 3), reason="Python3.3 uses pytest2.7.2, w/o warns method") def test_to_fmt_warns(): pytest.importorskip('tables') df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df16, 16) # testing warning when breaking order with tmpfile('h5') as fn: with pytest.warns(None): a.to_hdf(fn, '/data*', name_function=str) # testing warning when breaking order with tmpdir() 
as dn: with pytest.warns(None): fn = os.path.join(dn, "data_*.csv") a.to_csv(fn, name_function=str) @pytest.mark.parametrize('data, compare', [ (pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]), tm.assert_frame_equal), (pd.Series([1, 2, 3, 4], name='a'), tm.assert_series_equal), ]) def test_read_hdf(data, compare): pytest.importorskip('tables') with tmpfile('h5') as fn: data.to_hdf(fn, '/data') try: dd.read_hdf(fn, 'data', chunksize=2, mode='r') assert False except TypeError as e: assert "format='table'" in str(e) with tmpfile('h5') as fn: data.to_hdf(fn, '/data', format='table') a = dd.read_hdf(fn, '/data', chunksize=2, mode='r') assert a.npartitions == 2 compare(a.compute(), data) compare(dd.read_hdf(fn, '/data', chunksize=2, start=1, stop=3, mode='r').compute(), pd.read_hdf(fn, '/data', start=1, stop=3)) assert (sorted(dd.read_hdf(fn, '/data', mode='r').dask) == sorted(dd.read_hdf(fn, '/data', mode='r').dask)) def test_read_hdf_multiply_open(): """Test that we can read from a file that's already opened elsewhere in read-only mode.""" pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) with tmpfile('h5') as fn: df.to_hdf(fn, '/data', format='table') with pd.HDFStore(fn, mode='r'): dd.read_hdf(fn, '/data', chunksize=2, mode='r') def test_read_hdf_multiple(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df, 16) with tmpfile('h5') as fn: a.to_hdf(fn, '/data*') r = dd.read_hdf(fn, '/data*', sorted_index=True) assert a.npartitions == r.npartitions assert a.divisions == r.divisions assert_eq(a, r) def test_read_hdf_start_stop_values(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) with tmpfile('h5') as fn: df.to_hdf(fn, '/data', format='table') with pytest.raises(ValueError) as e: dd.read_hdf(fn, '/data', stop=10) assert 'number of rows' in str(e) with pytest.raises(ValueError) as e: dd.read_hdf(fn, '/data', start=10) assert 'is above or equal to' in str(e) with pytest.raises(ValueError) as e: dd.read_hdf(fn, '/data', chunksize=-1) assert 'positive integer' in str(e) def test_hdf_globbing(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) with tmpdir() as tdir: df.to_hdf(os.path.join(tdir, 'one.h5'), '/foo/data', format='table') df.to_hdf(os.path.join(tdir, 'two.h5'), '/bar/data', format='table') df.to_hdf(os.path.join(tdir, 'two.h5'), '/foo/data', format='table') with dask.set_options(get=dask.get): res = dd.read_hdf(os.path.join(tdir, 'one.h5'), '/*/data', chunksize=2) assert res.npartitions == 2 tm.assert_frame_equal(res.compute(), df) res = dd.read_hdf(os.path.join(tdir, 'one.h5'), '/*/data', chunksize=2, start=1, stop=3) expected = pd.read_hdf(os.path.join(tdir, 'one.h5'), '/foo/data', start=1, stop=3) tm.assert_frame_equal(res.compute(), expected) res = dd.read_hdf(os.path.join(tdir, 'two.h5'), '/*/data', chunksize=2) assert res.npartitions == 2 + 2 tm.assert_frame_equal(res.compute(), pd.concat([df] * 2)) res = dd.read_hdf(os.path.join(tdir, '*.h5'), '/foo/data', chunksize=2) assert res.npartitions == 2 + 2 tm.assert_frame_equal(res.compute(), pd.concat([df] * 2)) res = 
dd.read_hdf(os.path.join(tdir, '*.h5'), '/*/data', chunksize=2) assert res.npartitions == 2 + 2 + 2 tm.assert_frame_equal(res.compute(), pd.concat([df] * 3)) def test_read_hdf_doesnt_segfault(): pytest.importorskip('tables') with tmpfile('h5') as fn: N = 40 df = pd.DataFrame(np.random.randn(N, 3)) with pd.HDFStore(fn, mode='w') as store: store.append('/x', df) ddf = dd.read_hdf(fn, '/x', chunksize=2) assert len(ddf) == N
mraspaud/dask
dask/dataframe/io/tests/test_hdf.py
dask/tests/test_order.py
from __future__ import absolute_import, division, print_function

from .profile import Profiler, ResourceProfiler, CacheProfiler
from .progress import ProgressBar
from .profile_visualize import visualize
import numpy as np import pandas as pd import pandas.util.testing as tm import sys import os import dask import pytest from time import sleep import dask.dataframe as dd from dask.utils import tmpfile, tmpdir, dependency_depth from dask.dataframe.utils import assert_eq def test_to_hdf(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data') out = pd.read_hdf(fn, '/data') tm.assert_frame_equal(df, out[:]) with tmpfile('h5') as fn: a.x.to_hdf(fn, '/data') out = pd.read_hdf(fn, '/data') tm.assert_series_equal(df.x, out[:]) a = dd.from_pandas(df, 1) with tmpfile('h5') as fn: a.to_hdf(fn, '/data') out = pd.read_hdf(fn, '/data') tm.assert_frame_equal(df, out[:]) # test compute = False with tmpfile('h5') as fn: r = a.to_hdf(fn, '/data', compute=False) r.compute() out = pd.read_hdf(fn, '/data') tm.assert_frame_equal(df, out[:]) def test_to_hdf_multiple_nodes(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 2) df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) b = dd.from_pandas(df16, 16) # saving to multiple nodes with tmpfile('h5') as fn: a.to_hdf(fn, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df, out) # saving to multiple nodes making sure order is kept with tmpfile('h5') as fn: b.to_hdf(fn, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df16, out) # saving to multiple datasets with custom name_function with tmpfile('h5') as fn: a.to_hdf(fn, '/data_*', name_function=lambda i: 'a' * (i + 1)) out = dd.read_hdf(fn, '/data_*') assert_eq(df, out) out = pd.read_hdf(fn, '/data_a') tm.assert_frame_equal(out, df.iloc[:2]) out = pd.read_hdf(fn, '/data_aa') tm.assert_frame_equal(out, df.iloc[2:]) # test multiple nodes with hdf object with tmpfile('h5') as fn: with pd.HDFStore(fn) as hdf: b.to_hdf(hdf, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df16, out) def test_to_hdf_multiple_files(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 2) df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) b = dd.from_pandas(df16, 16) # saving to multiple files with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data') out = dd.read_hdf(fn, '/data') assert_eq(df, out) # saving to multiple files making sure order is kept with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') b.to_hdf(fn, '/data') out = dd.read_hdf(fn, '/data') assert_eq(df16, out) # saving to multiple files with custom name_function with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data', name_function=lambda i: 'a' * (i + 1)) out = dd.read_hdf(fn, '/data') assert_eq(df, out) out = pd.read_hdf(os.path.join(dn, 'data_a.h5'), '/data') tm.assert_frame_equal(out, df.iloc[:2]) out = pd.read_hdf(os.path.join(dn, 'data_aa.h5'), '/data') tm.assert_frame_equal(out, df.iloc[2:]) # test hdf object with tmpfile('h5') as fn: with pd.HDFStore(fn) as hdf: a.to_hdf(hdf, '/data*') out = 
dd.read_hdf(fn, '/data*') assert_eq(df, out) def test_to_hdf_modes_multiple_nodes(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) # appending a single partition to existing data a = dd.from_pandas(df, 1) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='a') out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) # overwriting a file with a single partition a = dd.from_pandas(df, 1) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='w') out = dd.read_hdf(fn, '/data*') assert_eq(df, out) # appending two partitions to existing data a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='a') out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) # overwriting a file with two partitions a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='w') out = dd.read_hdf(fn, '/data*') assert_eq(df, out) # overwriting a single partition, keeping other partitions a = dd.from_pandas(df, 2) with tmpfile('h5') as fn: a.to_hdf(fn, '/data1') a.to_hdf(fn, '/data2') a.to_hdf(fn, '/data*', mode='a', append=False) out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) def test_to_hdf_modes_multiple_files(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) # appending a single partition to existing data a = dd.from_pandas(df, 1) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data2'), '/data') a.to_hdf(fn, '/data', mode='a') out = dd.read_hdf(fn, '/data*') assert_eq(df.append(df), out) # appending two partitions to existing data a = dd.from_pandas(df, 2) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data2'), '/data') a.to_hdf(fn, '/data', mode='a') out = dd.read_hdf(fn, '/data') assert_eq(df.append(df), out) # overwriting a file with two partitions a = dd.from_pandas(df, 2) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data1'), '/data') a.to_hdf(fn, '/data', mode='w') out = dd.read_hdf(fn, '/data') assert_eq(df, out) # overwriting a single partition, keeping other partitions a = dd.from_pandas(df, 2) with tmpdir() as dn: fn = os.path.join(dn, 'data*') a.to_hdf(os.path.join(dn, 'data1'), '/data') a.to_hdf(fn, '/data', mode='a', append=False) out = dd.read_hdf(fn, '/data') assert_eq(df.append(df), out) def test_to_hdf_link_optimizations(): """testing dask link levels is correct by calculating the depth of the dask graph""" pytest.importorskip('tables') df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df16, 16) # saving to multiple hdf files, no links are needed # expected layers: from_pandas, to_hdf, list = depth of 3 with tmpdir() as dn: fn = os.path.join(dn, 'data*') d = a.to_hdf(fn, '/data', compute=False) assert dependency_depth(d.dask) == 3 # saving to a single hdf file with multiple nodes # all subsequent nodes depend on the first # expected layers: from_pandas, first to_hdf(creates file+node), subsequent to_hdfs, list = 4 with tmpfile() as fn: d = a.to_hdf(fn, '/data*', compute=False) assert dependency_depth(d.dask) == 4 # saving to a single hdf file with a single node # every node 
depends on the previous node # expected layers: from_pandas, to_hdf times npartitions(15), list = 2 + npartitions = 17 with tmpfile() as fn: d = a.to_hdf(fn, '/data', compute=False) assert dependency_depth(d.dask) == 2 + a.npartitions @pytest.mark.slow def test_to_hdf_lock_delays(): pytest.importorskip('tables') df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df16, 16) # adding artifichial delays to make sure last tasks finish first # that's a way to simulate last tasks finishing last def delayed_nop(i): if i[1] < 10: sleep(0.1 * (10 - i[1])) return i # saving to multiple hdf nodes with tmpfile() as fn: a = a.apply(delayed_nop, axis=1, meta=a) a.to_hdf(fn, '/data*') out = dd.read_hdf(fn, '/data*') assert_eq(df16, out) # saving to multiple hdf files # adding artifichial delays to make sure last tasks finish first with tmpdir() as dn: fn = os.path.join(dn, 'data*') a = a.apply(delayed_nop, axis=1, meta=a) a.to_hdf(fn, '/data') out = dd.read_hdf(fn, '/data') assert_eq(df16, out) def test_to_hdf_exceptions(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) a = dd.from_pandas(df, 1) # triggering too many asterisks error with tmpdir() as dn: with pytest.raises(ValueError): fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data_*') # triggering too many asterisks error with tmpfile() as fn: with pd.HDFStore(fn) as hdf: with pytest.raises(ValueError): a.to_hdf(hdf, '/data_*_*') @pytest.mark.parametrize('get', [dask.get, dask.threaded.get, dask.multiprocessing.get]) @pytest.mark.parametrize('npartitions', [1, 4, 10]) def test_to_hdf_schedulers(get, npartitions): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df, npartitions=npartitions) # test single file single node with tmpfile('h5') as fn: a.to_hdf(fn, '/data', get=get) out = pd.read_hdf(fn, '/data') assert_eq(df, out) # test multiple files single node with tmpdir() as dn: fn = os.path.join(dn, 'data_*.h5') a.to_hdf(fn, '/data', get=get) out = dd.read_hdf(fn, '/data') assert_eq(df, out) # test single file multiple nodes with tmpfile('h5') as fn: a.to_hdf(fn, '/data*', get=get) out = dd.read_hdf(fn, '/data*') assert_eq(df, out) def test_to_hdf_kwargs(): pytest.importorskip('tables') df = pd.DataFrame({'A': ['a', 'aaaa']}) ddf = dd.from_pandas(df, npartitions=2) with tmpfile('h5') as fn: ddf.to_hdf(fn, 'foo4', format='table', min_itemsize=4) df2 = pd.read_hdf(fn, 'foo4') tm.assert_frame_equal(df, df2) @pytest.mark.skipif(sys.version_info[:2] == (3, 3), reason="Python3.3 uses pytest2.7.2, w/o warns method") def test_to_fmt_warns(): pytest.importorskip('tables') df16 = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df16, 16) # testing warning when breaking order with tmpfile('h5') as fn: with pytest.warns(None): a.to_hdf(fn, '/data*', name_function=str) # testing warning when breaking order with tmpdir() 
as dn: with pytest.warns(None): fn = os.path.join(dn, "data_*.csv") a.to_csv(fn, name_function=str) @pytest.mark.parametrize('data, compare', [ (pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]), tm.assert_frame_equal), (pd.Series([1, 2, 3, 4], name='a'), tm.assert_series_equal), ]) def test_read_hdf(data, compare): pytest.importorskip('tables') with tmpfile('h5') as fn: data.to_hdf(fn, '/data') try: dd.read_hdf(fn, 'data', chunksize=2, mode='r') assert False except TypeError as e: assert "format='table'" in str(e) with tmpfile('h5') as fn: data.to_hdf(fn, '/data', format='table') a = dd.read_hdf(fn, '/data', chunksize=2, mode='r') assert a.npartitions == 2 compare(a.compute(), data) compare(dd.read_hdf(fn, '/data', chunksize=2, start=1, stop=3, mode='r').compute(), pd.read_hdf(fn, '/data', start=1, stop=3)) assert (sorted(dd.read_hdf(fn, '/data', mode='r').dask) == sorted(dd.read_hdf(fn, '/data', mode='r').dask)) def test_read_hdf_multiply_open(): """Test that we can read from a file that's already opened elsewhere in read-only mode.""" pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) with tmpfile('h5') as fn: df.to_hdf(fn, '/data', format='table') with pd.HDFStore(fn, mode='r'): dd.read_hdf(fn, '/data', chunksize=2, mode='r') def test_read_hdf_multiple(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'], 'y': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]}, index=[1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16.]) a = dd.from_pandas(df, 16) with tmpfile('h5') as fn: a.to_hdf(fn, '/data*') r = dd.read_hdf(fn, '/data*', sorted_index=True) assert a.npartitions == r.npartitions assert a.divisions == r.divisions assert_eq(a, r) def test_read_hdf_start_stop_values(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) with tmpfile('h5') as fn: df.to_hdf(fn, '/data', format='table') with pytest.raises(ValueError) as e: dd.read_hdf(fn, '/data', stop=10) assert 'number of rows' in str(e) with pytest.raises(ValueError) as e: dd.read_hdf(fn, '/data', start=10) assert 'is above or equal to' in str(e) with pytest.raises(ValueError) as e: dd.read_hdf(fn, '/data', chunksize=-1) assert 'positive integer' in str(e) def test_hdf_globbing(): pytest.importorskip('tables') df = pd.DataFrame({'x': ['a', 'b', 'c', 'd'], 'y': [1, 2, 3, 4]}, index=[1., 2., 3., 4.]) with tmpdir() as tdir: df.to_hdf(os.path.join(tdir, 'one.h5'), '/foo/data', format='table') df.to_hdf(os.path.join(tdir, 'two.h5'), '/bar/data', format='table') df.to_hdf(os.path.join(tdir, 'two.h5'), '/foo/data', format='table') with dask.set_options(get=dask.get): res = dd.read_hdf(os.path.join(tdir, 'one.h5'), '/*/data', chunksize=2) assert res.npartitions == 2 tm.assert_frame_equal(res.compute(), df) res = dd.read_hdf(os.path.join(tdir, 'one.h5'), '/*/data', chunksize=2, start=1, stop=3) expected = pd.read_hdf(os.path.join(tdir, 'one.h5'), '/foo/data', start=1, stop=3) tm.assert_frame_equal(res.compute(), expected) res = dd.read_hdf(os.path.join(tdir, 'two.h5'), '/*/data', chunksize=2) assert res.npartitions == 2 + 2 tm.assert_frame_equal(res.compute(), pd.concat([df] * 2)) res = dd.read_hdf(os.path.join(tdir, '*.h5'), '/foo/data', chunksize=2) assert res.npartitions == 2 + 2 tm.assert_frame_equal(res.compute(), pd.concat([df] * 2)) res = 
dd.read_hdf(os.path.join(tdir, '*.h5'), '/*/data', chunksize=2) assert res.npartitions == 2 + 2 + 2 tm.assert_frame_equal(res.compute(), pd.concat([df] * 3)) def test_read_hdf_doesnt_segfault(): pytest.importorskip('tables') with tmpfile('h5') as fn: N = 40 df = pd.DataFrame(np.random.randn(N, 3)) with pd.HDFStore(fn, mode='w') as store: store.append('/x', df) ddf = dd.read_hdf(fn, '/x', chunksize=2) assert len(ddf) == N
mraspaud/dask
dask/dataframe/io/tests/test_hdf.py
dask/diagnostics/__init__.py
# This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.15 (https://github.com/warner/python-versioneer) import errno import os import re import subprocess import sys from pandas.compat import PY3 def get_keywords(): # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "$Format:%d$" git_full = "$Format:%H$" keywords = {"refnames": git_refnames, "full": git_full} return keywords class VersioneerConfig(object): pass def get_config(): # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "v" cfg.parentdir_prefix = "pandas-" cfg.versionfile_source = "pandas/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): pass LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator def decorate(f): if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run {dispcmd}".format(dispcmd=dispcmd)) print(e) return None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None stdout = p.communicate()[0].strip() if PY3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run {dispcmd} (error)".format(dispcmd=dispcmd)) return None return stdout def versions_from_parentdir(parentdir_prefix, root, verbose): # Source tarballs conventionally unpack into a directory that includes # both the project name and a version string. dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: print("guessing rootdir is '{root}', but '{dirname}' " "doesn't start with prefix '{parentdir_prefix}'".format( root=root, dirname=dirname, parentdir_prefix=parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None} @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): if not keywords: raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '{}', no digits".format(",".join(refs - tags))) if verbose: print("likely tags: {}".format(",".join(sorted(tags)))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking {r}".format(r=r)) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags"} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # this runs 'git' from the root of the source tree. This only gets called # if the git-archive 'subst' keywords were *not* expanded, and # _version.py hasn't already been rewritten with a short version string, # meaning we're inside a checked out source tree. if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in {root}".format(root=root)) raise NotThisMethod("no .git directory") GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] # if there is a tag, this yields TAG-NUM-gHEX[-dirty] # if there are no tags, this yields HEX[-dirty] (no NUM) describe_out = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. 
git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: " "'{describe_out}'".format( describe_out=describe_out)) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '{full_tag}' doesn't start with prefix " \ "'{tag_prefix}'" print(fmt.format(full_tag=full_tag, tag_prefix=tag_prefix)) pieces["error"] = ("tag '{full_tag}' doesn't start with " "prefix '{tag_prefix}'".format( full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits return pieces def plus_or_dot(pieces): if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): # now build up version string, with post-release "local version # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty # exceptions: # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "{:d}.g{}".format(pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.{:d}.g{}".format(pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): # TAG[.post.devDISTANCE] . No -dirty # exceptions: # 1: no tags. 0.post.devDISTANCE if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that # .dev0 sorts backwards (a dirty tree will appear "older" than the # corresponding clean one), but you shouldn't be releasing software with # -dirty anyways. # exceptions: # 1: no tags. 0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post{:d}".format(pieces["distance"]) if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g{}".format(pieces["short"]) else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g{}".format(pieces["short"]) return rendered def render_pep440_old(pieces): # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. # exceptions: # 1: no tags. 
0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty # --always' # exceptions: # 1: no tags. HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-{:d}-g{}".format(pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty # --always -long'. The distance/hash is unconditional. # exceptions: # 1: no tags. HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-{:d}-g{}".format(pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"]} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '{style}'".format(style=style)) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None} def get_versions(): # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree"} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version"}
# coding=utf-8 # pylint: disable-msg=E1101,W0612 from datetime import datetime, timedelta from distutils.version import LooseVersion import numpy as np from numpy import nan import pytest import pytz from pandas._libs.tslib import iNaT from pandas.compat import range from pandas.errors import PerformanceWarning import pandas.util._test_decorators as td import pandas as pd from pandas import ( Categorical, DataFrame, Index, IntervalIndex, MultiIndex, NaT, Series, Timestamp, date_range, isna) from pandas.core.series import remove_na import pandas.util.testing as tm from pandas.util.testing import assert_frame_equal, assert_series_equal try: import scipy _is_scipy_ge_0190 = (LooseVersion(scipy.__version__) >= LooseVersion('0.19.0')) except ImportError: _is_scipy_ge_0190 = False def _skip_if_no_pchip(): try: from scipy.interpolate import pchip_interpolate # noqa except ImportError: import pytest pytest.skip('scipy.interpolate.pchip missing') def _skip_if_no_akima(): try: from scipy.interpolate import Akima1DInterpolator # noqa except ImportError: import pytest pytest.skip('scipy.interpolate.Akima1DInterpolator missing') def _simple_ts(start, end, freq='D'): rng = date_range(start, end, freq=freq) return Series(np.random.randn(len(rng)), index=rng) class TestSeriesMissingData(): def test_remove_na_deprecation(self): # see gh-16971 with tm.assert_produces_warning(FutureWarning): remove_na(Series([])) def test_timedelta_fillna(self): # GH 3371 s = Series([Timestamp('20130101'), Timestamp('20130101'), Timestamp('20130102'), Timestamp('20130103 9:01:01')]) td = s.diff() # reg fillna result = td.fillna(0) expected = Series([timedelta(0), timedelta(0), timedelta(1), timedelta(days=1, seconds=9 * 3600 + 60 + 1)]) assert_series_equal(result, expected) # interprested as seconds result = td.fillna(1) expected = Series([timedelta(seconds=1), timedelta(0), timedelta(1), timedelta(days=1, seconds=9 * 3600 + 60 + 1)]) assert_series_equal(result, expected) result = td.fillna(timedelta(days=1, seconds=1)) expected = Series([timedelta(days=1, seconds=1), timedelta(0), timedelta(1), timedelta(days=1, seconds=9 * 3600 + 60 + 1)]) assert_series_equal(result, expected) result = td.fillna(np.timedelta64(int(1e9))) expected = Series([timedelta(seconds=1), timedelta(0), timedelta(1), timedelta(days=1, seconds=9 * 3600 + 60 + 1)]) assert_series_equal(result, expected) result = td.fillna(NaT) expected = Series([NaT, timedelta(0), timedelta(1), timedelta(days=1, seconds=9 * 3600 + 60 + 1)], dtype='m8[ns]') assert_series_equal(result, expected) # ffill td[2] = np.nan result = td.ffill() expected = td.fillna(0) expected[0] = np.nan assert_series_equal(result, expected) # bfill td[2] = np.nan result = td.bfill() expected = td.fillna(0) expected[2] = timedelta(days=1, seconds=9 * 3600 + 60 + 1) assert_series_equal(result, expected) def test_datetime64_fillna(self): s = Series([Timestamp('20130101'), Timestamp('20130101'), Timestamp( '20130102'), Timestamp('20130103 9:01:01')]) s[2] = np.nan # reg fillna result = s.fillna(Timestamp('20130104')) expected = Series([Timestamp('20130101'), Timestamp( '20130101'), Timestamp('20130104'), Timestamp('20130103 9:01:01')]) assert_series_equal(result, expected) result = s.fillna(NaT) expected = s assert_series_equal(result, expected) # ffill result = s.ffill() expected = Series([Timestamp('20130101'), Timestamp( '20130101'), Timestamp('20130101'), Timestamp('20130103 9:01:01')]) assert_series_equal(result, expected) # bfill result = s.bfill() expected = Series([Timestamp('20130101'), 
Timestamp('20130101'), Timestamp('20130103 9:01:01'), Timestamp( '20130103 9:01:01')]) assert_series_equal(result, expected) # GH 6587 # make sure that we are treating as integer when filling # this also tests inference of a datetime-like with NaT's s = Series([pd.NaT, pd.NaT, '2013-08-05 15:30:00.000001']) expected = Series( ['2013-08-05 15:30:00.000001', '2013-08-05 15:30:00.000001', '2013-08-05 15:30:00.000001'], dtype='M8[ns]') result = s.fillna(method='backfill') assert_series_equal(result, expected) def test_datetime64_tz_fillna(self): for tz in ['US/Eastern', 'Asia/Tokyo']: # DatetimeBlock s = Series([Timestamp('2011-01-01 10:00'), pd.NaT, Timestamp('2011-01-03 10:00'), pd.NaT]) null_loc = pd.Series([False, True, False, True]) result = s.fillna(pd.Timestamp('2011-01-02 10:00')) expected = Series([Timestamp('2011-01-01 10:00'), Timestamp('2011-01-02 10:00'), Timestamp('2011-01-03 10:00'), Timestamp('2011-01-02 10:00')]) tm.assert_series_equal(expected, result) # check s is not changed tm.assert_series_equal(pd.isna(s), null_loc) result = s.fillna(pd.Timestamp('2011-01-02 10:00', tz=tz)) expected = Series([Timestamp('2011-01-01 10:00'), Timestamp('2011-01-02 10:00', tz=tz), Timestamp('2011-01-03 10:00'), Timestamp('2011-01-02 10:00', tz=tz)]) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) result = s.fillna('AAA') expected = Series([Timestamp('2011-01-01 10:00'), 'AAA', Timestamp('2011-01-03 10:00'), 'AAA'], dtype=object) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) result = s.fillna({1: pd.Timestamp('2011-01-02 10:00', tz=tz), 3: pd.Timestamp('2011-01-04 10:00')}) expected = Series([Timestamp('2011-01-01 10:00'), Timestamp('2011-01-02 10:00', tz=tz), Timestamp('2011-01-03 10:00'), Timestamp('2011-01-04 10:00')]) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) result = s.fillna({1: pd.Timestamp('2011-01-02 10:00'), 3: pd.Timestamp('2011-01-04 10:00')}) expected = Series([Timestamp('2011-01-01 10:00'), Timestamp('2011-01-02 10:00'), Timestamp('2011-01-03 10:00'), Timestamp('2011-01-04 10:00')]) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) # DatetimeBlockTZ idx = pd.DatetimeIndex(['2011-01-01 10:00', pd.NaT, '2011-01-03 10:00', pd.NaT], tz=tz) s = pd.Series(idx) assert s.dtype == 'datetime64[ns, {0}]'.format(tz) tm.assert_series_equal(pd.isna(s), null_loc) result = s.fillna(pd.Timestamp('2011-01-02 10:00')) expected = Series([Timestamp('2011-01-01 10:00', tz=tz), Timestamp('2011-01-02 10:00'), Timestamp('2011-01-03 10:00', tz=tz), Timestamp('2011-01-02 10:00')]) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) result = s.fillna(pd.Timestamp('2011-01-02 10:00', tz=tz)) idx = pd.DatetimeIndex(['2011-01-01 10:00', '2011-01-02 10:00', '2011-01-03 10:00', '2011-01-02 10:00'], tz=tz) expected = Series(idx) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) result = s.fillna(pd.Timestamp('2011-01-02 10:00', tz=tz).to_pydatetime()) idx = pd.DatetimeIndex(['2011-01-01 10:00', '2011-01-02 10:00', '2011-01-03 10:00', '2011-01-02 10:00'], tz=tz) expected = Series(idx) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) result = s.fillna('AAA') expected = Series([Timestamp('2011-01-01 10:00', tz=tz), 'AAA', Timestamp('2011-01-03 10:00', tz=tz), 'AAA'], dtype=object) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), 
null_loc) result = s.fillna({1: pd.Timestamp('2011-01-02 10:00', tz=tz), 3: pd.Timestamp('2011-01-04 10:00')}) expected = Series([Timestamp('2011-01-01 10:00', tz=tz), Timestamp('2011-01-02 10:00', tz=tz), Timestamp('2011-01-03 10:00', tz=tz), Timestamp('2011-01-04 10:00')]) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) result = s.fillna({1: pd.Timestamp('2011-01-02 10:00', tz=tz), 3: pd.Timestamp('2011-01-04 10:00', tz=tz)}) expected = Series([Timestamp('2011-01-01 10:00', tz=tz), Timestamp('2011-01-02 10:00', tz=tz), Timestamp('2011-01-03 10:00', tz=tz), Timestamp('2011-01-04 10:00', tz=tz)]) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) # filling with a naive/other zone, coerce to object result = s.fillna(Timestamp('20130101')) expected = Series([Timestamp('2011-01-01 10:00', tz=tz), Timestamp('2013-01-01'), Timestamp('2011-01-03 10:00', tz=tz), Timestamp('2013-01-01')]) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) result = s.fillna(Timestamp('20130101', tz='US/Pacific')) expected = Series([Timestamp('2011-01-01 10:00', tz=tz), Timestamp('2013-01-01', tz='US/Pacific'), Timestamp('2011-01-03 10:00', tz=tz), Timestamp('2013-01-01', tz='US/Pacific')]) tm.assert_series_equal(expected, result) tm.assert_series_equal(pd.isna(s), null_loc) # with timezone # GH 15855 df = pd.Series([pd.Timestamp('2012-11-11 00:00:00+01:00'), pd.NaT]) exp = pd.Series([pd.Timestamp('2012-11-11 00:00:00+01:00'), pd.Timestamp('2012-11-11 00:00:00+01:00')]) assert_series_equal(df.fillna(method='pad'), exp) df = pd.Series([pd.NaT, pd.Timestamp('2012-11-11 00:00:00+01:00')]) exp = pd.Series([pd.Timestamp('2012-11-11 00:00:00+01:00'), pd.Timestamp('2012-11-11 00:00:00+01:00')]) assert_series_equal(df.fillna(method='bfill'), exp) def test_fillna_consistency(self): # GH 16402 # fillna with a tz aware to a tz-naive, should result in object s = Series([Timestamp('20130101'), pd.NaT]) result = s.fillna(Timestamp('20130101', tz='US/Eastern')) expected = Series([Timestamp('20130101'), Timestamp('2013-01-01', tz='US/Eastern')], dtype='object') assert_series_equal(result, expected) # where (we ignore the errors=) result = s.where([True, False], Timestamp('20130101', tz='US/Eastern'), errors='ignore') assert_series_equal(result, expected) result = s.where([True, False], Timestamp('20130101', tz='US/Eastern'), errors='ignore') assert_series_equal(result, expected) # with a non-datetime result = s.fillna('foo') expected = Series([Timestamp('20130101'), 'foo']) assert_series_equal(result, expected) # assignment s2 = s.copy() s2[1] = 'foo' assert_series_equal(s2, expected) def test_datetime64tz_fillna_round_issue(self): # GH 14872 data = pd.Series([pd.NaT, pd.NaT, datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc)]) filled = data.fillna(method='bfill') expected = pd.Series([datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc), datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc), datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc)]) assert_series_equal(filled, expected) def test_fillna_downcast(self): # GH 15277 # infer int64 from float64 s = pd.Series([1., np.nan]) result = s.fillna(0, downcast='infer') expected = pd.Series([1, 0]) assert_series_equal(result, expected) # infer int64 from float64 when fillna value is a dict s = pd.Series([1., np.nan]) result = s.fillna({1: 0}, downcast='infer') expected = pd.Series([1, 0]) assert_series_equal(result, expected) def test_fillna_int(self): 
s = Series(np.random.randint(-100, 100, 50)) s.fillna(method='ffill', inplace=True) assert_series_equal(s.fillna(method='ffill', inplace=False), s) def test_fillna_raise(self): s = Series(np.random.randint(-100, 100, 50)) pytest.raises(TypeError, s.fillna, [1, 2]) pytest.raises(TypeError, s.fillna, (1, 2)) # related GH 9217, make sure limit is an int and greater than 0 s = Series([1, 2, 3, None]) for limit in [-1, 0, 1., 2.]: for method in ['backfill', 'bfill', 'pad', 'ffill', None]: with pytest.raises(ValueError): s.fillna(1, limit=limit, method=method) def test_categorical_nan_equality(self): cat = Series(Categorical(["a", "b", "c", np.nan])) exp = Series([True, True, True, False]) res = (cat == cat) tm.assert_series_equal(res, exp) def test_categorical_nan_handling(self): # NaNs are represented as -1 in labels s = Series(Categorical(["a", "b", np.nan, "a"])) tm.assert_index_equal(s.cat.categories, Index(["a", "b"])) tm.assert_numpy_array_equal(s.values.codes, np.array([0, 1, -1, 0], dtype=np.int8)) @pytest.mark.parametrize('fill_value, expected_output', [ ('a', ['a', 'a', 'b', 'a', 'a']), ({1: 'a', 3: 'b', 4: 'b'}, ['a', 'a', 'b', 'b', 'b']), ({1: 'a'}, ['a', 'a', 'b', np.nan, np.nan]), ({1: 'a', 3: 'b'}, ['a', 'a', 'b', 'b', np.nan]), (Series('a'), ['a', np.nan, 'b', np.nan, np.nan]), (Series('a', index=[1]), ['a', 'a', 'b', np.nan, np.nan]), (Series({1: 'a', 3: 'b'}), ['a', 'a', 'b', 'b', np.nan]), (Series(['a', 'b'], index=[3, 4]), ['a', np.nan, 'b', 'a', 'b']) ]) def test_fillna_categorical(self, fill_value, expected_output): # GH 17033 # Test fillna for a Categorical series data = ['a', np.nan, 'b', np.nan, np.nan] s = Series(Categorical(data, categories=['a', 'b'])) exp = Series(Categorical(expected_output, categories=['a', 'b'])) tm.assert_series_equal(s.fillna(fill_value), exp) def test_fillna_categorical_raise(self): data = ['a', np.nan, 'b', np.nan, np.nan] s = Series(Categorical(data, categories=['a', 'b'])) with pytest.raises(ValueError, match="fill value must be in categories"): s.fillna('d') with pytest.raises(ValueError, match="fill value must be in categories"): s.fillna(Series('d')) with pytest.raises(ValueError, match="fill value must be in categories"): s.fillna({1: 'd', 3: 'a'}) msg = ('"value" parameter must be a scalar or ' 'dict, but you passed a "list"') with pytest.raises(TypeError, match=msg): s.fillna(['a', 'b']) msg = ('"value" parameter must be a scalar or ' 'dict, but you passed a "tuple"') with pytest.raises(TypeError, match=msg): s.fillna(('a', 'b')) msg = ('"value" parameter must be a scalar, dict ' 'or Series, but you passed a "DataFrame"') with pytest.raises(TypeError, match=msg): s.fillna(DataFrame({1: ['a'], 3: ['b']})) def test_fillna_nat(self): series = Series([0, 1, 2, iNaT], dtype='M8[ns]') filled = series.fillna(method='pad') filled2 = series.fillna(value=series.values[2]) expected = series.copy() expected.values[3] = expected.values[2] assert_series_equal(filled, expected) assert_series_equal(filled2, expected) df = DataFrame({'A': series}) filled = df.fillna(method='pad') filled2 = df.fillna(value=series.values[2]) expected = DataFrame({'A': expected}) assert_frame_equal(filled, expected) assert_frame_equal(filled2, expected) series = Series([iNaT, 0, 1, 2], dtype='M8[ns]') filled = series.fillna(method='bfill') filled2 = series.fillna(value=series[1]) expected = series.copy() expected[0] = expected[1] assert_series_equal(filled, expected) assert_series_equal(filled2, expected) df = DataFrame({'A': series}) filled = df.fillna(method='bfill') 
filled2 = df.fillna(value=series[1]) expected = DataFrame({'A': expected}) assert_frame_equal(filled, expected) assert_frame_equal(filled2, expected) def test_isna_for_inf(self): s = Series(['a', np.inf, np.nan, 1.0]) with pd.option_context('mode.use_inf_as_na', True): r = s.isna() dr = s.dropna() e = Series([False, True, True, False]) de = Series(['a', 1.0], index=[0, 3]) tm.assert_series_equal(r, e) tm.assert_series_equal(dr, de) @tm.capture_stdout def test_isnull_for_inf_deprecated(self): # gh-17115 s = Series(['a', np.inf, np.nan, 1.0]) with pd.option_context('mode.use_inf_as_null', True): r = s.isna() dr = s.dropna() e = Series([False, True, True, False]) de = Series(['a', 1.0], index=[0, 3]) tm.assert_series_equal(r, e) tm.assert_series_equal(dr, de) def test_fillna(self, datetime_series): ts = Series([0., 1., 2., 3., 4.], index=tm.makeDateIndex(5)) tm.assert_series_equal(ts, ts.fillna(method='ffill')) ts[2] = np.NaN exp = Series([0., 1., 1., 3., 4.], index=ts.index) tm.assert_series_equal(ts.fillna(method='ffill'), exp) exp = Series([0., 1., 3., 3., 4.], index=ts.index) tm.assert_series_equal(ts.fillna(method='backfill'), exp) exp = Series([0., 1., 5., 3., 4.], index=ts.index) tm.assert_series_equal(ts.fillna(value=5), exp) pytest.raises(ValueError, ts.fillna) pytest.raises(ValueError, datetime_series.fillna, value=0, method='ffill') # GH 5703 s1 = Series([np.nan]) s2 = Series([1]) result = s1.fillna(s2) expected = Series([1.]) assert_series_equal(result, expected) result = s1.fillna({}) assert_series_equal(result, s1) result = s1.fillna(Series(())) assert_series_equal(result, s1) result = s2.fillna(s1) assert_series_equal(result, s2) result = s1.fillna({0: 1}) assert_series_equal(result, expected) result = s1.fillna({1: 1}) assert_series_equal(result, Series([np.nan])) result = s1.fillna({0: 1, 1: 1}) assert_series_equal(result, expected) result = s1.fillna(Series({0: 1, 1: 1})) assert_series_equal(result, expected) result = s1.fillna(Series({0: 1, 1: 1}, index=[4, 5])) assert_series_equal(result, s1) s1 = Series([0, 1, 2], list('abc')) s2 = Series([0, np.nan, 2], list('bac')) result = s2.fillna(s1) expected = Series([0, 0, 2.], list('bac')) assert_series_equal(result, expected) # limit s = Series(np.nan, index=[0, 1, 2]) result = s.fillna(999, limit=1) expected = Series([999, np.nan, np.nan], index=[0, 1, 2]) assert_series_equal(result, expected) result = s.fillna(999, limit=2) expected = Series([999, 999, np.nan], index=[0, 1, 2]) assert_series_equal(result, expected) # GH 9043 # make sure a string representation of int/float values can be filled # correctly without raising errors or being converted vals = ['0', '1.5', '-0.3'] for val in vals: s = Series([0, 1, np.nan, np.nan, 4], dtype='float64') result = s.fillna(val) expected = Series([0, 1, val, val, 4], dtype='object') assert_series_equal(result, expected) def test_fillna_bug(self): x = Series([nan, 1., nan, 3., nan], ['z', 'a', 'b', 'c', 'd']) filled = x.fillna(method='ffill') expected = Series([nan, 1., 1., 3., 3.], x.index) assert_series_equal(filled, expected) filled = x.fillna(method='bfill') expected = Series([1., 1., 3., 3., nan], x.index) assert_series_equal(filled, expected) def test_fillna_inplace(self): x = Series([nan, 1., nan, 3., nan], ['z', 'a', 'b', 'c', 'd']) y = x.copy() y.fillna(value=0, inplace=True) expected = x.fillna(value=0) assert_series_equal(y, expected) def test_fillna_invalid_method(self, datetime_series): try: datetime_series.fillna(method='ffil') except ValueError as inst: assert 'ffil' in 
str(inst) def test_ffill(self): ts = Series([0., 1., 2., 3., 4.], index=tm.makeDateIndex(5)) ts[2] = np.NaN assert_series_equal(ts.ffill(), ts.fillna(method='ffill')) def test_ffill_mixed_dtypes_without_missing_data(self): # GH14956 series = pd.Series([datetime(2015, 1, 1, tzinfo=pytz.utc), 1]) result = series.ffill() assert_series_equal(series, result) def test_bfill(self): ts = Series([0., 1., 2., 3., 4.], index=tm.makeDateIndex(5)) ts[2] = np.NaN assert_series_equal(ts.bfill(), ts.fillna(method='bfill')) def test_timedelta64_nan(self): td = Series([timedelta(days=i) for i in range(10)]) # nan ops on timedeltas td1 = td.copy() td1[0] = np.nan assert isna(td1[0]) assert td1[0].value == iNaT td1[0] = td[0] assert not isna(td1[0]) td1[1] = iNaT assert isna(td1[1]) assert td1[1].value == iNaT td1[1] = td[1] assert not isna(td1[1]) td1[2] = NaT assert isna(td1[2]) assert td1[2].value == iNaT td1[2] = td[2] assert not isna(td1[2]) # boolean setting # this doesn't work, not sure numpy even supports it # result = td[(td>np.timedelta64(timedelta(days=3))) & # td<np.timedelta64(timedelta(days=7)))] = np.nan # assert isna(result).sum() == 7 # NumPy limitiation =( # def test_logical_range_select(self): # np.random.seed(12345) # selector = -0.5 <= datetime_series <= 0.5 # expected = (datetime_series >= -0.5) & (datetime_series <= 0.5) # assert_series_equal(selector, expected) def test_dropna_empty(self): s = Series([]) assert len(s.dropna()) == 0 s.dropna(inplace=True) assert len(s) == 0 # invalid axis pytest.raises(ValueError, s.dropna, axis=1) def test_datetime64_tz_dropna(self): # DatetimeBlock s = Series([Timestamp('2011-01-01 10:00'), pd.NaT, Timestamp( '2011-01-03 10:00'), pd.NaT]) result = s.dropna() expected = Series([Timestamp('2011-01-01 10:00'), Timestamp('2011-01-03 10:00')], index=[0, 2]) tm.assert_series_equal(result, expected) # DatetimeBlockTZ idx = pd.DatetimeIndex(['2011-01-01 10:00', pd.NaT, '2011-01-03 10:00', pd.NaT], tz='Asia/Tokyo') s = pd.Series(idx) assert s.dtype == 'datetime64[ns, Asia/Tokyo]' result = s.dropna() expected = Series([Timestamp('2011-01-01 10:00', tz='Asia/Tokyo'), Timestamp('2011-01-03 10:00', tz='Asia/Tokyo')], index=[0, 2]) assert result.dtype == 'datetime64[ns, Asia/Tokyo]' tm.assert_series_equal(result, expected) def test_dropna_no_nan(self): for s in [Series([1, 2, 3], name='x'), Series( [False, True, False], name='x')]: result = s.dropna() tm.assert_series_equal(result, s) assert result is not s s2 = s.copy() s2.dropna(inplace=True) tm.assert_series_equal(s2, s) def test_dropna_intervals(self): s = Series([np.nan, 1, 2, 3], IntervalIndex.from_arrays( [np.nan, 0, 1, 2], [np.nan, 1, 2, 3])) result = s.dropna() expected = s.iloc[1:] assert_series_equal(result, expected) def test_valid(self, datetime_series): ts = datetime_series.copy() ts[::2] = np.NaN result = ts.dropna() assert len(result) == ts.count() tm.assert_series_equal(result, ts[1::2]) tm.assert_series_equal(result, ts[pd.notna(ts)]) def test_isna(self): ser = Series([0, 5.4, 3, nan, -0.001]) expected = Series([False, False, False, True, False]) tm.assert_series_equal(ser.isna(), expected) ser = Series(["hi", "", nan]) expected = Series([False, False, True]) tm.assert_series_equal(ser.isna(), expected) def test_notna(self): ser = Series([0, 5.4, 3, nan, -0.001]) expected = Series([True, True, True, False, True]) tm.assert_series_equal(ser.notna(), expected) ser = Series(["hi", "", nan]) expected = Series([True, True, False]) tm.assert_series_equal(ser.notna(), expected) def test_pad_nan(self): x 
= Series([np.nan, 1., np.nan, 3., np.nan], ['z', 'a', 'b', 'c', 'd'], dtype=float) x.fillna(method='pad', inplace=True) expected = Series([np.nan, 1.0, 1.0, 3.0, 3.0], ['z', 'a', 'b', 'c', 'd'], dtype=float) assert_series_equal(x[1:], expected[1:]) assert np.isnan(x[0]), np.isnan(expected[0]) def test_pad_require_monotonicity(self): rng = date_range('1/1/2000', '3/1/2000', freq='B') # neither monotonic increasing or decreasing rng2 = rng[[1, 0, 2]] pytest.raises(ValueError, rng2.get_indexer, rng, method='pad') def test_dropna_preserve_name(self, datetime_series): datetime_series[:5] = np.nan result = datetime_series.dropna() assert result.name == datetime_series.name name = datetime_series.name ts = datetime_series.copy() ts.dropna(inplace=True) assert ts.name == name def test_fill_value_when_combine_const(self): # GH12723 s = Series([0, 1, np.nan, 3, 4, 5]) exp = s.fillna(0).add(2) res = s.add(2, fill_value=0) assert_series_equal(res, exp) def test_series_fillna_limit(self): index = np.arange(10) s = Series(np.random.randn(10), index=index) result = s[:2].reindex(index) result = result.fillna(method='pad', limit=5) expected = s[:2].reindex(index).fillna(method='pad') expected[-3:] = np.nan assert_series_equal(result, expected) result = s[-2:].reindex(index) result = result.fillna(method='bfill', limit=5) expected = s[-2:].reindex(index).fillna(method='backfill') expected[:3] = np.nan assert_series_equal(result, expected) def test_sparse_series_fillna_limit(self): index = np.arange(10) s = Series(np.random.randn(10), index=index) ss = s[:2].reindex(index).to_sparse() # TODO: what is this test doing? why are result an expected # the same call to fillna? with tm.assert_produces_warning(PerformanceWarning): # TODO: release-note fillna performance warning result = ss.fillna(method='pad', limit=5) expected = ss.fillna(method='pad', limit=5) expected = expected.to_dense() expected[-3:] = np.nan expected = expected.to_sparse() assert_series_equal(result, expected) ss = s[-2:].reindex(index).to_sparse() with tm.assert_produces_warning(PerformanceWarning): result = ss.fillna(method='backfill', limit=5) expected = ss.fillna(method='backfill') expected = expected.to_dense() expected[:3] = np.nan expected = expected.to_sparse() assert_series_equal(result, expected) def test_sparse_series_pad_backfill_limit(self): index = np.arange(10) s = Series(np.random.randn(10), index=index) s = s.to_sparse() result = s[:2].reindex(index, method='pad', limit=5) with tm.assert_produces_warning(PerformanceWarning): expected = s[:2].reindex(index).fillna(method='pad') expected = expected.to_dense() expected[-3:] = np.nan expected = expected.to_sparse() assert_series_equal(result, expected) result = s[-2:].reindex(index, method='backfill', limit=5) with tm.assert_produces_warning(PerformanceWarning): expected = s[-2:].reindex(index).fillna(method='backfill') expected = expected.to_dense() expected[:3] = np.nan expected = expected.to_sparse() assert_series_equal(result, expected) def test_series_pad_backfill_limit(self): index = np.arange(10) s = Series(np.random.randn(10), index=index) result = s[:2].reindex(index, method='pad', limit=5) expected = s[:2].reindex(index).fillna(method='pad') expected[-3:] = np.nan assert_series_equal(result, expected) result = s[-2:].reindex(index, method='backfill', limit=5) expected = s[-2:].reindex(index).fillna(method='backfill') expected[:3] = np.nan assert_series_equal(result, expected) class TestSeriesInterpolateData(): def test_interpolate(self, datetime_series, string_series): 
ts = Series(np.arange(len(datetime_series), dtype=float), datetime_series.index) ts_copy = ts.copy() ts_copy[5:10] = np.NaN linear_interp = ts_copy.interpolate(method='linear') tm.assert_series_equal(linear_interp, ts) ord_ts = Series([d.toordinal() for d in datetime_series.index], index=datetime_series.index).astype(float) ord_ts_copy = ord_ts.copy() ord_ts_copy[5:10] = np.NaN time_interp = ord_ts_copy.interpolate(method='time') tm.assert_series_equal(time_interp, ord_ts) # try time interpolation on a non-TimeSeries # Only raises ValueError if there are NaNs. non_ts = string_series.copy() non_ts[0] = np.NaN pytest.raises(ValueError, non_ts.interpolate, method='time') @td.skip_if_no_scipy def test_interpolate_pchip(self): _skip_if_no_pchip() ser = Series(np.sort(np.random.uniform(size=100))) # interpolate at new_index new_index = ser.index.union(Index([49.25, 49.5, 49.75, 50.25, 50.5, 50.75])) interp_s = ser.reindex(new_index).interpolate(method='pchip') # does not blow up, GH5977 interp_s[49:51] @td.skip_if_no_scipy def test_interpolate_akima(self): _skip_if_no_akima() ser = Series([10, 11, 12, 13]) expected = Series([11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0])) # interpolate at new_index new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])) interp_s = ser.reindex(new_index).interpolate(method='akima') assert_series_equal(interp_s[1:3], expected) @td.skip_if_no_scipy def test_interpolate_piecewise_polynomial(self): ser = Series([10, 11, 12, 13]) expected = Series([11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0])) # interpolate at new_index new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])) interp_s = ser.reindex(new_index).interpolate( method='piecewise_polynomial') assert_series_equal(interp_s[1:3], expected) @td.skip_if_no_scipy def test_interpolate_from_derivatives(self): ser = Series([10, 11, 12, 13]) expected = Series([11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0])) # interpolate at new_index new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])) interp_s = ser.reindex(new_index).interpolate( method='from_derivatives') assert_series_equal(interp_s[1:3], expected) @pytest.mark.parametrize("kwargs", [ {}, pytest.param({'method': 'polynomial', 'order': 1}, marks=td.skip_if_no_scipy) ]) def test_interpolate_corners(self, kwargs): s = Series([np.nan, np.nan]) assert_series_equal(s.interpolate(**kwargs), s) s = Series([]).interpolate() assert_series_equal(s.interpolate(**kwargs), s) def test_interpolate_index_values(self): s = Series(np.nan, index=np.sort(np.random.rand(30))) s[::3] = np.random.randn(10) vals = s.index.values.astype(float) result = s.interpolate(method='index') expected = s.copy() bad = isna(expected.values) good = ~bad expected = Series(np.interp(vals[bad], vals[good], s.values[good]), index=s.index[bad]) assert_series_equal(result[bad], expected) # 'values' is synonymous with 'index' for the method kwarg other_result = s.interpolate(method='values') assert_series_equal(other_result, result) assert_series_equal(other_result[bad], expected) def test_interpolate_non_ts(self): s = Series([1, 3, np.nan, np.nan, np.nan, 11]) with pytest.raises(ValueError): s.interpolate(method='time') @pytest.mark.parametrize("kwargs", [ {}, pytest.param({'method': 'polynomial', 'order': 1}, 
marks=td.skip_if_no_scipy) ]) def test_nan_interpolate(self, kwargs): s = Series([0, 1, np.nan, 3]) result = s.interpolate(**kwargs) expected = Series([0., 1., 2., 3.]) assert_series_equal(result, expected) def test_nan_irregular_index(self): s = Series([1, 2, np.nan, 4], index=[1, 3, 5, 9]) result = s.interpolate() expected = Series([1., 2., 3., 4.], index=[1, 3, 5, 9]) assert_series_equal(result, expected) def test_nan_str_index(self): s = Series([0, 1, 2, np.nan], index=list('abcd')) result = s.interpolate() expected = Series([0., 1., 2., 2.], index=list('abcd')) assert_series_equal(result, expected) @td.skip_if_no_scipy def test_interp_quad(self): sq = Series([1, 4, np.nan, 16], index=[1, 2, 3, 4]) result = sq.interpolate(method='quadratic') expected = Series([1., 4., 9., 16.], index=[1, 2, 3, 4]) assert_series_equal(result, expected) @td.skip_if_no_scipy def test_interp_scipy_basic(self): s = Series([1, 3, np.nan, 12, np.nan, 25]) # slinear expected = Series([1., 3., 7.5, 12., 18.5, 25.]) result = s.interpolate(method='slinear') assert_series_equal(result, expected) result = s.interpolate(method='slinear', downcast='infer') assert_series_equal(result, expected) # nearest expected = Series([1, 3, 3, 12, 12, 25]) result = s.interpolate(method='nearest') assert_series_equal(result, expected.astype('float')) result = s.interpolate(method='nearest', downcast='infer') assert_series_equal(result, expected) # zero expected = Series([1, 3, 3, 12, 12, 25]) result = s.interpolate(method='zero') assert_series_equal(result, expected.astype('float')) result = s.interpolate(method='zero', downcast='infer') assert_series_equal(result, expected) # quadratic # GH #15662. # new cubic and quadratic interpolation algorithms from scipy 0.19.0. # previously `splmake` was used. See scipy/scipy#6710 if _is_scipy_ge_0190: expected = Series([1, 3., 6.823529, 12., 18.058824, 25.]) else: expected = Series([1, 3., 6.769231, 12., 18.230769, 25.]) result = s.interpolate(method='quadratic') assert_series_equal(result, expected) result = s.interpolate(method='quadratic', downcast='infer') assert_series_equal(result, expected) # cubic expected = Series([1., 3., 6.8, 12., 18.2, 25.]) result = s.interpolate(method='cubic') assert_series_equal(result, expected) def test_interp_limit(self): s = Series([1, 3, np.nan, np.nan, np.nan, 11]) expected = Series([1., 3., 5., 7., np.nan, 11.]) result = s.interpolate(method='linear', limit=2) assert_series_equal(result, expected) # GH 9217, make sure limit is an int and greater than 0 methods = ['linear', 'time', 'index', 'values', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic', 'barycentric', 'krogh', 'polynomial', 'spline', 'piecewise_polynomial', None, 'from_derivatives', 'pchip', 'akima'] s = pd.Series([1, 2, np.nan, np.nan, 5]) for limit in [-1, 0, 1., 2.]: for method in methods: with pytest.raises(ValueError): s.interpolate(limit=limit, method=method) def test_interp_limit_forward(self): s = Series([1, 3, np.nan, np.nan, np.nan, 11]) # Provide 'forward' (the default) explicitly here. 
expected = Series([1., 3., 5., 7., np.nan, 11.]) result = s.interpolate(method='linear', limit=2, limit_direction='forward') assert_series_equal(result, expected) result = s.interpolate(method='linear', limit=2, limit_direction='FORWARD') assert_series_equal(result, expected) def test_interp_unlimited(self): # these test are for issue #16282 default Limit=None is unlimited s = Series([np.nan, 1., 3., np.nan, np.nan, np.nan, 11., np.nan]) expected = Series([1., 1., 3., 5., 7., 9., 11., 11.]) result = s.interpolate(method='linear', limit_direction='both') assert_series_equal(result, expected) expected = Series([np.nan, 1., 3., 5., 7., 9., 11., 11.]) result = s.interpolate(method='linear', limit_direction='forward') assert_series_equal(result, expected) expected = Series([1., 1., 3., 5., 7., 9., 11., np.nan]) result = s.interpolate(method='linear', limit_direction='backward') assert_series_equal(result, expected) def test_interp_limit_bad_direction(self): s = Series([1, 3, np.nan, np.nan, np.nan, 11]) pytest.raises(ValueError, s.interpolate, method='linear', limit=2, limit_direction='abc') # raises an error even if no limit is specified. pytest.raises(ValueError, s.interpolate, method='linear', limit_direction='abc') # limit_area introduced GH #16284 def test_interp_limit_area(self): # These tests are for issue #9218 -- fill NaNs in both directions. s = Series([nan, nan, 3, nan, nan, nan, 7, nan, nan]) expected = Series([nan, nan, 3., 4., 5., 6., 7., nan, nan]) result = s.interpolate(method='linear', limit_area='inside') assert_series_equal(result, expected) expected = Series([nan, nan, 3., 4., nan, nan, 7., nan, nan]) result = s.interpolate(method='linear', limit_area='inside', limit=1) expected = Series([nan, nan, 3., 4., nan, 6., 7., nan, nan]) result = s.interpolate(method='linear', limit_area='inside', limit_direction='both', limit=1) assert_series_equal(result, expected) expected = Series([nan, nan, 3., nan, nan, nan, 7., 7., 7.]) result = s.interpolate(method='linear', limit_area='outside') assert_series_equal(result, expected) expected = Series([nan, nan, 3., nan, nan, nan, 7., 7., nan]) result = s.interpolate(method='linear', limit_area='outside', limit=1) expected = Series([nan, 3., 3., nan, nan, nan, 7., 7., nan]) result = s.interpolate(method='linear', limit_area='outside', limit_direction='both', limit=1) assert_series_equal(result, expected) expected = Series([3., 3., 3., nan, nan, nan, 7., nan, nan]) result = s.interpolate(method='linear', limit_area='outside', direction='backward') # raises an error even if limit type is wrong. pytest.raises(ValueError, s.interpolate, method='linear', limit_area='abc') def test_interp_limit_direction(self): # These tests are for issue #9218 -- fill NaNs in both directions. s = Series([1, 3, np.nan, np.nan, np.nan, 11]) expected = Series([1., 3., np.nan, 7., 9., 11.]) result = s.interpolate(method='linear', limit=2, limit_direction='backward') assert_series_equal(result, expected) expected = Series([1., 3., 5., np.nan, 9., 11.]) result = s.interpolate(method='linear', limit=1, limit_direction='both') assert_series_equal(result, expected) # Check that this works on a longer series of nans. 
s = Series([1, 3, np.nan, np.nan, np.nan, 7, 9, np.nan, np.nan, 12, np.nan]) expected = Series([1., 3., 4., 5., 6., 7., 9., 10., 11., 12., 12.]) result = s.interpolate(method='linear', limit=2, limit_direction='both') assert_series_equal(result, expected) expected = Series([1., 3., 4., np.nan, 6., 7., 9., 10., 11., 12., 12.]) result = s.interpolate(method='linear', limit=1, limit_direction='both') assert_series_equal(result, expected) def test_interp_limit_to_ends(self): # These test are for issue #10420 -- flow back to beginning. s = Series([np.nan, np.nan, 5, 7, 9, np.nan]) expected = Series([5., 5., 5., 7., 9., np.nan]) result = s.interpolate(method='linear', limit=2, limit_direction='backward') assert_series_equal(result, expected) expected = Series([5., 5., 5., 7., 9., 9.]) result = s.interpolate(method='linear', limit=2, limit_direction='both') assert_series_equal(result, expected) def test_interp_limit_before_ends(self): # These test are for issue #11115 -- limit ends properly. s = Series([np.nan, np.nan, 5, 7, np.nan, np.nan]) expected = Series([np.nan, np.nan, 5., 7., 7., np.nan]) result = s.interpolate(method='linear', limit=1, limit_direction='forward') assert_series_equal(result, expected) expected = Series([np.nan, 5., 5., 7., np.nan, np.nan]) result = s.interpolate(method='linear', limit=1, limit_direction='backward') assert_series_equal(result, expected) expected = Series([np.nan, 5., 5., 7., 7., np.nan]) result = s.interpolate(method='linear', limit=1, limit_direction='both') assert_series_equal(result, expected) @td.skip_if_no_scipy def test_interp_all_good(self): s = Series([1, 2, 3]) result = s.interpolate(method='polynomial', order=1) assert_series_equal(result, s) # non-scipy result = s.interpolate() assert_series_equal(result, s) @pytest.mark.parametrize("check_scipy", [ False, pytest.param(True, marks=td.skip_if_no_scipy) ]) def test_interp_multiIndex(self, check_scipy): idx = MultiIndex.from_tuples([(0, 'a'), (1, 'b'), (2, 'c')]) s = Series([1, 2, np.nan], index=idx) expected = s.copy() expected.loc[2] = 2 result = s.interpolate() assert_series_equal(result, expected) if check_scipy: with pytest.raises(ValueError): s.interpolate(method='polynomial', order=1) @td.skip_if_no_scipy def test_interp_nonmono_raise(self): s = Series([1, np.nan, 3], index=[0, 2, 1]) with pytest.raises(ValueError): s.interpolate(method='krogh') @td.skip_if_no_scipy def test_interp_datetime64(self): df = Series([1, np.nan, 3], index=date_range('1/1/2000', periods=3)) result = df.interpolate(method='nearest') expected = Series([1., 1., 3.], index=date_range('1/1/2000', periods=3)) assert_series_equal(result, expected) def test_interp_limit_no_nans(self): # GH 7173 s = pd.Series([1., 2., 3.]) result = s.interpolate(limit=1) expected = s assert_series_equal(result, expected) @td.skip_if_no_scipy @pytest.mark.parametrize("method", ['polynomial', 'spline']) def test_no_order(self, method): s = Series([0, 1, np.nan, 3]) with pytest.raises(ValueError): s.interpolate(method=method) @td.skip_if_no_scipy def test_spline(self): s = Series([1, 2, np.nan, 4, 5, np.nan, 7]) result = s.interpolate(method='spline', order=1) expected = Series([1., 2., 3., 4., 5., 6., 7.]) assert_series_equal(result, expected) @td.skip_if_no('scipy', min_version='0.15') def test_spline_extrapolate(self): s = Series([1, 2, 3, 4, np.nan, 6, np.nan]) result3 = s.interpolate(method='spline', order=1, ext=3) expected3 = Series([1., 2., 3., 4., 5., 6., 6.]) assert_series_equal(result3, expected3) result1 = 
s.interpolate(method='spline', order=1, ext=0) expected1 = Series([1., 2., 3., 4., 5., 6., 7.]) assert_series_equal(result1, expected1) @td.skip_if_no_scipy def test_spline_smooth(self): s = Series([1, 2, np.nan, 4, 5.1, np.nan, 7]) assert (s.interpolate(method='spline', order=3, s=0)[5] != s.interpolate(method='spline', order=3)[5]) @td.skip_if_no_scipy def test_spline_interpolation(self): s = Series(np.arange(10) ** 2) s[np.random.randint(0, 9, 3)] = np.nan result1 = s.interpolate(method='spline', order=1) expected1 = s.interpolate(method='spline', order=1) assert_series_equal(result1, expected1) @td.skip_if_no_scipy def test_spline_error(self): # see gh-10633 s = pd.Series(np.arange(10) ** 2) s[np.random.randint(0, 9, 3)] = np.nan with pytest.raises(ValueError): s.interpolate(method='spline') with pytest.raises(ValueError): s.interpolate(method='spline', order=0) def test_interp_timedelta64(self): # GH 6424 df = Series([1, np.nan, 3], index=pd.to_timedelta([1, 2, 3])) result = df.interpolate(method='time') expected = Series([1., 2., 3.], index=pd.to_timedelta([1, 2, 3])) assert_series_equal(result, expected) # test for non uniform spacing df = Series([1, np.nan, 3], index=pd.to_timedelta([1, 2, 4])) result = df.interpolate(method='time') expected = Series([1., 1.666667, 3.], index=pd.to_timedelta([1, 2, 4])) assert_series_equal(result, expected) def test_series_interpolate_method_values(self): # #1646 ts = _simple_ts('1/1/2000', '1/20/2000') ts[::2] = np.nan result = ts.interpolate(method='values') exp = ts.interpolate() assert_series_equal(result, exp) def test_series_interpolate_intraday(self): # #1698 index = pd.date_range('1/1/2012', periods=4, freq='12D') ts = pd.Series([0, 12, 24, 36], index) new_index = index.append(index + pd.DateOffset(days=1)).sort_values() exp = ts.reindex(new_index).interpolate(method='time') index = pd.date_range('1/1/2012', periods=4, freq='12H') ts = pd.Series([0, 12, 24, 36], index) new_index = index.append(index + pd.DateOffset(hours=1)).sort_values() result = ts.reindex(new_index).interpolate(method='time') tm.assert_numpy_array_equal(result.values, exp.values)
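The tests above exercise Series.fillna and Series.interpolate. As a minimal sketch of the behaviours they assert (illustrative only, not taken from the test file itself), using only public pandas API:

import numpy as np
import pandas as pd

s = pd.Series([1.0, np.nan, np.nan, 4.0])

# Forward-fill propagates the last valid value; limit caps how many
# consecutive NaNs get filled.
s.fillna(method='ffill')             # 1.0, 1.0, 1.0, 4.0
s.fillna(method='ffill', limit=1)    # 1.0, 1.0, NaN, 4.0

# A dict maps index labels to fill values (only position 1 here).
s.fillna({1: 2.0})                   # 1.0, 2.0, NaN, 4.0

# Linear interpolation; limit_direction controls which end of a gap is
# filled when a limit is given ('backward' fills the values closest to
# the next valid observation).
s.interpolate(method='linear')                      # 1.0, 2.0, 3.0, 4.0
s.interpolate(limit=1, limit_direction='backward')  # 1.0, NaN, 3.0, 4.0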
dsm054/pandas
pandas/tests/series/test_missing.py
pandas/_version.py
""" Read SAS7BDAT files Based on code written by Jared Hobbs: https://bitbucket.org/jaredhobbs/sas7bdat See also: https://github.com/BioStatMatt/sas7bdat Partial documentation of the file format: https://cran.r-project.org/web/packages/sas7bdat/vignettes/sas7bdat.pdf Reference for binary data compression: http://collaboration.cmc.ec.gc.ca/science/rpn/biblio/ddj/Website/articles/CUJ/1992/9210/ross/ross.htm """ from datetime import datetime import struct import numpy as np from pandas.errors import EmptyDataError import pandas as pd from pandas import compat from pandas.io.common import BaseIterator, get_filepath_or_buffer from pandas.io.sas._sas import Parser import pandas.io.sas.sas_constants as const class _subheader_pointer(object): pass class _column(object): pass # SAS7BDAT represents a SAS data file in SAS7BDAT format. class SAS7BDATReader(BaseIterator): """ Read SAS files in SAS7BDAT format. Parameters ---------- path_or_buf : path name or buffer Name of SAS file or file-like object pointing to SAS file contents. index : column identifier, defaults to None Column to use as index. convert_dates : boolean, defaults to True Attempt to convert dates to Pandas datetime values. Note that some rarely used SAS date formats may be unsupported. blank_missing : boolean, defaults to True Convert empty strings to missing values (SAS uses blanks to indicate missing character variables). chunksize : int, defaults to None Return SAS7BDATReader object for iterations, returns chunks with given number of lines. encoding : string, defaults to None String encoding. convert_text : bool, defaults to True If False, text variables are left as raw bytes. convert_header_text : bool, defaults to True If False, header text, including column names, are left as raw bytes. """ def __init__(self, path_or_buf, index=None, convert_dates=True, blank_missing=True, chunksize=None, encoding=None, convert_text=True, convert_header_text=True): self.index = index self.convert_dates = convert_dates self.blank_missing = blank_missing self.chunksize = chunksize self.encoding = encoding self.convert_text = convert_text self.convert_header_text = convert_header_text self.default_encoding = "latin-1" self.compression = "" self.column_names_strings = [] self.column_names = [] self.column_formats = [] self.columns = [] self._current_page_data_subheader_pointers = [] self._cached_page = None self._column_data_lengths = [] self._column_data_offsets = [] self._column_types = [] self._current_row_in_file_index = 0 self._current_row_on_page_index = 0 self._current_row_in_file_index = 0 self._path_or_buf, _, _, _ = get_filepath_or_buffer(path_or_buf) if isinstance(self._path_or_buf, compat.string_types): self._path_or_buf = open(self._path_or_buf, 'rb') self.handle = self._path_or_buf self._get_properties() self._parse_metadata() def column_data_lengths(self): """Return a numpy int64 array of the column data lengths""" return np.asarray(self._column_data_lengths, dtype=np.int64) def column_data_offsets(self): """Return a numpy int64 array of the column offsets""" return np.asarray(self._column_data_offsets, dtype=np.int64) def column_types(self): """Returns a numpy character array of the column types: s (string) or d (double)""" return np.asarray(self._column_types, dtype=np.dtype('S1')) def close(self): try: self.handle.close() except AttributeError: pass def _get_properties(self): # Check magic number self._path_or_buf.seek(0) self._cached_page = self._path_or_buf.read(288) if self._cached_page[0:len(const.magic)] != const.magic: 
self.close() raise ValueError("magic number mismatch (not a SAS file?)") # Get alignment information align1, align2 = 0, 0 buf = self._read_bytes(const.align_1_offset, const.align_1_length) if buf == const.u64_byte_checker_value: align2 = const.align_2_value self.U64 = True self._int_length = 8 self._page_bit_offset = const.page_bit_offset_x64 self._subheader_pointer_length = const.subheader_pointer_length_x64 else: self.U64 = False self._page_bit_offset = const.page_bit_offset_x86 self._subheader_pointer_length = const.subheader_pointer_length_x86 self._int_length = 4 buf = self._read_bytes(const.align_2_offset, const.align_2_length) if buf == const.align_1_checker_value: align1 = const.align_2_value total_align = align1 + align2 # Get endianness information buf = self._read_bytes(const.endianness_offset, const.endianness_length) if buf == b'\x01': self.byte_order = "<" else: self.byte_order = ">" # Get encoding information buf = self._read_bytes(const.encoding_offset, const.encoding_length)[0] if buf in const.encoding_names: self.file_encoding = const.encoding_names[buf] else: self.file_encoding = "unknown (code=%s)" % str(buf) # Get platform information buf = self._read_bytes(const.platform_offset, const.platform_length) if buf == b'1': self.platform = "unix" elif buf == b'2': self.platform = "windows" else: self.platform = "unknown" buf = self._read_bytes(const.dataset_offset, const.dataset_length) self.name = buf.rstrip(b'\x00 ') if self.convert_header_text: self.name = self.name.decode( self.encoding or self.default_encoding) buf = self._read_bytes(const.file_type_offset, const.file_type_length) self.file_type = buf.rstrip(b'\x00 ') if self.convert_header_text: self.file_type = self.file_type.decode( self.encoding or self.default_encoding) # Timestamp is epoch 01/01/1960 epoch = datetime(1960, 1, 1) x = self._read_float(const.date_created_offset + align1, const.date_created_length) self.date_created = epoch + pd.to_timedelta(x, unit='s') x = self._read_float(const.date_modified_offset + align1, const.date_modified_length) self.date_modified = epoch + pd.to_timedelta(x, unit='s') self.header_length = self._read_int(const.header_size_offset + align1, const.header_size_length) # Read the rest of the header into cached_page. 
buf = self._path_or_buf.read(self.header_length - 288) self._cached_page += buf if len(self._cached_page) != self.header_length: self.close() raise ValueError("The SAS7BDAT file appears to be truncated.") self._page_length = self._read_int(const.page_size_offset + align1, const.page_size_length) self._page_count = self._read_int(const.page_count_offset + align1, const.page_count_length) buf = self._read_bytes(const.sas_release_offset + total_align, const.sas_release_length) self.sas_release = buf.rstrip(b'\x00 ') if self.convert_header_text: self.sas_release = self.sas_release.decode( self.encoding or self.default_encoding) buf = self._read_bytes(const.sas_server_type_offset + total_align, const.sas_server_type_length) self.server_type = buf.rstrip(b'\x00 ') if self.convert_header_text: self.server_type = self.server_type.decode( self.encoding or self.default_encoding) buf = self._read_bytes(const.os_version_number_offset + total_align, const.os_version_number_length) self.os_version = buf.rstrip(b'\x00 ') if self.convert_header_text: self.os_version = self.os_version.decode( self.encoding or self.default_encoding) buf = self._read_bytes(const.os_name_offset + total_align, const.os_name_length) buf = buf.rstrip(b'\x00 ') if len(buf) > 0: self.os_name = buf.decode(self.encoding or self.default_encoding) else: buf = self._read_bytes(const.os_maker_offset + total_align, const.os_maker_length) self.os_name = buf.rstrip(b'\x00 ') if self.convert_header_text: self.os_name = self.os_name.decode( self.encoding or self.default_encoding) def __next__(self): da = self.read(nrows=self.chunksize or 1) if da is None: raise StopIteration return da # Read a single float of the given width (4 or 8). def _read_float(self, offset, width): if width not in (4, 8): self.close() raise ValueError("invalid float width") buf = self._read_bytes(offset, width) fd = "f" if width == 4 else "d" return struct.unpack(self.byte_order + fd, buf)[0] # Read a single signed integer of the given width (1, 2, 4 or 8). def _read_int(self, offset, width): if width not in (1, 2, 4, 8): self.close() raise ValueError("invalid int width") buf = self._read_bytes(offset, width) it = {1: "b", 2: "h", 4: "l", 8: "q"}[width] iv = struct.unpack(self.byte_order + it, buf)[0] return iv def _read_bytes(self, offset, length): if self._cached_page is None: self._path_or_buf.seek(offset) buf = self._path_or_buf.read(length) if len(buf) < length: self.close() msg = "Unable to read {:d} bytes from file position {:d}." 
raise ValueError(msg.format(length, offset)) return buf else: if offset + length > len(self._cached_page): self.close() raise ValueError("The cached page is too small.") return self._cached_page[offset:offset + length] def _parse_metadata(self): done = False while not done: self._cached_page = self._path_or_buf.read(self._page_length) if len(self._cached_page) <= 0: break if len(self._cached_page) != self._page_length: self.close() raise ValueError( "Failed to read a meta data page from the SAS file.") done = self._process_page_meta() def _process_page_meta(self): self._read_page_header() pt = [const.page_meta_type, const.page_amd_type] + const.page_mix_types if self._current_page_type in pt: self._process_page_metadata() is_data_page = self._current_page_type & const.page_data_type is_mix_page = self._current_page_type in const.page_mix_types return (is_data_page or is_mix_page or self._current_page_data_subheader_pointers != []) def _read_page_header(self): bit_offset = self._page_bit_offset tx = const.page_type_offset + bit_offset self._current_page_type = self._read_int(tx, const.page_type_length) tx = const.block_count_offset + bit_offset self._current_page_block_count = self._read_int( tx, const.block_count_length) tx = const.subheader_count_offset + bit_offset self._current_page_subheaders_count = ( self._read_int(tx, const.subheader_count_length)) def _process_page_metadata(self): bit_offset = self._page_bit_offset for i in range(self._current_page_subheaders_count): pointer = self._process_subheader_pointers( const.subheader_pointers_offset + bit_offset, i) if pointer.length == 0: continue if pointer.compression == const.truncated_subheader_id: continue subheader_signature = self._read_subheader_signature( pointer.offset) subheader_index = ( self._get_subheader_index(subheader_signature, pointer.compression, pointer.ptype)) self._process_subheader(subheader_index, pointer) def _get_subheader_index(self, signature, compression, ptype): index = const.subheader_signature_to_index.get(signature) if index is None: f1 = ((compression == const.compressed_subheader_id) or (compression == 0)) f2 = (ptype == const.compressed_subheader_type) if (self.compression != "") and f1 and f2: index = const.SASIndex.data_subheader_index else: self.close() raise ValueError("Unknown subheader signature") return index def _process_subheader_pointers(self, offset, subheader_pointer_index): subheader_pointer_length = self._subheader_pointer_length total_offset = (offset + subheader_pointer_length * subheader_pointer_index) subheader_offset = self._read_int(total_offset, self._int_length) total_offset += self._int_length subheader_length = self._read_int(total_offset, self._int_length) total_offset += self._int_length subheader_compression = self._read_int(total_offset, 1) total_offset += 1 subheader_type = self._read_int(total_offset, 1) x = _subheader_pointer() x.offset = subheader_offset x.length = subheader_length x.compression = subheader_compression x.ptype = subheader_type return x def _read_subheader_signature(self, offset): subheader_signature = self._read_bytes(offset, self._int_length) return subheader_signature def _process_subheader(self, subheader_index, pointer): offset = pointer.offset length = pointer.length if subheader_index == const.SASIndex.row_size_index: processor = self._process_rowsize_subheader elif subheader_index == const.SASIndex.column_size_index: processor = self._process_columnsize_subheader elif subheader_index == const.SASIndex.column_text_index: processor = 
self._process_columntext_subheader elif subheader_index == const.SASIndex.column_name_index: processor = self._process_columnname_subheader elif subheader_index == const.SASIndex.column_attributes_index: processor = self._process_columnattributes_subheader elif subheader_index == const.SASIndex.format_and_label_index: processor = self._process_format_subheader elif subheader_index == const.SASIndex.column_list_index: processor = self._process_columnlist_subheader elif subheader_index == const.SASIndex.subheader_counts_index: processor = self._process_subheader_counts elif subheader_index == const.SASIndex.data_subheader_index: self._current_page_data_subheader_pointers.append(pointer) return else: raise ValueError("unknown subheader index") processor(offset, length) def _process_rowsize_subheader(self, offset, length): int_len = self._int_length lcs_offset = offset lcp_offset = offset if self.U64: lcs_offset += 682 lcp_offset += 706 else: lcs_offset += 354 lcp_offset += 378 self.row_length = self._read_int( offset + const.row_length_offset_multiplier * int_len, int_len) self.row_count = self._read_int( offset + const.row_count_offset_multiplier * int_len, int_len) self.col_count_p1 = self._read_int( offset + const.col_count_p1_multiplier * int_len, int_len) self.col_count_p2 = self._read_int( offset + const.col_count_p2_multiplier * int_len, int_len) mx = const.row_count_on_mix_page_offset_multiplier * int_len self._mix_page_row_count = self._read_int(offset + mx, int_len) self._lcs = self._read_int(lcs_offset, 2) self._lcp = self._read_int(lcp_offset, 2) def _process_columnsize_subheader(self, offset, length): int_len = self._int_length offset += int_len self.column_count = self._read_int(offset, int_len) if (self.col_count_p1 + self.col_count_p2 != self.column_count): print("Warning: column count mismatch (%d + %d != %d)\n", self.col_count_p1, self.col_count_p2, self.column_count) # Unknown purpose def _process_subheader_counts(self, offset, length): pass def _process_columntext_subheader(self, offset, length): offset += self._int_length text_block_size = self._read_int(offset, const.text_block_size_length) buf = self._read_bytes(offset, text_block_size) cname_raw = buf[0:text_block_size].rstrip(b"\x00 ") cname = cname_raw if self.convert_header_text: cname = cname.decode(self.encoding or self.default_encoding) self.column_names_strings.append(cname) if len(self.column_names_strings) == 1: compression_literal = "" for cl in const.compression_literals: if cl in cname_raw: compression_literal = cl self.compression = compression_literal offset -= self._int_length offset1 = offset + 16 if self.U64: offset1 += 4 buf = self._read_bytes(offset1, self._lcp) compression_literal = buf.rstrip(b"\x00") if compression_literal == "": self._lcs = 0 offset1 = offset + 32 if self.U64: offset1 += 4 buf = self._read_bytes(offset1, self._lcp) self.creator_proc = buf[0:self._lcp] elif compression_literal == const.rle_compression: offset1 = offset + 40 if self.U64: offset1 += 4 buf = self._read_bytes(offset1, self._lcp) self.creator_proc = buf[0:self._lcp] elif self._lcs > 0: self._lcp = 0 offset1 = offset + 16 if self.U64: offset1 += 4 buf = self._read_bytes(offset1, self._lcs) self.creator_proc = buf[0:self._lcp] if self.convert_header_text: if hasattr(self, "creator_proc"): self.creator_proc = self.creator_proc.decode( self.encoding or self.default_encoding) def _process_columnname_subheader(self, offset, length): int_len = self._int_length offset += int_len column_name_pointers_count = (length - 2 * 
int_len - 12) // 8 for i in range(column_name_pointers_count): text_subheader = offset + const.column_name_pointer_length * \ (i + 1) + const.column_name_text_subheader_offset col_name_offset = offset + const.column_name_pointer_length * \ (i + 1) + const.column_name_offset_offset col_name_length = offset + const.column_name_pointer_length * \ (i + 1) + const.column_name_length_offset idx = self._read_int( text_subheader, const.column_name_text_subheader_length) col_offset = self._read_int( col_name_offset, const.column_name_offset_length) col_len = self._read_int( col_name_length, const.column_name_length_length) name_str = self.column_names_strings[idx] self.column_names.append(name_str[col_offset:col_offset + col_len]) def _process_columnattributes_subheader(self, offset, length): int_len = self._int_length column_attributes_vectors_count = ( length - 2 * int_len - 12) // (int_len + 8) for i in range(column_attributes_vectors_count): col_data_offset = (offset + int_len + const.column_data_offset_offset + i * (int_len + 8)) col_data_len = (offset + 2 * int_len + const.column_data_length_offset + i * (int_len + 8)) col_types = (offset + 2 * int_len + const.column_type_offset + i * (int_len + 8)) x = self._read_int(col_data_offset, int_len) self._column_data_offsets.append(x) x = self._read_int(col_data_len, const.column_data_length_length) self._column_data_lengths.append(x) x = self._read_int(col_types, const.column_type_length) self._column_types.append(b'd' if x == 1 else b's') def _process_columnlist_subheader(self, offset, length): # unknown purpose pass def _process_format_subheader(self, offset, length): int_len = self._int_length text_subheader_format = ( offset + const.column_format_text_subheader_index_offset + 3 * int_len) col_format_offset = (offset + const.column_format_offset_offset + 3 * int_len) col_format_len = (offset + const.column_format_length_offset + 3 * int_len) text_subheader_label = ( offset + const.column_label_text_subheader_index_offset + 3 * int_len) col_label_offset = (offset + const.column_label_offset_offset + 3 * int_len) col_label_len = offset + const.column_label_length_offset + 3 * int_len x = self._read_int(text_subheader_format, const.column_format_text_subheader_index_length) format_idx = min(x, len(self.column_names_strings) - 1) format_start = self._read_int( col_format_offset, const.column_format_offset_length) format_len = self._read_int( col_format_len, const.column_format_length_length) label_idx = self._read_int( text_subheader_label, const.column_label_text_subheader_index_length) label_idx = min(label_idx, len(self.column_names_strings) - 1) label_start = self._read_int( col_label_offset, const.column_label_offset_length) label_len = self._read_int(col_label_len, const.column_label_length_length) label_names = self.column_names_strings[label_idx] column_label = label_names[label_start: label_start + label_len] format_names = self.column_names_strings[format_idx] column_format = format_names[format_start: format_start + format_len] current_column_number = len(self.columns) col = _column() col.col_id = current_column_number col.name = self.column_names[current_column_number] col.label = column_label col.format = column_format col.ctype = self._column_types[current_column_number] col.length = self._column_data_lengths[current_column_number] self.column_formats.append(column_format) self.columns.append(col) def read(self, nrows=None): if (nrows is None) and (self.chunksize is not None): nrows = self.chunksize elif nrows is None: nrows = 
self.row_count if len(self._column_types) == 0: self.close() raise EmptyDataError("No columns to parse from file") if self._current_row_in_file_index >= self.row_count: return None m = self.row_count - self._current_row_in_file_index if nrows > m: nrows = m nd = self._column_types.count(b'd') ns = self._column_types.count(b's') self._string_chunk = np.empty((ns, nrows), dtype=np.object) self._byte_chunk = np.zeros((nd, 8 * nrows), dtype=np.uint8) self._current_row_in_chunk_index = 0 p = Parser(self) p.read(nrows) rslt = self._chunk_to_dataframe() if self.index is not None: rslt = rslt.set_index(self.index) return rslt def _read_next_page(self): self._current_page_data_subheader_pointers = [] self._cached_page = self._path_or_buf.read(self._page_length) if len(self._cached_page) <= 0: return True elif len(self._cached_page) != self._page_length: self.close() msg = ("failed to read complete page from file " "(read {:d} of {:d} bytes)") raise ValueError(msg.format(len(self._cached_page), self._page_length)) self._read_page_header() page_type = self._current_page_type if page_type == const.page_meta_type: self._process_page_metadata() is_data_page = page_type & const.page_data_type pt = [const.page_meta_type] + const.page_mix_types if not is_data_page and self._current_page_type not in pt: return self._read_next_page() return False def _chunk_to_dataframe(self): n = self._current_row_in_chunk_index m = self._current_row_in_file_index ix = range(m - n, m) rslt = pd.DataFrame(index=ix) js, jb = 0, 0 for j in range(self.column_count): name = self.column_names[j] if self._column_types[j] == b'd': rslt[name] = self._byte_chunk[jb, :].view( dtype=self.byte_order + 'd') rslt[name] = np.asarray(rslt[name], dtype=np.float64) if self.convert_dates: unit = None if self.column_formats[j] in const.sas_date_formats: unit = 'd' elif self.column_formats[j] in const.sas_datetime_formats: unit = 's' if unit: rslt[name] = pd.to_datetime(rslt[name], unit=unit, origin="1960-01-01") jb += 1 elif self._column_types[j] == b's': rslt[name] = self._string_chunk[js, :] if self.convert_text and (self.encoding is not None): rslt[name] = rslt[name].str.decode( self.encoding or self.default_encoding) if self.blank_missing: ii = rslt[name].str.len() == 0 rslt.loc[ii, name] = np.nan js += 1 else: self.close() raise ValueError("unknown column type %s" % self._column_types[j]) return rslt
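The SAS7BDATReader above backs pandas.read_sas for .sas7bdat files. A minimal usage sketch follows; the file name and the per-chunk handling are hypothetical, for illustration only:

import pandas as pd
from pandas.io.sas.sas7bdat import SAS7BDATReader

# One-shot read into a DataFrame via the public entry point.
df = pd.read_sas('example.sas7bdat', format='sas7bdat', encoding='latin-1')

# Chunked read with the reader class itself; __next__ returns DataFrames
# of `chunksize` rows and raises StopIteration when the file is exhausted.
reader = SAS7BDATReader('example.sas7bdat', chunksize=10000, encoding='latin-1')
total_rows = 0
try:
    for chunk in reader:
        total_rows += len(chunk)
finally:
    reader.close()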
str(inst) def test_ffill(self): ts = Series([0., 1., 2., 3., 4.], index=tm.makeDateIndex(5)) ts[2] = np.NaN assert_series_equal(ts.ffill(), ts.fillna(method='ffill')) def test_ffill_mixed_dtypes_without_missing_data(self): # GH14956 series = pd.Series([datetime(2015, 1, 1, tzinfo=pytz.utc), 1]) result = series.ffill() assert_series_equal(series, result) def test_bfill(self): ts = Series([0., 1., 2., 3., 4.], index=tm.makeDateIndex(5)) ts[2] = np.NaN assert_series_equal(ts.bfill(), ts.fillna(method='bfill')) def test_timedelta64_nan(self): td = Series([timedelta(days=i) for i in range(10)]) # nan ops on timedeltas td1 = td.copy() td1[0] = np.nan assert isna(td1[0]) assert td1[0].value == iNaT td1[0] = td[0] assert not isna(td1[0]) td1[1] = iNaT assert isna(td1[1]) assert td1[1].value == iNaT td1[1] = td[1] assert not isna(td1[1]) td1[2] = NaT assert isna(td1[2]) assert td1[2].value == iNaT td1[2] = td[2] assert not isna(td1[2]) # boolean setting # this doesn't work, not sure numpy even supports it # result = td[(td>np.timedelta64(timedelta(days=3))) & # td<np.timedelta64(timedelta(days=7)))] = np.nan # assert isna(result).sum() == 7 # NumPy limitiation =( # def test_logical_range_select(self): # np.random.seed(12345) # selector = -0.5 <= datetime_series <= 0.5 # expected = (datetime_series >= -0.5) & (datetime_series <= 0.5) # assert_series_equal(selector, expected) def test_dropna_empty(self): s = Series([]) assert len(s.dropna()) == 0 s.dropna(inplace=True) assert len(s) == 0 # invalid axis pytest.raises(ValueError, s.dropna, axis=1) def test_datetime64_tz_dropna(self): # DatetimeBlock s = Series([Timestamp('2011-01-01 10:00'), pd.NaT, Timestamp( '2011-01-03 10:00'), pd.NaT]) result = s.dropna() expected = Series([Timestamp('2011-01-01 10:00'), Timestamp('2011-01-03 10:00')], index=[0, 2]) tm.assert_series_equal(result, expected) # DatetimeBlockTZ idx = pd.DatetimeIndex(['2011-01-01 10:00', pd.NaT, '2011-01-03 10:00', pd.NaT], tz='Asia/Tokyo') s = pd.Series(idx) assert s.dtype == 'datetime64[ns, Asia/Tokyo]' result = s.dropna() expected = Series([Timestamp('2011-01-01 10:00', tz='Asia/Tokyo'), Timestamp('2011-01-03 10:00', tz='Asia/Tokyo')], index=[0, 2]) assert result.dtype == 'datetime64[ns, Asia/Tokyo]' tm.assert_series_equal(result, expected) def test_dropna_no_nan(self): for s in [Series([1, 2, 3], name='x'), Series( [False, True, False], name='x')]: result = s.dropna() tm.assert_series_equal(result, s) assert result is not s s2 = s.copy() s2.dropna(inplace=True) tm.assert_series_equal(s2, s) def test_dropna_intervals(self): s = Series([np.nan, 1, 2, 3], IntervalIndex.from_arrays( [np.nan, 0, 1, 2], [np.nan, 1, 2, 3])) result = s.dropna() expected = s.iloc[1:] assert_series_equal(result, expected) def test_valid(self, datetime_series): ts = datetime_series.copy() ts[::2] = np.NaN result = ts.dropna() assert len(result) == ts.count() tm.assert_series_equal(result, ts[1::2]) tm.assert_series_equal(result, ts[pd.notna(ts)]) def test_isna(self): ser = Series([0, 5.4, 3, nan, -0.001]) expected = Series([False, False, False, True, False]) tm.assert_series_equal(ser.isna(), expected) ser = Series(["hi", "", nan]) expected = Series([False, False, True]) tm.assert_series_equal(ser.isna(), expected) def test_notna(self): ser = Series([0, 5.4, 3, nan, -0.001]) expected = Series([True, True, True, False, True]) tm.assert_series_equal(ser.notna(), expected) ser = Series(["hi", "", nan]) expected = Series([True, True, False]) tm.assert_series_equal(ser.notna(), expected) def test_pad_nan(self): x 
= Series([np.nan, 1., np.nan, 3., np.nan], ['z', 'a', 'b', 'c', 'd'], dtype=float) x.fillna(method='pad', inplace=True) expected = Series([np.nan, 1.0, 1.0, 3.0, 3.0], ['z', 'a', 'b', 'c', 'd'], dtype=float) assert_series_equal(x[1:], expected[1:]) assert np.isnan(x[0]), np.isnan(expected[0]) def test_pad_require_monotonicity(self): rng = date_range('1/1/2000', '3/1/2000', freq='B') # neither monotonic increasing or decreasing rng2 = rng[[1, 0, 2]] pytest.raises(ValueError, rng2.get_indexer, rng, method='pad') def test_dropna_preserve_name(self, datetime_series): datetime_series[:5] = np.nan result = datetime_series.dropna() assert result.name == datetime_series.name name = datetime_series.name ts = datetime_series.copy() ts.dropna(inplace=True) assert ts.name == name def test_fill_value_when_combine_const(self): # GH12723 s = Series([0, 1, np.nan, 3, 4, 5]) exp = s.fillna(0).add(2) res = s.add(2, fill_value=0) assert_series_equal(res, exp) def test_series_fillna_limit(self): index = np.arange(10) s = Series(np.random.randn(10), index=index) result = s[:2].reindex(index) result = result.fillna(method='pad', limit=5) expected = s[:2].reindex(index).fillna(method='pad') expected[-3:] = np.nan assert_series_equal(result, expected) result = s[-2:].reindex(index) result = result.fillna(method='bfill', limit=5) expected = s[-2:].reindex(index).fillna(method='backfill') expected[:3] = np.nan assert_series_equal(result, expected) def test_sparse_series_fillna_limit(self): index = np.arange(10) s = Series(np.random.randn(10), index=index) ss = s[:2].reindex(index).to_sparse() # TODO: what is this test doing? why are result an expected # the same call to fillna? with tm.assert_produces_warning(PerformanceWarning): # TODO: release-note fillna performance warning result = ss.fillna(method='pad', limit=5) expected = ss.fillna(method='pad', limit=5) expected = expected.to_dense() expected[-3:] = np.nan expected = expected.to_sparse() assert_series_equal(result, expected) ss = s[-2:].reindex(index).to_sparse() with tm.assert_produces_warning(PerformanceWarning): result = ss.fillna(method='backfill', limit=5) expected = ss.fillna(method='backfill') expected = expected.to_dense() expected[:3] = np.nan expected = expected.to_sparse() assert_series_equal(result, expected) def test_sparse_series_pad_backfill_limit(self): index = np.arange(10) s = Series(np.random.randn(10), index=index) s = s.to_sparse() result = s[:2].reindex(index, method='pad', limit=5) with tm.assert_produces_warning(PerformanceWarning): expected = s[:2].reindex(index).fillna(method='pad') expected = expected.to_dense() expected[-3:] = np.nan expected = expected.to_sparse() assert_series_equal(result, expected) result = s[-2:].reindex(index, method='backfill', limit=5) with tm.assert_produces_warning(PerformanceWarning): expected = s[-2:].reindex(index).fillna(method='backfill') expected = expected.to_dense() expected[:3] = np.nan expected = expected.to_sparse() assert_series_equal(result, expected) def test_series_pad_backfill_limit(self): index = np.arange(10) s = Series(np.random.randn(10), index=index) result = s[:2].reindex(index, method='pad', limit=5) expected = s[:2].reindex(index).fillna(method='pad') expected[-3:] = np.nan assert_series_equal(result, expected) result = s[-2:].reindex(index, method='backfill', limit=5) expected = s[-2:].reindex(index).fillna(method='backfill') expected[:3] = np.nan assert_series_equal(result, expected) class TestSeriesInterpolateData(): def test_interpolate(self, datetime_series, string_series): 
ts = Series(np.arange(len(datetime_series), dtype=float), datetime_series.index) ts_copy = ts.copy() ts_copy[5:10] = np.NaN linear_interp = ts_copy.interpolate(method='linear') tm.assert_series_equal(linear_interp, ts) ord_ts = Series([d.toordinal() for d in datetime_series.index], index=datetime_series.index).astype(float) ord_ts_copy = ord_ts.copy() ord_ts_copy[5:10] = np.NaN time_interp = ord_ts_copy.interpolate(method='time') tm.assert_series_equal(time_interp, ord_ts) # try time interpolation on a non-TimeSeries # Only raises ValueError if there are NaNs. non_ts = string_series.copy() non_ts[0] = np.NaN pytest.raises(ValueError, non_ts.interpolate, method='time') @td.skip_if_no_scipy def test_interpolate_pchip(self): _skip_if_no_pchip() ser = Series(np.sort(np.random.uniform(size=100))) # interpolate at new_index new_index = ser.index.union(Index([49.25, 49.5, 49.75, 50.25, 50.5, 50.75])) interp_s = ser.reindex(new_index).interpolate(method='pchip') # does not blow up, GH5977 interp_s[49:51] @td.skip_if_no_scipy def test_interpolate_akima(self): _skip_if_no_akima() ser = Series([10, 11, 12, 13]) expected = Series([11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0])) # interpolate at new_index new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])) interp_s = ser.reindex(new_index).interpolate(method='akima') assert_series_equal(interp_s[1:3], expected) @td.skip_if_no_scipy def test_interpolate_piecewise_polynomial(self): ser = Series([10, 11, 12, 13]) expected = Series([11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0])) # interpolate at new_index new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])) interp_s = ser.reindex(new_index).interpolate( method='piecewise_polynomial') assert_series_equal(interp_s[1:3], expected) @td.skip_if_no_scipy def test_interpolate_from_derivatives(self): ser = Series([10, 11, 12, 13]) expected = Series([11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0])) # interpolate at new_index new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])) interp_s = ser.reindex(new_index).interpolate( method='from_derivatives') assert_series_equal(interp_s[1:3], expected) @pytest.mark.parametrize("kwargs", [ {}, pytest.param({'method': 'polynomial', 'order': 1}, marks=td.skip_if_no_scipy) ]) def test_interpolate_corners(self, kwargs): s = Series([np.nan, np.nan]) assert_series_equal(s.interpolate(**kwargs), s) s = Series([]).interpolate() assert_series_equal(s.interpolate(**kwargs), s) def test_interpolate_index_values(self): s = Series(np.nan, index=np.sort(np.random.rand(30))) s[::3] = np.random.randn(10) vals = s.index.values.astype(float) result = s.interpolate(method='index') expected = s.copy() bad = isna(expected.values) good = ~bad expected = Series(np.interp(vals[bad], vals[good], s.values[good]), index=s.index[bad]) assert_series_equal(result[bad], expected) # 'values' is synonymous with 'index' for the method kwarg other_result = s.interpolate(method='values') assert_series_equal(other_result, result) assert_series_equal(other_result[bad], expected) def test_interpolate_non_ts(self): s = Series([1, 3, np.nan, np.nan, np.nan, 11]) with pytest.raises(ValueError): s.interpolate(method='time') @pytest.mark.parametrize("kwargs", [ {}, pytest.param({'method': 'polynomial', 'order': 1}, 
marks=td.skip_if_no_scipy) ]) def test_nan_interpolate(self, kwargs): s = Series([0, 1, np.nan, 3]) result = s.interpolate(**kwargs) expected = Series([0., 1., 2., 3.]) assert_series_equal(result, expected) def test_nan_irregular_index(self): s = Series([1, 2, np.nan, 4], index=[1, 3, 5, 9]) result = s.interpolate() expected = Series([1., 2., 3., 4.], index=[1, 3, 5, 9]) assert_series_equal(result, expected) def test_nan_str_index(self): s = Series([0, 1, 2, np.nan], index=list('abcd')) result = s.interpolate() expected = Series([0., 1., 2., 2.], index=list('abcd')) assert_series_equal(result, expected) @td.skip_if_no_scipy def test_interp_quad(self): sq = Series([1, 4, np.nan, 16], index=[1, 2, 3, 4]) result = sq.interpolate(method='quadratic') expected = Series([1., 4., 9., 16.], index=[1, 2, 3, 4]) assert_series_equal(result, expected) @td.skip_if_no_scipy def test_interp_scipy_basic(self): s = Series([1, 3, np.nan, 12, np.nan, 25]) # slinear expected = Series([1., 3., 7.5, 12., 18.5, 25.]) result = s.interpolate(method='slinear') assert_series_equal(result, expected) result = s.interpolate(method='slinear', downcast='infer') assert_series_equal(result, expected) # nearest expected = Series([1, 3, 3, 12, 12, 25]) result = s.interpolate(method='nearest') assert_series_equal(result, expected.astype('float')) result = s.interpolate(method='nearest', downcast='infer') assert_series_equal(result, expected) # zero expected = Series([1, 3, 3, 12, 12, 25]) result = s.interpolate(method='zero') assert_series_equal(result, expected.astype('float')) result = s.interpolate(method='zero', downcast='infer') assert_series_equal(result, expected) # quadratic # GH #15662. # new cubic and quadratic interpolation algorithms from scipy 0.19.0. # previously `splmake` was used. See scipy/scipy#6710 if _is_scipy_ge_0190: expected = Series([1, 3., 6.823529, 12., 18.058824, 25.]) else: expected = Series([1, 3., 6.769231, 12., 18.230769, 25.]) result = s.interpolate(method='quadratic') assert_series_equal(result, expected) result = s.interpolate(method='quadratic', downcast='infer') assert_series_equal(result, expected) # cubic expected = Series([1., 3., 6.8, 12., 18.2, 25.]) result = s.interpolate(method='cubic') assert_series_equal(result, expected) def test_interp_limit(self): s = Series([1, 3, np.nan, np.nan, np.nan, 11]) expected = Series([1., 3., 5., 7., np.nan, 11.]) result = s.interpolate(method='linear', limit=2) assert_series_equal(result, expected) # GH 9217, make sure limit is an int and greater than 0 methods = ['linear', 'time', 'index', 'values', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic', 'barycentric', 'krogh', 'polynomial', 'spline', 'piecewise_polynomial', None, 'from_derivatives', 'pchip', 'akima'] s = pd.Series([1, 2, np.nan, np.nan, 5]) for limit in [-1, 0, 1., 2.]: for method in methods: with pytest.raises(ValueError): s.interpolate(limit=limit, method=method) def test_interp_limit_forward(self): s = Series([1, 3, np.nan, np.nan, np.nan, 11]) # Provide 'forward' (the default) explicitly here. 
expected = Series([1., 3., 5., 7., np.nan, 11.]) result = s.interpolate(method='linear', limit=2, limit_direction='forward') assert_series_equal(result, expected) result = s.interpolate(method='linear', limit=2, limit_direction='FORWARD') assert_series_equal(result, expected) def test_interp_unlimited(self): # these test are for issue #16282 default Limit=None is unlimited s = Series([np.nan, 1., 3., np.nan, np.nan, np.nan, 11., np.nan]) expected = Series([1., 1., 3., 5., 7., 9., 11., 11.]) result = s.interpolate(method='linear', limit_direction='both') assert_series_equal(result, expected) expected = Series([np.nan, 1., 3., 5., 7., 9., 11., 11.]) result = s.interpolate(method='linear', limit_direction='forward') assert_series_equal(result, expected) expected = Series([1., 1., 3., 5., 7., 9., 11., np.nan]) result = s.interpolate(method='linear', limit_direction='backward') assert_series_equal(result, expected) def test_interp_limit_bad_direction(self): s = Series([1, 3, np.nan, np.nan, np.nan, 11]) pytest.raises(ValueError, s.interpolate, method='linear', limit=2, limit_direction='abc') # raises an error even if no limit is specified. pytest.raises(ValueError, s.interpolate, method='linear', limit_direction='abc') # limit_area introduced GH #16284 def test_interp_limit_area(self): # These tests are for issue #9218 -- fill NaNs in both directions. s = Series([nan, nan, 3, nan, nan, nan, 7, nan, nan]) expected = Series([nan, nan, 3., 4., 5., 6., 7., nan, nan]) result = s.interpolate(method='linear', limit_area='inside') assert_series_equal(result, expected) expected = Series([nan, nan, 3., 4., nan, nan, 7., nan, nan]) result = s.interpolate(method='linear', limit_area='inside', limit=1) expected = Series([nan, nan, 3., 4., nan, 6., 7., nan, nan]) result = s.interpolate(method='linear', limit_area='inside', limit_direction='both', limit=1) assert_series_equal(result, expected) expected = Series([nan, nan, 3., nan, nan, nan, 7., 7., 7.]) result = s.interpolate(method='linear', limit_area='outside') assert_series_equal(result, expected) expected = Series([nan, nan, 3., nan, nan, nan, 7., 7., nan]) result = s.interpolate(method='linear', limit_area='outside', limit=1) expected = Series([nan, 3., 3., nan, nan, nan, 7., 7., nan]) result = s.interpolate(method='linear', limit_area='outside', limit_direction='both', limit=1) assert_series_equal(result, expected) expected = Series([3., 3., 3., nan, nan, nan, 7., nan, nan]) result = s.interpolate(method='linear', limit_area='outside', direction='backward') # raises an error even if limit type is wrong. pytest.raises(ValueError, s.interpolate, method='linear', limit_area='abc') def test_interp_limit_direction(self): # These tests are for issue #9218 -- fill NaNs in both directions. s = Series([1, 3, np.nan, np.nan, np.nan, 11]) expected = Series([1., 3., np.nan, 7., 9., 11.]) result = s.interpolate(method='linear', limit=2, limit_direction='backward') assert_series_equal(result, expected) expected = Series([1., 3., 5., np.nan, 9., 11.]) result = s.interpolate(method='linear', limit=1, limit_direction='both') assert_series_equal(result, expected) # Check that this works on a longer series of nans. 
s = Series([1, 3, np.nan, np.nan, np.nan, 7, 9, np.nan, np.nan, 12, np.nan]) expected = Series([1., 3., 4., 5., 6., 7., 9., 10., 11., 12., 12.]) result = s.interpolate(method='linear', limit=2, limit_direction='both') assert_series_equal(result, expected) expected = Series([1., 3., 4., np.nan, 6., 7., 9., 10., 11., 12., 12.]) result = s.interpolate(method='linear', limit=1, limit_direction='both') assert_series_equal(result, expected) def test_interp_limit_to_ends(self): # These test are for issue #10420 -- flow back to beginning. s = Series([np.nan, np.nan, 5, 7, 9, np.nan]) expected = Series([5., 5., 5., 7., 9., np.nan]) result = s.interpolate(method='linear', limit=2, limit_direction='backward') assert_series_equal(result, expected) expected = Series([5., 5., 5., 7., 9., 9.]) result = s.interpolate(method='linear', limit=2, limit_direction='both') assert_series_equal(result, expected) def test_interp_limit_before_ends(self): # These test are for issue #11115 -- limit ends properly. s = Series([np.nan, np.nan, 5, 7, np.nan, np.nan]) expected = Series([np.nan, np.nan, 5., 7., 7., np.nan]) result = s.interpolate(method='linear', limit=1, limit_direction='forward') assert_series_equal(result, expected) expected = Series([np.nan, 5., 5., 7., np.nan, np.nan]) result = s.interpolate(method='linear', limit=1, limit_direction='backward') assert_series_equal(result, expected) expected = Series([np.nan, 5., 5., 7., 7., np.nan]) result = s.interpolate(method='linear', limit=1, limit_direction='both') assert_series_equal(result, expected) @td.skip_if_no_scipy def test_interp_all_good(self): s = Series([1, 2, 3]) result = s.interpolate(method='polynomial', order=1) assert_series_equal(result, s) # non-scipy result = s.interpolate() assert_series_equal(result, s) @pytest.mark.parametrize("check_scipy", [ False, pytest.param(True, marks=td.skip_if_no_scipy) ]) def test_interp_multiIndex(self, check_scipy): idx = MultiIndex.from_tuples([(0, 'a'), (1, 'b'), (2, 'c')]) s = Series([1, 2, np.nan], index=idx) expected = s.copy() expected.loc[2] = 2 result = s.interpolate() assert_series_equal(result, expected) if check_scipy: with pytest.raises(ValueError): s.interpolate(method='polynomial', order=1) @td.skip_if_no_scipy def test_interp_nonmono_raise(self): s = Series([1, np.nan, 3], index=[0, 2, 1]) with pytest.raises(ValueError): s.interpolate(method='krogh') @td.skip_if_no_scipy def test_interp_datetime64(self): df = Series([1, np.nan, 3], index=date_range('1/1/2000', periods=3)) result = df.interpolate(method='nearest') expected = Series([1., 1., 3.], index=date_range('1/1/2000', periods=3)) assert_series_equal(result, expected) def test_interp_limit_no_nans(self): # GH 7173 s = pd.Series([1., 2., 3.]) result = s.interpolate(limit=1) expected = s assert_series_equal(result, expected) @td.skip_if_no_scipy @pytest.mark.parametrize("method", ['polynomial', 'spline']) def test_no_order(self, method): s = Series([0, 1, np.nan, 3]) with pytest.raises(ValueError): s.interpolate(method=method) @td.skip_if_no_scipy def test_spline(self): s = Series([1, 2, np.nan, 4, 5, np.nan, 7]) result = s.interpolate(method='spline', order=1) expected = Series([1., 2., 3., 4., 5., 6., 7.]) assert_series_equal(result, expected) @td.skip_if_no('scipy', min_version='0.15') def test_spline_extrapolate(self): s = Series([1, 2, 3, 4, np.nan, 6, np.nan]) result3 = s.interpolate(method='spline', order=1, ext=3) expected3 = Series([1., 2., 3., 4., 5., 6., 6.]) assert_series_equal(result3, expected3) result1 = 
s.interpolate(method='spline', order=1, ext=0) expected1 = Series([1., 2., 3., 4., 5., 6., 7.]) assert_series_equal(result1, expected1) @td.skip_if_no_scipy def test_spline_smooth(self): s = Series([1, 2, np.nan, 4, 5.1, np.nan, 7]) assert (s.interpolate(method='spline', order=3, s=0)[5] != s.interpolate(method='spline', order=3)[5]) @td.skip_if_no_scipy def test_spline_interpolation(self): s = Series(np.arange(10) ** 2) s[np.random.randint(0, 9, 3)] = np.nan result1 = s.interpolate(method='spline', order=1) expected1 = s.interpolate(method='spline', order=1) assert_series_equal(result1, expected1) @td.skip_if_no_scipy def test_spline_error(self): # see gh-10633 s = pd.Series(np.arange(10) ** 2) s[np.random.randint(0, 9, 3)] = np.nan with pytest.raises(ValueError): s.interpolate(method='spline') with pytest.raises(ValueError): s.interpolate(method='spline', order=0) def test_interp_timedelta64(self): # GH 6424 df = Series([1, np.nan, 3], index=pd.to_timedelta([1, 2, 3])) result = df.interpolate(method='time') expected = Series([1., 2., 3.], index=pd.to_timedelta([1, 2, 3])) assert_series_equal(result, expected) # test for non uniform spacing df = Series([1, np.nan, 3], index=pd.to_timedelta([1, 2, 4])) result = df.interpolate(method='time') expected = Series([1., 1.666667, 3.], index=pd.to_timedelta([1, 2, 4])) assert_series_equal(result, expected) def test_series_interpolate_method_values(self): # #1646 ts = _simple_ts('1/1/2000', '1/20/2000') ts[::2] = np.nan result = ts.interpolate(method='values') exp = ts.interpolate() assert_series_equal(result, exp) def test_series_interpolate_intraday(self): # #1698 index = pd.date_range('1/1/2012', periods=4, freq='12D') ts = pd.Series([0, 12, 24, 36], index) new_index = index.append(index + pd.DateOffset(days=1)).sort_values() exp = ts.reindex(new_index).interpolate(method='time') index = pd.date_range('1/1/2012', periods=4, freq='12H') ts = pd.Series([0, 12, 24, 36], index) new_index = index.append(index + pd.DateOffset(hours=1)).sort_values() result = ts.reindex(new_index).interpolate(method='time') tm.assert_numpy_array_equal(result.values, exp.values)
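The fillna consistency tests above pin down a dtype rule: filling tz-naive datetime data with a tz-aware timestamp cannot stay datetime64[ns]. A minimal sketch of that behaviour, assuming a pandas version in the range these tests target; the values are taken from test_fillna_consistency.

import pandas as pd

s = pd.Series([pd.Timestamp('20130101'), pd.NaT])

# A tz-aware fill value cannot live in a tz-naive datetime64[ns] series,
# so the filled result is upcast to object dtype.
result = s.fillna(pd.Timestamp('20130101', tz='US/Eastern'))
print(result.dtype)  # object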
dsm054/pandas
pandas/tests/series/test_missing.py
pandas/io/sas/sas7bdat.py
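A companion sketch for the interpolation tests in the record above, showing how limit and limit_direction interact. The input series and the expected fills are copied from test_interp_limit and test_interp_limit_direction; only pandas and numpy are assumed.

import numpy as np
import pandas as pd

s = pd.Series([1, 3, np.nan, np.nan, np.nan, 11])

# limit=2 fills at most two consecutive NaNs; the default direction is forward.
print(s.interpolate(method='linear', limit=2).tolist())
# [1.0, 3.0, 5.0, 7.0, nan, 11.0]

# The same limit applied backward fills from the other end of the gap.
print(s.interpolate(method='linear', limit=2, limit_direction='backward').tolist())
# [1.0, 3.0, nan, 7.0, 9.0, 11.0]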
""" Utilities used within urbansim that don't yet have a better home. """ from __future__ import print_function import os import numpy as np import pandas as pd from zbox import toolz as tz def _mkifnotexists(folder): d = os.path.join(os.getenv('DATA_HOME', "."), folder) if not os.path.exists(d): os.makedirs(d) return d def data_dir(): """ Return the directory for the input data. """ return _mkifnotexists("data") def configs_dir(): """ Return the directory for the model configuration files. """ return _mkifnotexists("configs") def runs_dir(): """ Return the directory for the run output. """ return _mkifnotexists("runs") def models_dir(): """ Return the directory for the model configuration files (used by the website). """ return _mkifnotexists("configs") def charts_dir(): """ Return the directory for the chart configuration files (used by the website). """ return _mkifnotexists("web/charts") def maps_dir(): """ Return the directory for the map configuration files (used by the website). """ return _mkifnotexists("web/maps") def simulations_dir(): """ Return the directory for the simulation configuration files (used by the website). """ return _mkifnotexists("web/simulations") def reports_dir(): """ Return the directory for the report configuration files (used by the website). """ return _mkifnotexists("web/reports") def edits_dir(): """ Return the directory for the editable files (used by the website). """ return _mkifnotexists("") def config(fname): """ Return the config path for the file with the given filename. """ return os.path.join(configs_dir(), fname) def get_run_number(): """ Get a run number for this execution of the model system, for identifying the output hdf5 files). Returns ------- The integer number for this run of the model system. """ try: f = open(os.path.join(os.getenv('DATA_HOME', "."), 'RUNNUM'), 'r') num = int(f.read()) f.close() except Exception: num = 1 f = open(os.path.join(os.getenv('DATA_HOME', "."), 'RUNNUM'), 'w') f.write(str(num + 1)) f.close() return num def compute_range(travel_data, attr, travel_time_attr, dist, agg=np.sum): """ Compute a zone-based accessibility query using the urbansim format travel data dataframe. Parameters ---------- travel_data : dataframe The dataframe of urbansim format travel data. Has from_zone_id as first index, to_zone_id as second index, and different impedances between zones as columns. attr : series The attr to aggregate. Should be indexed by zone_id and the values will be aggregated. travel_time_attr : string The column name in travel_data to use as the impedance. dist : float The max distance to aggregate up to agg : function, optional, np.sum by default The numpy function to use for aggregation """ travel_data = travel_data.reset_index(level=1) travel_data = travel_data[travel_data[travel_time_attr] < dist] travel_data["attr"] = attr[travel_data.to_zone_id].values return travel_data.groupby(level=0).attr.apply(agg) def reindex(series1, series2): """ This reindexes the first series by the second series. This is an extremely common operation that does not appear to be in Pandas at this time. If anyone knows of an easier way to do this in Pandas, please inform the UrbanSim developers. The canonical example would be a parcel series which has an index which is parcel_ids and a value which you want to fetch, let's say it's land_area. Another dataset, let's say of buildings has a series which indicate the parcel_ids that the buildings are located on, but which does not have land_area. 
If you pass parcels.land_area as the first series and buildings.parcel_id as the second series, this function returns a series which is indexed by buildings and has land_area as values and can be added to the buildings dataset. In short, this is a join on to a different table using a foreign key stored in the current table, but with only one attribute rather than for a full dataset. This is very similar to the pandas "loc" function or "reindex" function, but neither of those functions return the series indexed on the current table. In both of those cases, the series would be indexed on the foreign table and would require a second step to change the index. """ # turns out the merge is much faster than the .loc below df = pd.merge(pd.DataFrame({"left": series2}), pd.DataFrame({"right": series1}), left_on="left", right_index=True, how="left") return df.right # return pd.Series(series1.loc[series2.values].values, index=series2.index) def fidx(right, left, left_fk=None): """ Re-indexes a series or data frame (right) to align with another (left) series or data frame via foreign key relationship. The index of the right must be unique. This is similar to misc.reindex, but allows for data frame re-indexes and supports re-indexing data frames or series with a multi-index. Parameters: ----------- right: pandas.DataFrame or pandas.Series Series or data frame to re-index from. left: pandas.Series or pandas.DataFrame Series or data frame to re-index to. If a series is provided, its values serve as the foreign keys. If a data frame is provided, one or more columns may be used as foreign keys, must specify the ``left_fk`` argument to specify which column(s) will serve as keys. left_fk: optional, str or list of str Used when the left is a data frame, specifies the column(s) in the left to serve as foreign keys. The specified columns' ordering must match the order of the multi-index in the right. Returns: -------- pandas.Series or pandas.DataFrame with column(s) from right aligned with the left. """ # ensure that we can align correctly if not right.index.is_unique: raise ValueError("The right's index must be unique!") # simpler case: # if the left (target) is a single series then just re-index to it if isinstance(left_fk, str): left = left[left_fk] if isinstance(left, pd.Series): a = right.reindex(left) a.index = left.index return a # when reindexing using multiple columns (composite foreign key) # i.e. the right has a multindex # if a series for the right provided, convert to a data frame if isinstance(right, pd.Series): right = right.to_frame('right') right_cols = 'right' else: right_cols = right.columns # do the merge return pd.merge( left=left, right=right, left_on=left_fk, right_index=True, how='left' )[right_cols] def signif(val): """ Convert a statistical significance to its ascii representation - this should be the same representation created in R. """ val = abs(val) if val > 3.1: return '***' elif val > 2.33: return '**' elif val > 1.64: return '*' elif val > 1.28: return '.' 
return '' naics_d = { 11: 'Agriculture', 21: 'Mining', 22: 'Utilities', 23: 'Construction', 31: 'Manufacturing1', 32: 'Manufacturing2', 33: 'Manufacturing3', 42: 'Wholesale', 44: 'Retail1', 45: 'Retail2', 48: 'Transportation', 49: 'Warehousing', 51: 'Information', 52: 'Finance and Insurance', 53: 'Real Estate', 54: 'Professional', 55: 'Management', 56: 'Administrative', 61: 'Educational', 62: 'Health Care', 71: 'Arts', 72: 'Accomodation and Food', 81: 'Other', 92: 'Public', 99: 'Unknown' } def naicsname(val): """ This function maps NAICS (job codes) from number to name. """ return naics_d[val] def numpymat2df(mat): """ Sometimes (though not very often) it is useful to convert a numpy matrix which has no column names to a Pandas dataframe for use of the Pandas functions. This method converts a 2D numpy matrix to Pandas dataframe with default column headers. Parameters ---------- mat : The numpy matrix Returns ------- A pandas dataframe with the same data as the input matrix but with columns named x0, x1, ... x[n-1] for the number of columns. """ return pd.DataFrame( dict(('x%d' % i, mat[:, i]) for i in range(mat.shape[1]))) def df64bitto32bit(tbl): """ Convert a Pandas dataframe from 64 bit types to 32 bit types to save memory or disk space. Parameters ---------- tbl : The dataframe to convert Returns ------- The converted dataframe """ newtbl = pd.DataFrame(index=tbl.index) for colname in tbl.columns: newtbl[colname] = series64bitto32bit(tbl[colname]) return newtbl def series64bitto32bit(s): """ Convert a Pandas series from 64 bit types to 32 bit types to save memory or disk space. Parameters ---------- s : The series to convert Returns ------- The converted series """ if s.dtype == np.float64: return s.astype('float32') elif s.dtype == np.int64: return s.astype('int32') return s def _pandassummarytojson(v, ndigits=3): return {i: round(float(v.ix[i]), ndigits) for i in v.index} def pandasdfsummarytojson(df, ndigits=3): """ Convert the result of a Parameters ---------- df : The result of a Pandas describe operation. ndigits : int, optional - The number of significant digits to round to. Returns ------- A json object which captures the describe. Keys are field names and values are dictionaries with all of the indexes returned by the Pandas describe. """ df = df.transpose() return {k: _pandassummarytojson(v, ndigits) for k, v in df.iterrows()} def column_map(tables, columns): """ Take a list of tables and a list of column names and resolve which columns come from which table. Parameters ---------- tables : sequence of _DataFrameWrapper or _TableFuncWrapper Could also be sequence of modified pandas.DataFrames, the important thing is that they have ``.name`` and ``.columns`` attributes. columns : sequence of str The column names of interest. Returns ------- col_map : dict Maps table names to lists of column names. """ if not columns: return {t.name: None for t in tables} columns = set(columns) colmap = {t.name: list(set(t.columns).intersection(columns)) for t in tables} foundcols = tz.reduce(lambda x, y: x.union(y), (set(v) for v in colmap.values())) if foundcols != columns: raise RuntimeError('Not all required columns were found. ' 'Missing: {}'.format(list(columns - foundcols))) return colmap def column_list(tables, columns): """ Take a list of tables and a list of column names and return the columns that are present in the tables. 
Parameters ---------- tables : sequence of _DataFrameWrapper or _TableFuncWrapper Could also be sequence of modified pandas.DataFrames, the important thing is that they have ``.name`` and ``.columns`` attributes. columns : sequence of str The column names of interest. Returns ------- cols : list Lists of column names available in the tables. """ columns = set(columns) foundcols = tz.reduce(lambda x, y: x.union(y), (set(t.columns) for t in tables)) return list(columns.intersection(foundcols))
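The reindex helper defined above is a one-column foreign-key join: look up values from series1 using the keys stored in series2 and return them aligned to series2's index. A small usage sketch, assuming the reindex function from this module is in scope; the parcel/building column names follow the docstring example and the numbers are made up.

import pandas as pd

# Hypothetical lookup table: land_area keyed by parcel_id.
parcels = pd.DataFrame(
    {'land_area': [1000.0, 2500.0, 400.0]},
    index=pd.Index([10, 11, 12], name='parcel_id'))

# Hypothetical buildings table carrying parcel_id as a foreign key.
buildings = pd.DataFrame(
    {'parcel_id': [11, 10, 11, 12]},
    index=pd.Index([1, 2, 3, 4], name='building_id'))

# The result is aligned to the buildings index, so it can be assigned directly.
buildings['land_area'] = reindex(parcels.land_area, buildings.parcel_id)
print(buildings['land_area'].tolist())
# [2500.0, 1000.0, 2500.0, 400.0]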
import os import tempfile try: from StringIO import StringIO except ImportError: from io import StringIO import numpy as np import numpy.testing as npt import pandas as pd import pytest import statsmodels.formula.api as smf import yaml from pandas.util import testing as pdt from statsmodels.regression.linear_model import RegressionResultsWrapper from .. import regression from ...exceptions import ModelEvaluationError from ...utils import testing @pytest.fixture def test_df(): return pd.DataFrame( {'col1': range(5), 'col2': range(5, 10)}, index=['a', 'b', 'c', 'd', 'e']) @pytest.fixture def groupby_df(test_df): test_df['group'] = ['x', 'y', 'x', 'x', 'y'] return test_df def test_fit_model(test_df): filters = [] model_exp = 'col1 ~ col2' fit = regression.fit_model(test_df, filters, model_exp) assert isinstance(fit, RegressionResultsWrapper) def test_predict(test_df): filters = ['col1 in [0, 2, 4]'] model_exp = 'col1 ~ col2' fit = regression.fit_model(test_df, filters, model_exp) predicted = regression.predict( test_df.query('col1 in [1, 3]'), None, fit) expected = pd.Series([1., 3.], index=['b', 'd']) pdt.assert_series_equal(predicted, expected) def test_predict_ytransform(test_df): def yt(x): return x / 2. filters = ['col1 in [0, 2, 4]'] model_exp = 'col1 ~ col2' fit = regression.fit_model(test_df, filters, model_exp) predicted = regression.predict( test_df.query('col1 in [1, 3]'), None, fit, ytransform=yt) expected = pd.Series([0.5, 1.5], index=['b', 'd']) pdt.assert_series_equal(predicted, expected) def test_predict_with_nans(): df = pd.DataFrame( {'col1': range(5), 'col2': [5, 6, pd.np.nan, 8, 9]}, index=['a', 'b', 'c', 'd', 'e']) with pytest.raises(ModelEvaluationError): regression.fit_model(df, None, 'col1 ~ col2') fit = regression.fit_model(df.loc[['a', 'b', 'e']], None, 'col1 ~ col2') predict = regression.predict( df.loc[['c', 'd']], None, fit) assert np.isnan(predict.loc['c']) def test_rhs(): assert regression._rhs('col1 + col2') == 'col1 + col2' assert regression._rhs('col3 ~ col1 + col2') == 'col1 + col2' def test_FakeRegressionResults(test_df): model_exp = 'col1 ~ col2' model = smf.ols(formula=model_exp, data=test_df) fit = model.fit() fit_parameters = regression._model_fit_to_table(fit) wrapper = regression._FakeRegressionResults( model_exp, fit_parameters, fit.rsquared, fit.rsquared_adj) test_predict = pd.DataFrame({'col2': [0.5, 10, 25.6]}) npt.assert_array_equal( wrapper.predict(test_predict), fit.predict(test_predict)) pdt.assert_series_equal(wrapper.params, fit.params, check_names=False) pdt.assert_series_equal(wrapper.bse, fit.bse, check_names=False) pdt.assert_series_equal(wrapper.tvalues, fit.tvalues, check_names=False) assert wrapper.rsquared == fit.rsquared assert wrapper.rsquared_adj == fit.rsquared_adj def test_RegressionModel(test_df): fit_filters = ['col1 in [0, 2, 4]'] predict_filters = ['col1 in [1, 3]'] model_exp = 'col1 ~ col2' def ytransform(x): return x / 2. 
name = 'test hedonic' model = regression.RegressionModel( fit_filters, predict_filters, model_exp, ytransform, name) assert model.fit_filters == fit_filters assert model.predict_filters == predict_filters assert model.model_expression == model_exp assert model.ytransform == ytransform assert model.name == name assert model.model_fit is None assert set(model.columns_used()) == {'col1', 'col2'} # verify there's an error if there isn't a model fit yet with pytest.raises(RuntimeError): model.predict(test_df) fit = model.fit(test_df) assert isinstance(fit, RegressionResultsWrapper) assert isinstance(model.model_fit, RegressionResultsWrapper) predicted = model.predict(test_df) expected = pd.Series([0.5, 1.5], index=['b', 'd']) pdt.assert_series_equal(predicted, expected) # make sure this doesn't cause an error model.report_fit() def test_RegressionModelGroup(groupby_df): model_exp = 'col1 ~ col2' hmg = regression.RegressionModelGroup('group') xmodel = regression.RegressionModel(None, None, model_exp, name='x') hmg.add_model(xmodel) assert isinstance(hmg.models['x'], regression.RegressionModel) hmg.add_model_from_params('y', None, None, model_exp) assert isinstance(hmg.models['y'], regression.RegressionModel) assert hmg.models['y'].name == 'y' assert set(hmg.columns_used()) == {'col1', 'col2'} assert hmg.fitted is False fits = hmg.fit(groupby_df) assert hmg.fitted is True assert isinstance(fits['x'], RegressionResultsWrapper) assert isinstance(fits['y'], RegressionResultsWrapper) predicted = hmg.predict(groupby_df) assert isinstance(predicted, pd.Series) pdt.assert_series_equal( predicted.sort_index(), groupby_df.col1, check_dtype=False, check_names=False) def assert_dict_specs_equal(j1, j2): j1_params = j1.pop('fit_parameters') j2_params = j2.pop('fit_parameters') assert j1 == j2 if j1_params and j2_params: pdt.assert_series_equal( pd.Series(j1_params['Coefficient']), pd.Series(j2_params['Coefficient'])) else: assert j1_params is None assert j2_params is None class TestRegressionModelYAMLNotFit(object): def setup_method(self, method): fit_filters = ['col1 in [0, 2, 4]'] predict_filters = ['col1 in [1, 3]'] model_exp = 'col1 ~ col2' ytransform = np.log1p name = 'test hedonic' self.model = regression.RegressionModel( fit_filters, predict_filters, model_exp, ytransform, name) self.expected_dict = { 'model_type': 'regression', 'name': name, 'fit_filters': fit_filters, 'predict_filters': predict_filters, 'model_expression': model_exp, 'ytransform': regression.YTRANSFORM_MAPPING[ytransform], 'fitted': False, 'fit_parameters': None, 'fit_rsquared': None, 'fit_rsquared_adj': None } def test_string(self): test_yaml = self.model.to_yaml() assert_dict_specs_equal(yaml.load(test_yaml), self.expected_dict) model = regression.RegressionModel.from_yaml(yaml_str=test_yaml) assert isinstance(model, regression.RegressionModel) def test_buffer(self): test_buffer = StringIO() self.model.to_yaml(str_or_buffer=test_buffer) assert_dict_specs_equal( yaml.load(test_buffer.getvalue()), self.expected_dict) test_buffer.seek(0) model = regression.RegressionModel.from_yaml(str_or_buffer=test_buffer) assert isinstance(model, regression.RegressionModel) test_buffer.close() def test_file(self): test_file = tempfile.NamedTemporaryFile(suffix='.yaml').name self.model.to_yaml(str_or_buffer=test_file) with open(test_file) as f: assert_dict_specs_equal(yaml.load(f), self.expected_dict) model = regression.RegressionModel.from_yaml(str_or_buffer=test_file) assert isinstance(model, regression.RegressionModel) os.remove(test_file) class 
TestRegressionModelYAMLFit(TestRegressionModelYAMLNotFit): def setup_method(self, method): super(TestRegressionModelYAMLFit, self).setup_method(method) self.model.fit(test_df()) self.expected_dict['fitted'] = True self.expected_dict['fit_rsquared'] = 1.0 self.expected_dict['fit_rsquared_adj'] = 1.0 self.expected_dict['fit_parameters'] = { 'Coefficient': { 'Intercept': -5.0, 'col2': 1.0}, 'T-Score': { 'Intercept': 8.621678386539817e-16, 'col2': 5.997311421859925e-16}, 'Std. Error': { 'Intercept': 6.771450370191848e-15, 'col2': 9.420554752102651e-16}} def test_fitted_load(self, test_df): model = regression.RegressionModel.from_yaml( yaml_str=self.model.to_yaml()) assert isinstance(model.model_fit, regression._FakeRegressionResults) npt.assert_array_equal( model.predict(test_df), self.model.predict(test_df)) testing.assert_frames_equal( model.fit_parameters, self.model.fit_parameters) assert model.fit_parameters.rsquared == \ self.model.fit_parameters.rsquared assert model.fit_parameters.rsquared_adj == \ self.model.fit_parameters.rsquared_adj def test_model_fit_to_table(test_df): filters = [] model_exp = 'col1 ~ col2' fit = regression.fit_model(test_df, filters, model_exp) params = regression._model_fit_to_table(fit) pdt.assert_series_equal( params['Coefficient'], fit.params, check_names=False) pdt.assert_series_equal(params['Std. Error'], fit.bse, check_names=False) pdt.assert_series_equal(params['T-Score'], fit.tvalues, check_names=False) assert params.rsquared == fit.rsquared assert params.rsquared_adj == fit.rsquared_adj def test_SegmentedRegressionModel_raises(groupby_df): seg = regression.SegmentedRegressionModel('group') with pytest.raises(ValueError): seg.fit(groupby_df) def test_SegmentedRegressionModel(groupby_df): seg = regression.SegmentedRegressionModel( 'group', default_model_expr='col1 ~ col2') assert seg.fitted is False fits = seg.fit(groupby_df) assert seg.fitted is True assert 'x' in fits and 'y' in fits assert isinstance(fits['x'], RegressionResultsWrapper) test_data = pd.DataFrame({'group': ['x', 'y'], 'col2': [0.5, 10.5]}) predicted = seg.predict(test_data) pdt.assert_series_equal(predicted.sort_index(), pd.Series([-4.5, 5.5])) def test_SegmentedRegressionModel_explicit(groupby_df): seg = regression.SegmentedRegressionModel( 'group', fit_filters=['col1 not in [2]'], predict_filters=['group != "z"']) seg.add_segment('x', 'col1 ~ col2') seg.add_segment('y', 'np.exp(col2) ~ np.exp(col1)', np.log) assert set(seg.columns_used()) == {'col1', 'col2', 'group'} fits = seg.fit(groupby_df) assert 'x' in fits and 'y' in fits assert isinstance(fits['x'], RegressionResultsWrapper) test_data = pd.DataFrame( {'group': ['x', 'z', 'y'], 'col1': [-5, 42, 100], 'col2': [0.5, 42, 10.5]}) predicted = seg.predict(test_data) pdt.assert_series_equal( predicted.sort_index(), pd.Series([-4.5, 105], index=[0, 2])) def test_SegmentedRegressionModel_yaml(groupby_df): seg = regression.SegmentedRegressionModel( 'group', fit_filters=['col1 not in [2]'], predict_filters=['group != "z"'], default_model_expr='col1 ~ col2', min_segment_size=5000, name='test_seg') seg.add_segment('x') seg.add_segment('y', 'np.exp(col2) ~ np.exp(col1)', np.log) expected_dict = { 'model_type': 'segmented_regression', 'name': 'test_seg', 'segmentation_col': 'group', 'fit_filters': ['col1 not in [2]'], 'predict_filters': ['group != "z"'], 'min_segment_size': 5000, 'default_config': { 'model_expression': 'col1 ~ col2', 'ytransform': None }, 'fitted': False, 'models': { 'x': { 'name': 'x', 'fitted': False, 'fit_parameters': None, 
'fit_rsquared': None, 'fit_rsquared_adj': None }, 'y': { 'name': 'y', 'model_expression': 'np.exp(col2) ~ np.exp(col1)', 'ytransform': 'np.log', 'fitted': False, 'fit_parameters': None, 'fit_rsquared': None, 'fit_rsquared_adj': None } } } assert yaml.load(seg.to_yaml()) == expected_dict new_seg = regression.SegmentedRegressionModel.from_yaml(seg.to_yaml()) assert yaml.load(new_seg.to_yaml()) == expected_dict seg.fit(groupby_df) expected_dict['fitted'] = True expected_dict['models']['x']['fitted'] = True expected_dict['models']['y']['fitted'] = True del expected_dict['models']['x']['fit_parameters'] del expected_dict['models']['x']['fit_rsquared'] del expected_dict['models']['x']['fit_rsquared_adj'] del expected_dict['models']['y']['fit_parameters'] del expected_dict['models']['y']['fit_rsquared'] del expected_dict['models']['y']['fit_rsquared_adj'] actual_dict = yaml.load(seg.to_yaml()) assert isinstance(actual_dict['models']['x'].pop('fit_parameters'), dict) assert isinstance(actual_dict['models']['x'].pop('fit_rsquared'), float) assert isinstance( actual_dict['models']['x'].pop('fit_rsquared_adj'), float) assert isinstance(actual_dict['models']['y'].pop('fit_parameters'), dict) assert isinstance(actual_dict['models']['y'].pop('fit_rsquared'), float) assert isinstance( actual_dict['models']['y'].pop('fit_rsquared_adj'), float) assert actual_dict == expected_dict new_seg = regression.SegmentedRegressionModel.from_yaml(seg.to_yaml()) assert new_seg.fitted is True def test_SegmentedRegressionModel_removes_gone_segments(groupby_df): seg = regression.SegmentedRegressionModel( 'group', default_model_expr='col1 ~ col2') seg.add_segment('a') seg.add_segment('b') seg.add_segment('c') seg.fit(groupby_df) assert sorted(seg._group.models.keys()) == ['x', 'y'] def test_fit_from_cfg(test_df): fit_filters = ['col1 in [0, 2, 4]'] predict_filters = ['col1 in [1, 3]'] model_exp = 'col1 ~ col2' ytransform = np.log name = 'test hedonic' model = regression.RegressionModel( fit_filters, predict_filters, model_exp, ytransform, name) cfgname = tempfile.NamedTemporaryFile(suffix='.yaml').name model.to_yaml(cfgname) regression.RegressionModel.fit_from_cfg(test_df, cfgname, debug=True) regression.RegressionModel.predict_from_cfg(test_df, cfgname) os.remove(cfgname) def test_fit_from_cfg_segmented(groupby_df): seg = regression.SegmentedRegressionModel( 'group', fit_filters=['col1 not in [2]'], predict_filters=['group != "z"'], default_model_expr='col1 ~ col2', min_segment_size=5000, name='test_seg') seg.add_segment('x') cfgname = tempfile.NamedTemporaryFile(suffix='.yaml').name seg.to_yaml(cfgname) regression.SegmentedRegressionModel.fit_from_cfg(groupby_df, cfgname, debug=True, min_segment_size=5000) regression.SegmentedRegressionModel.predict_from_cfg(groupby_df, cfgname, min_segment_size=5000) os.remove(cfgname)
apdjustino/urbansim
urbansim/models/tests/test_regression.py
urbansim/utils/misc.py
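The record above also exercises RegressionModel end to end: construct with filters and a patsy expression, fit on a DataFrame, then predict. A condensed sketch of that flow; the DataFrame, filters, expression, ytransform and the resulting predictions are lifted from the test_df fixture and test_RegressionModel, while the import path urbansim.models.regression is inferred from the tests' relative import.

import pandas as pd
from urbansim.models import regression

df = pd.DataFrame({'col1': range(5), 'col2': range(5, 10)},
                  index=['a', 'b', 'c', 'd', 'e'])

model = regression.RegressionModel(
    ['col1 in [0, 2, 4]'],   # fit_filters: rows used for estimation
    ['col1 in [1, 3]'],      # predict_filters: rows scored by predict()
    'col1 ~ col2',           # patsy model expression
    lambda x: x / 2.,        # ytransform applied to the raw prediction
    'example hedonic')       # name

model.fit(df)                # wraps a statsmodels OLS fit
print(model.predict(df))
# b    0.5
# d    1.5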
"""Tests for generated dict functions.""" from attr import Factory from attr._make import NOTHING from hypothesis import assume, given from hypothesis.strategies._internal.core import data, sampled_from from cattr import Converter from cattr._compat import adapted_fields, fields from cattr.gen import ( make_dict_structure_fn, make_dict_unstructure_fn, override, ) from . import nested_classes, simple_classes from .metadata import ( nested_typed_classes, simple_typed_classes, simple_typed_dataclasses, ) @given(nested_classes | simple_classes()) def test_unmodified_generated_unstructuring(cl_and_vals): converter = Converter() cl, vals = cl_and_vals fn = make_dict_unstructure_fn(cl, converter) inst = cl(*vals) res_expected = converter.unstructure(inst) converter.register_unstructure_hook(cl, fn) res_actual = converter.unstructure(inst) assert res_expected == res_actual @given(nested_classes | simple_classes()) def test_nodefs_generated_unstructuring(cl_and_vals): """Test omitting default values on a per-attribute basis.""" converter = Converter() cl, vals = cl_and_vals attr_is_default = False for attr, val in zip(cl.__attrs_attrs__, vals): if attr.default is not NOTHING: fn = make_dict_unstructure_fn( cl, converter, **{attr.name: override(omit_if_default=True)} ) if attr.default == val: attr_is_default = True break else: assume(False) converter.register_unstructure_hook(cl, fn) inst = cl(*vals) res = converter.unstructure(inst) if attr_is_default: assert attr.name not in res @given(nested_classes | simple_classes()) def test_nodefs_generated_unstructuring_cl(cl_and_vals): """Test omitting default values on a per-class basis.""" converter = Converter() cl, vals = cl_and_vals for attr, val in zip(cl.__attrs_attrs__, vals): if attr.default is not NOTHING: break else: assume(False) converter.register_unstructure_hook( cl, make_dict_unstructure_fn(cl, converter, omit_if_default=True) ) inst = cl(*vals) res = converter.unstructure(inst) for attr, val in zip(cl.__attrs_attrs__, vals): if attr.default is not NOTHING: if not isinstance(attr.default, Factory): if val == attr.default: assert attr.name not in res else: assert attr.name in res else: # The default is a factory, but might take self. if attr.default.takes_self: if val == attr.default.factory(cl): assert attr.name not in res else: assert attr.name in res else: if val == attr.default.factory(): assert attr.name not in res else: assert attr.name in res @given(nested_classes | simple_classes() | simple_typed_dataclasses()) def test_individual_overrides(cl_and_vals): """ Test omitting default values on a per-class basis, but with individual overrides. 
""" converter = Converter() cl, vals = cl_and_vals for attr, val in zip(adapted_fields(cl), vals): if attr.default is not NOTHING: break else: assume(False) chosen_name = attr.name converter.register_unstructure_hook( cl, make_dict_unstructure_fn( cl, converter, omit_if_default=True, **{attr.name: override(omit_if_default=False)} ), ) inst = cl(*vals) res = converter.unstructure(inst) assert "Hyp" not in repr(res) assert "Factory" not in repr(res) for attr, val in zip(adapted_fields(cl), vals): if attr.name == chosen_name: assert attr.name in res elif attr.default is not NOTHING: if not isinstance(attr.default, Factory): if val == attr.default: assert attr.name not in res else: assert attr.name in res else: if attr.default.takes_self: if val == attr.default.factory(inst): assert attr.name not in res else: assert attr.name in res else: if val == attr.default.factory(): assert attr.name not in res else: assert attr.name in res @given( nested_typed_classes() | simple_typed_classes() | simple_typed_dataclasses() ) def test_unmodified_generated_structuring(cl_and_vals): converter = Converter() cl, vals = cl_and_vals fn = make_dict_structure_fn(cl, converter) inst = cl(*vals) unstructured = converter.unstructure(inst) assert "Hyp" not in repr(unstructured) converter.register_structure_hook(cl, fn) res = converter.structure(unstructured, cl) assert inst == res @given( simple_typed_classes(min_attrs=1) | simple_typed_dataclasses(min_attrs=1), data(), ) def test_renaming(cl_and_vals, data): converter = Converter() cl, vals = cl_and_vals attrs = fields(cl) to_replace = data.draw(sampled_from(attrs)) u_fn = make_dict_unstructure_fn( cl, converter, **{to_replace.name: override(rename="class")} ) s_fn = make_dict_structure_fn( cl, converter, **{to_replace.name: override(rename="class")} ) converter.register_structure_hook(cl, s_fn) converter.register_unstructure_hook(cl, u_fn) inst = cl(*vals) raw = converter.unstructure(inst) assert "class" in raw new_inst = converter.structure(raw, cl) assert inst == new_inst
"""Tests for auto-disambiguators.""" from typing import Any import attr import pytest from attr import NOTHING from hypothesis import HealthCheck, assume, given, settings from cattr.disambiguators import create_uniq_field_dis_func from . import simple_classes def test_edge_errors(): """Edge input cases cause errors.""" @attr.s class A(object): pass with pytest.raises(ValueError): # Can't generate for only one class. create_uniq_field_dis_func(A) @attr.s class B(object): pass with pytest.raises(ValueError): # No fields on either class. create_uniq_field_dis_func(A, B) @attr.s class C(object): a = attr.ib() @attr.s class D(object): a = attr.ib() with pytest.raises(ValueError): # No unique fields on either class. create_uniq_field_dis_func(C, D) @attr.s class E: pass @attr.s class F: b = attr.ib(default=Any) with pytest.raises(ValueError): # no usable non-default attributes create_uniq_field_dis_func(E, F) @given(simple_classes(defaults=False)) def test_fallback(cl_and_vals): """The fallback case works.""" cl, vals = cl_and_vals assume(attr.fields(cl)) # At least one field. @attr.s class A(object): pass fn = create_uniq_field_dis_func(A, cl) assert fn({}) is A assert fn(attr.asdict(cl(*vals))) is cl attr_names = {a.name for a in attr.fields(cl)} if "xyz" not in attr_names: fn({"xyz": 1}) is A # Uses the fallback. @settings( suppress_health_check=[HealthCheck.filter_too_much, HealthCheck.too_slow] ) @given(simple_classes(), simple_classes()) def test_disambiguation(cl_and_vals_a, cl_and_vals_b): """Disambiguation should work when there are unique required fields.""" cl_a, vals_a = cl_and_vals_a cl_b, vals_b = cl_and_vals_b req_a = {a.name for a in attr.fields(cl_a)} req_b = {a.name for a in attr.fields(cl_b)} assume(len(req_a)) assume(len(req_b)) assume((req_a - req_b) or (req_b - req_a)) for attr_name in req_a - req_b: assume(getattr(attr.fields(cl_a), attr_name).default is NOTHING) for attr_name in req_b - req_a: assume(getattr(attr.fields(cl_b), attr_name).default is NOTHING) fn = create_uniq_field_dis_func(cl_a, cl_b) assert fn(attr.asdict(cl_a(*vals_a))) is cl_a
Tinche/cattrs
tests/test_disambigutors.py
tests/test_dict_generation.py
import pandas as pd
from pandas.core.internals import ObjectBlock

from .base import BaseExtensionTests


class BaseCastingTests(BaseExtensionTests):
    """Casting to and from ExtensionDtypes"""

    def test_astype_object_series(self, all_data):
        ser = pd.Series({"A": all_data})
        result = ser.astype(object)
        assert isinstance(result._data.blocks[0], ObjectBlock)

    def test_tolist(self, data):
        result = pd.Series(data).tolist()
        expected = list(data)
        assert result == expected

    def test_astype_str(self, data):
        result = pd.Series(data[:5]).astype(str)
        expected = pd.Series(data[:5].astype(str))
        self.assert_series_equal(result, expected)
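# ---------------------------------------------------------------------------
# Illustrative sketch, an assumption rather than code from this repo: concrete
# extension-array suites consume mixins like BaseCastingTests by defining the
# ``data`` / ``all_data`` fixtures and inheriting the class, so its tests run
# against that array unchanged. ``Categorical`` is used here only as an
# example array; real suites pick their own ExtensionArray.
# ---------------------------------------------------------------------------
import pytest
import pandas as pd

from pandas.tests.extension import base


@pytest.fixture
def data():
    # Example ExtensionArray-backed data for the mixin's tests.
    return pd.Categorical(list("abcabc"))


@pytest.fixture
def all_data(data):
    return data


class TestCasting(base.BaseCastingTests):
    pass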
from __future__ import division import pytest import numpy as np from pandas import ( Interval, IntervalIndex, Index, isna, notna, interval_range, Timestamp, Timedelta, date_range, timedelta_range) from pandas.compat import lzip import pandas.core.common as com from pandas.tests.indexes.common import Base import pandas.util.testing as tm import pandas as pd @pytest.fixture(scope='class', params=[None, 'foo']) def name(request): return request.param class TestIntervalIndex(Base): _holder = IntervalIndex def setup_method(self, method): self.index = IntervalIndex.from_arrays([0, 1], [1, 2]) self.index_with_nan = IntervalIndex.from_tuples( [(0, 1), np.nan, (1, 2)]) self.indices = dict(intervalIndex=tm.makeIntervalIndex(10)) def create_index(self, closed='right'): return IntervalIndex.from_breaks(range(11), closed=closed) def create_index_with_nan(self, closed='right'): mask = [True, False] + [True] * 8 return IntervalIndex.from_arrays( np.where(mask, np.arange(10), np.nan), np.where(mask, np.arange(1, 11), np.nan), closed=closed) def test_properties(self, closed): index = self.create_index(closed=closed) assert len(index) == 10 assert index.size == 10 assert index.shape == (10, ) tm.assert_index_equal(index.left, Index(np.arange(10))) tm.assert_index_equal(index.right, Index(np.arange(1, 11))) tm.assert_index_equal(index.mid, Index(np.arange(0.5, 10.5))) assert index.closed == closed ivs = [Interval(l, r, closed) for l, r in zip(range(10), range(1, 11))] expected = np.array(ivs, dtype=object) tm.assert_numpy_array_equal(np.asarray(index), expected) tm.assert_numpy_array_equal(index.values, expected) # with nans index = self.create_index_with_nan(closed=closed) assert len(index) == 10 assert index.size == 10 assert index.shape == (10, ) expected_left = Index([0, np.nan, 2, 3, 4, 5, 6, 7, 8, 9]) expected_right = expected_left + 1 expected_mid = expected_left + 0.5 tm.assert_index_equal(index.left, expected_left) tm.assert_index_equal(index.right, expected_right) tm.assert_index_equal(index.mid, expected_mid) assert index.closed == closed ivs = [Interval(l, r, closed) if notna(l) else np.nan for l, r in zip(expected_left, expected_right)] expected = np.array(ivs, dtype=object) tm.assert_numpy_array_equal(np.asarray(index), expected) tm.assert_numpy_array_equal(index.values, expected) @pytest.mark.parametrize('breaks', [ [1, 1, 2, 5, 15, 53, 217, 1014, 5335, 31240, 201608], [-np.inf, -100, -10, 0.5, 1, 1.5, 3.8, 101, 202, np.inf], pd.to_datetime(['20170101', '20170202', '20170303', '20170404']), pd.to_timedelta(['1ns', '2ms', '3s', '4M', '5H', '6D'])]) def test_length(self, closed, breaks): # GH 18789 index = IntervalIndex.from_breaks(breaks, closed=closed) result = index.length expected = Index(iv.length for iv in index) tm.assert_index_equal(result, expected) # with NA index = index.insert(1, np.nan) result = index.length expected = Index(iv.length if notna(iv) else iv for iv in index) tm.assert_index_equal(result, expected) def test_with_nans(self, closed): index = self.create_index(closed=closed) assert not index.hasnans result = index.isna() expected = np.repeat(False, len(index)) tm.assert_numpy_array_equal(result, expected) result = index.notna() expected = np.repeat(True, len(index)) tm.assert_numpy_array_equal(result, expected) index = self.create_index_with_nan(closed=closed) assert index.hasnans result = index.isna() expected = np.array([False, True] + [False] * (len(index) - 2)) tm.assert_numpy_array_equal(result, expected) result = index.notna() expected = np.array([True, False] + 
[True] * (len(index) - 2)) tm.assert_numpy_array_equal(result, expected) def test_copy(self, closed): expected = self.create_index(closed=closed) result = expected.copy() assert result.equals(expected) result = expected.copy(deep=True) assert result.equals(expected) assert result.left is not expected.left def test_ensure_copied_data(self, closed): # exercise the copy flag in the constructor # not copying index = self.create_index(closed=closed) result = IntervalIndex(index, copy=False) tm.assert_numpy_array_equal(index.left.values, result.left.values, check_same='same') tm.assert_numpy_array_equal(index.right.values, result.right.values, check_same='same') # by-definition make a copy result = IntervalIndex(index.values, copy=False) tm.assert_numpy_array_equal(index.left.values, result.left.values, check_same='copy') tm.assert_numpy_array_equal(index.right.values, result.right.values, check_same='copy') def test_equals(self, closed): expected = IntervalIndex.from_breaks(np.arange(5), closed=closed) assert expected.equals(expected) assert expected.equals(expected.copy()) assert not expected.equals(expected.astype(object)) assert not expected.equals(np.array(expected)) assert not expected.equals(list(expected)) assert not expected.equals([1, 2]) assert not expected.equals(np.array([1, 2])) assert not expected.equals(pd.date_range('20130101', periods=2)) expected_name1 = IntervalIndex.from_breaks( np.arange(5), closed=closed, name='foo') expected_name2 = IntervalIndex.from_breaks( np.arange(5), closed=closed, name='bar') assert expected.equals(expected_name1) assert expected_name1.equals(expected_name2) for other_closed in {'left', 'right', 'both', 'neither'} - {closed}: expected_other_closed = IntervalIndex.from_breaks( np.arange(5), closed=other_closed) assert not expected.equals(expected_other_closed) @pytest.mark.parametrize('klass', [list, tuple, np.array, pd.Series]) def test_where(self, closed, klass): idx = self.create_index(closed=closed) cond = [True] * len(idx) expected = idx result = expected.where(klass(cond)) tm.assert_index_equal(result, expected) cond = [False] + [True] * len(idx[1:]) expected = IntervalIndex([np.nan] + idx[1:].tolist()) result = idx.where(klass(cond)) tm.assert_index_equal(result, expected) def test_delete(self, closed): expected = IntervalIndex.from_breaks(np.arange(1, 11), closed=closed) result = self.create_index(closed=closed).delete(0) tm.assert_index_equal(result, expected) @pytest.mark.parametrize('data', [ interval_range(0, periods=10, closed='neither'), interval_range(1.7, periods=8, freq=2.5, closed='both'), interval_range(Timestamp('20170101'), periods=12, closed='left'), interval_range(Timedelta('1 day'), periods=6, closed='right')]) def test_insert(self, data): item = data[0] idx_item = IntervalIndex([item]) # start expected = idx_item.append(data) result = data.insert(0, item) tm.assert_index_equal(result, expected) # end expected = data.append(idx_item) result = data.insert(len(data), item) tm.assert_index_equal(result, expected) # mid expected = data[:3].append(idx_item).append(data[3:]) result = data.insert(3, item) tm.assert_index_equal(result, expected) # invalid type msg = 'can only insert Interval objects and NA into an IntervalIndex' with tm.assert_raises_regex(ValueError, msg): data.insert(1, 'foo') # invalid closed msg = 'inserted item must be closed on the same side as the index' for closed in {'left', 'right', 'both', 'neither'} - {item.closed}: with tm.assert_raises_regex(ValueError, msg): bad_item = Interval(item.left, item.right, 
closed=closed) data.insert(1, bad_item) # GH 18295 (test missing) na_idx = IntervalIndex([np.nan], closed=data.closed) for na in (np.nan, pd.NaT, None): expected = data[:1].append(na_idx).append(data[1:]) result = data.insert(1, na) tm.assert_index_equal(result, expected) def test_take(self, closed): index = self.create_index(closed=closed) result = index.take(range(10)) tm.assert_index_equal(result, index) result = index.take([0, 0, 1]) expected = IntervalIndex.from_arrays( [0, 0, 1], [1, 1, 2], closed=closed) tm.assert_index_equal(result, expected) def test_unique(self, closed): # unique non-overlapping idx = IntervalIndex.from_tuples( [(0, 1), (2, 3), (4, 5)], closed=closed) assert idx.is_unique # unique overlapping - distinct endpoints idx = IntervalIndex.from_tuples([(0, 1), (0.5, 1.5)], closed=closed) assert idx.is_unique # unique overlapping - shared endpoints idx = pd.IntervalIndex.from_tuples( [(1, 2), (1, 3), (2, 3)], closed=closed) assert idx.is_unique # unique nested idx = IntervalIndex.from_tuples([(-1, 1), (-2, 2)], closed=closed) assert idx.is_unique # duplicate idx = IntervalIndex.from_tuples( [(0, 1), (0, 1), (2, 3)], closed=closed) assert not idx.is_unique # empty idx = IntervalIndex([], closed=closed) assert idx.is_unique def test_monotonic(self, closed): # increasing non-overlapping idx = IntervalIndex.from_tuples( [(0, 1), (2, 3), (4, 5)], closed=closed) assert idx.is_monotonic assert idx._is_strictly_monotonic_increasing assert not idx.is_monotonic_decreasing assert not idx._is_strictly_monotonic_decreasing # decreasing non-overlapping idx = IntervalIndex.from_tuples( [(4, 5), (2, 3), (1, 2)], closed=closed) assert not idx.is_monotonic assert not idx._is_strictly_monotonic_increasing assert idx.is_monotonic_decreasing assert idx._is_strictly_monotonic_decreasing # unordered non-overlapping idx = IntervalIndex.from_tuples( [(0, 1), (4, 5), (2, 3)], closed=closed) assert not idx.is_monotonic assert not idx._is_strictly_monotonic_increasing assert not idx.is_monotonic_decreasing assert not idx._is_strictly_monotonic_decreasing # increasing overlapping idx = IntervalIndex.from_tuples( [(0, 2), (0.5, 2.5), (1, 3)], closed=closed) assert idx.is_monotonic assert idx._is_strictly_monotonic_increasing assert not idx.is_monotonic_decreasing assert not idx._is_strictly_monotonic_decreasing # decreasing overlapping idx = IntervalIndex.from_tuples( [(1, 3), (0.5, 2.5), (0, 2)], closed=closed) assert not idx.is_monotonic assert not idx._is_strictly_monotonic_increasing assert idx.is_monotonic_decreasing assert idx._is_strictly_monotonic_decreasing # unordered overlapping idx = IntervalIndex.from_tuples( [(0.5, 2.5), (0, 2), (1, 3)], closed=closed) assert not idx.is_monotonic assert not idx._is_strictly_monotonic_increasing assert not idx.is_monotonic_decreasing assert not idx._is_strictly_monotonic_decreasing # increasing overlapping shared endpoints idx = pd.IntervalIndex.from_tuples( [(1, 2), (1, 3), (2, 3)], closed=closed) assert idx.is_monotonic assert idx._is_strictly_monotonic_increasing assert not idx.is_monotonic_decreasing assert not idx._is_strictly_monotonic_decreasing # decreasing overlapping shared endpoints idx = pd.IntervalIndex.from_tuples( [(2, 3), (1, 3), (1, 2)], closed=closed) assert not idx.is_monotonic assert not idx._is_strictly_monotonic_increasing assert idx.is_monotonic_decreasing assert idx._is_strictly_monotonic_decreasing # stationary idx = IntervalIndex.from_tuples([(0, 1), (0, 1)], closed=closed) assert idx.is_monotonic assert not 
idx._is_strictly_monotonic_increasing assert idx.is_monotonic_decreasing assert not idx._is_strictly_monotonic_decreasing # empty idx = IntervalIndex([], closed=closed) assert idx.is_monotonic assert idx._is_strictly_monotonic_increasing assert idx.is_monotonic_decreasing assert idx._is_strictly_monotonic_decreasing @pytest.mark.skip(reason='not a valid repr as we use interval notation') def test_repr(self): i = IntervalIndex.from_tuples([(0, 1), (1, 2)], closed='right') expected = ("IntervalIndex(left=[0, 1]," "\n right=[1, 2]," "\n closed='right'," "\n dtype='interval[int64]')") assert repr(i) == expected i = IntervalIndex.from_tuples((Timestamp('20130101'), Timestamp('20130102')), (Timestamp('20130102'), Timestamp('20130103')), closed='right') expected = ("IntervalIndex(left=['2013-01-01', '2013-01-02']," "\n right=['2013-01-02', '2013-01-03']," "\n closed='right'," "\n dtype='interval[datetime64[ns]]')") assert repr(i) == expected @pytest.mark.skip(reason='not a valid repr as we use interval notation') def test_repr_max_seq_item_setting(self): super(TestIntervalIndex, self).test_repr_max_seq_item_setting() @pytest.mark.skip(reason='not a valid repr as we use interval notation') def test_repr_roundtrip(self): super(TestIntervalIndex, self).test_repr_roundtrip() # TODO: check this behavior is consistent with test_interval_new.py def test_get_item(self, closed): i = IntervalIndex.from_arrays((0, 1, np.nan), (1, 2, np.nan), closed=closed) assert i[0] == Interval(0.0, 1.0, closed=closed) assert i[1] == Interval(1.0, 2.0, closed=closed) assert isna(i[2]) result = i[0:1] expected = IntervalIndex.from_arrays((0.,), (1.,), closed=closed) tm.assert_index_equal(result, expected) result = i[0:2] expected = IntervalIndex.from_arrays((0., 1), (1., 2.), closed=closed) tm.assert_index_equal(result, expected) result = i[1:3] expected = IntervalIndex.from_arrays((1., np.nan), (2., np.nan), closed=closed) tm.assert_index_equal(result, expected) # To be removed, replaced by test_interval_new.py (see #16316, #16386) def test_get_loc_value(self): pytest.raises(KeyError, self.index.get_loc, 0) assert self.index.get_loc(0.5) == 0 assert self.index.get_loc(1) == 0 assert self.index.get_loc(1.5) == 1 assert self.index.get_loc(2) == 1 pytest.raises(KeyError, self.index.get_loc, -1) pytest.raises(KeyError, self.index.get_loc, 3) idx = IntervalIndex.from_tuples([(0, 2), (1, 3)]) assert idx.get_loc(0.5) == 0 assert idx.get_loc(1) == 0 tm.assert_numpy_array_equal(idx.get_loc(1.5), np.array([0, 1], dtype='int64')) tm.assert_numpy_array_equal(np.sort(idx.get_loc(2)), np.array([0, 1], dtype='int64')) assert idx.get_loc(3) == 1 pytest.raises(KeyError, idx.get_loc, 3.5) idx = IntervalIndex.from_arrays([0, 2], [1, 3]) pytest.raises(KeyError, idx.get_loc, 1.5) # To be removed, replaced by test_interval_new.py (see #16316, #16386) def slice_locs_cases(self, breaks): # TODO: same tests for more index types index = IntervalIndex.from_breaks([0, 1, 2], closed='right') assert index.slice_locs() == (0, 2) assert index.slice_locs(0, 1) == (0, 1) assert index.slice_locs(1, 1) == (0, 1) assert index.slice_locs(0, 2) == (0, 2) assert index.slice_locs(0.5, 1.5) == (0, 2) assert index.slice_locs(0, 0.5) == (0, 1) assert index.slice_locs(start=1) == (0, 2) assert index.slice_locs(start=1.2) == (1, 2) assert index.slice_locs(end=1) == (0, 1) assert index.slice_locs(end=1.1) == (0, 2) assert index.slice_locs(end=1.0) == (0, 1) assert index.slice_locs(-1, -1) == (0, 0) index = IntervalIndex.from_breaks([0, 1, 2], closed='neither') assert 
index.slice_locs(0, 1) == (0, 1) assert index.slice_locs(0, 2) == (0, 2) assert index.slice_locs(0.5, 1.5) == (0, 2) assert index.slice_locs(1, 1) == (1, 1) assert index.slice_locs(1, 2) == (1, 2) index = IntervalIndex.from_tuples([(0, 1), (2, 3), (4, 5)], closed='both') assert index.slice_locs(1, 1) == (0, 1) assert index.slice_locs(1, 2) == (0, 2) # To be removed, replaced by test_interval_new.py (see #16316, #16386) def test_slice_locs_int64(self): self.slice_locs_cases([0, 1, 2]) # To be removed, replaced by test_interval_new.py (see #16316, #16386) def test_slice_locs_float64(self): self.slice_locs_cases([0.0, 1.0, 2.0]) # To be removed, replaced by test_interval_new.py (see #16316, #16386) def slice_locs_decreasing_cases(self, tuples): index = IntervalIndex.from_tuples(tuples) assert index.slice_locs(1.5, 0.5) == (1, 3) assert index.slice_locs(2, 0) == (1, 3) assert index.slice_locs(2, 1) == (1, 3) assert index.slice_locs(3, 1.1) == (0, 3) assert index.slice_locs(3, 3) == (0, 2) assert index.slice_locs(3.5, 3.3) == (0, 1) assert index.slice_locs(1, -3) == (2, 3) slice_locs = index.slice_locs(-1, -1) assert slice_locs[0] == slice_locs[1] # To be removed, replaced by test_interval_new.py (see #16316, #16386) def test_slice_locs_decreasing_int64(self): self.slice_locs_cases([(2, 4), (1, 3), (0, 2)]) # To be removed, replaced by test_interval_new.py (see #16316, #16386) def test_slice_locs_decreasing_float64(self): self.slice_locs_cases([(2., 4.), (1., 3.), (0., 2.)]) # To be removed, replaced by test_interval_new.py (see #16316, #16386) def test_slice_locs_fails(self): index = IntervalIndex.from_tuples([(1, 2), (0, 1), (2, 3)]) with pytest.raises(KeyError): index.slice_locs(1, 2) # To be removed, replaced by test_interval_new.py (see #16316, #16386) def test_get_loc_interval(self): assert self.index.get_loc(Interval(0, 1)) == 0 assert self.index.get_loc(Interval(0, 0.5)) == 0 assert self.index.get_loc(Interval(0, 1, 'left')) == 0 pytest.raises(KeyError, self.index.get_loc, Interval(2, 3)) pytest.raises(KeyError, self.index.get_loc, Interval(-1, 0, 'left')) # Make consistent with test_interval_new.py (see #16316, #16386) @pytest.mark.parametrize('item', [3, Interval(1, 4)]) def test_get_loc_length_one(self, item, closed): # GH 20921 index = IntervalIndex.from_tuples([(0, 5)], closed=closed) result = index.get_loc(item) assert result == 0 # To be removed, replaced by test_interval_new.py (see #16316, #16386) def test_get_indexer(self): actual = self.index.get_indexer([-1, 0, 0.5, 1, 1.5, 2, 3]) expected = np.array([-1, -1, 0, 0, 1, 1, -1], dtype='intp') tm.assert_numpy_array_equal(actual, expected) actual = self.index.get_indexer(self.index) expected = np.array([0, 1], dtype='intp') tm.assert_numpy_array_equal(actual, expected) index = IntervalIndex.from_breaks([0, 1, 2], closed='left') actual = index.get_indexer([-1, 0, 0.5, 1, 1.5, 2, 3]) expected = np.array([-1, 0, 0, 1, 1, -1, -1], dtype='intp') tm.assert_numpy_array_equal(actual, expected) actual = self.index.get_indexer(index[:1]) expected = np.array([0], dtype='intp') tm.assert_numpy_array_equal(actual, expected) actual = self.index.get_indexer(index) expected = np.array([-1, 1], dtype='intp') tm.assert_numpy_array_equal(actual, expected) # To be removed, replaced by test_interval_new.py (see #16316, #16386) def test_get_indexer_subintervals(self): # TODO: is this right? 
# return indexers for wholly contained subintervals target = IntervalIndex.from_breaks(np.linspace(0, 2, 5)) actual = self.index.get_indexer(target) expected = np.array([0, 0, 1, 1], dtype='p') tm.assert_numpy_array_equal(actual, expected) target = IntervalIndex.from_breaks([0, 0.67, 1.33, 2]) actual = self.index.get_indexer(target) expected = np.array([0, 0, 1, 1], dtype='intp') tm.assert_numpy_array_equal(actual, expected) actual = self.index.get_indexer(target[[0, -1]]) expected = np.array([0, 1], dtype='intp') tm.assert_numpy_array_equal(actual, expected) target = IntervalIndex.from_breaks([0, 0.33, 0.67, 1], closed='left') actual = self.index.get_indexer(target) expected = np.array([0, 0, 0], dtype='intp') tm.assert_numpy_array_equal(actual, expected) # Make consistent with test_interval_new.py (see #16316, #16386) @pytest.mark.parametrize('item', [ [3], np.arange(1, 5), [Interval(1, 4)], interval_range(1, 4)]) def test_get_indexer_length_one(self, item, closed): # GH 17284 index = IntervalIndex.from_tuples([(0, 5)], closed=closed) result = index.get_indexer(item) expected = np.array([0] * len(item), dtype='intp') tm.assert_numpy_array_equal(result, expected) # To be removed, replaced by test_interval_new.py (see #16316, #16386) def test_contains(self): # Only endpoints are valid. i = IntervalIndex.from_arrays([0, 1], [1, 2]) # Invalid assert 0 not in i assert 1 not in i assert 2 not in i # Valid assert Interval(0, 1) in i assert Interval(0, 2) in i assert Interval(0, 0.5) in i assert Interval(3, 5) not in i assert Interval(-1, 0, closed='left') not in i # To be removed, replaced by test_interval_new.py (see #16316, #16386) def testcontains(self): # can select values that are IN the range of a value i = IntervalIndex.from_arrays([0, 1], [1, 2]) assert i.contains(0.1) assert i.contains(0.5) assert i.contains(1) assert i.contains(Interval(0, 1)) assert i.contains(Interval(0, 2)) # these overlaps completely assert i.contains(Interval(0, 3)) assert i.contains(Interval(1, 3)) assert not i.contains(20) assert not i.contains(-20) def test_dropna(self, closed): expected = IntervalIndex.from_tuples( [(0.0, 1.0), (1.0, 2.0)], closed=closed) ii = IntervalIndex.from_tuples([(0, 1), (1, 2), np.nan], closed=closed) result = ii.dropna() tm.assert_index_equal(result, expected) ii = IntervalIndex.from_arrays( [0, 1, np.nan], [1, 2, np.nan], closed=closed) result = ii.dropna() tm.assert_index_equal(result, expected) # TODO: check this behavior is consistent with test_interval_new.py def test_non_contiguous(self, closed): index = IntervalIndex.from_tuples([(0, 1), (2, 3)], closed=closed) target = [0.5, 1.5, 2.5] actual = index.get_indexer(target) expected = np.array([0, -1, 1], dtype='intp') tm.assert_numpy_array_equal(actual, expected) assert 1.5 not in index def test_union(self, closed): index = self.create_index(closed=closed) other = IntervalIndex.from_breaks(range(5, 13), closed=closed) expected = IntervalIndex.from_breaks(range(13), closed=closed) result = index.union(other) tm.assert_index_equal(result, expected) result = other.union(index) tm.assert_index_equal(result, expected) tm.assert_index_equal(index.union(index), index) tm.assert_index_equal(index.union(index[:1]), index) # GH 19101: empty result, same dtype index = IntervalIndex(np.array([], dtype='int64'), closed=closed) result = index.union(index) tm.assert_index_equal(result, index) # GH 19101: empty result, different dtypes other = IntervalIndex(np.array([], dtype='float64'), closed=closed) result = index.union(other) 
tm.assert_index_equal(result, index) def test_intersection(self, closed): index = self.create_index(closed=closed) other = IntervalIndex.from_breaks(range(5, 13), closed=closed) expected = IntervalIndex.from_breaks(range(5, 11), closed=closed) result = index.intersection(other) tm.assert_index_equal(result, expected) result = other.intersection(index) tm.assert_index_equal(result, expected) tm.assert_index_equal(index.intersection(index), index) # GH 19101: empty result, same dtype other = IntervalIndex.from_breaks(range(300, 314), closed=closed) expected = IntervalIndex(np.array([], dtype='int64'), closed=closed) result = index.intersection(other) tm.assert_index_equal(result, expected) # GH 19101: empty result, different dtypes breaks = np.arange(300, 314, dtype='float64') other = IntervalIndex.from_breaks(breaks, closed=closed) result = index.intersection(other) tm.assert_index_equal(result, expected) def test_difference(self, closed): index = self.create_index(closed=closed) tm.assert_index_equal(index.difference(index[:1]), index[1:]) # GH 19101: empty result, same dtype result = index.difference(index) expected = IntervalIndex(np.array([], dtype='int64'), closed=closed) tm.assert_index_equal(result, expected) # GH 19101: empty result, different dtypes other = IntervalIndex.from_arrays(index.left.astype('float64'), index.right, closed=closed) result = index.difference(other) tm.assert_index_equal(result, expected) def test_symmetric_difference(self, closed): index = self.create_index(closed=closed) result = index[1:].symmetric_difference(index[:-1]) expected = IntervalIndex([index[0], index[-1]]) tm.assert_index_equal(result, expected) # GH 19101: empty result, same dtype result = index.symmetric_difference(index) expected = IntervalIndex(np.array([], dtype='int64'), closed=closed) tm.assert_index_equal(result, expected) # GH 19101: empty result, different dtypes other = IntervalIndex.from_arrays(index.left.astype('float64'), index.right, closed=closed) result = index.symmetric_difference(other) tm.assert_index_equal(result, expected) @pytest.mark.parametrize('op_name', [ 'union', 'intersection', 'difference', 'symmetric_difference']) def test_set_operation_errors(self, closed, op_name): index = self.create_index(closed=closed) set_op = getattr(index, op_name) # non-IntervalIndex msg = ('the other index needs to be an IntervalIndex too, but ' 'was type Int64Index') with tm.assert_raises_regex(TypeError, msg): set_op(Index([1, 2, 3])) # mixed closed msg = ('can only do set operations between two IntervalIndex objects ' 'that are closed on the same side') for other_closed in {'right', 'left', 'both', 'neither'} - {closed}: other = self.create_index(closed=other_closed) with tm.assert_raises_regex(ValueError, msg): set_op(other) # GH 19016: incompatible dtypes other = interval_range(Timestamp('20180101'), periods=9, closed=closed) msg = ('can only do {op} between two IntervalIndex objects that have ' 'compatible dtypes').format(op=op_name) with tm.assert_raises_regex(TypeError, msg): set_op(other) def test_isin(self, closed): index = self.create_index(closed=closed) expected = np.array([True] + [False] * (len(index) - 1)) result = index.isin(index[:1]) tm.assert_numpy_array_equal(result, expected) result = index.isin([index[0]]) tm.assert_numpy_array_equal(result, expected) other = IntervalIndex.from_breaks(np.arange(-2, 10), closed=closed) expected = np.array([True] * (len(index) - 1) + [False]) result = index.isin(other) tm.assert_numpy_array_equal(result, expected) result = 
index.isin(other.tolist()) tm.assert_numpy_array_equal(result, expected) for other_closed in {'right', 'left', 'both', 'neither'}: other = self.create_index(closed=other_closed) expected = np.repeat(closed == other_closed, len(index)) result = index.isin(other) tm.assert_numpy_array_equal(result, expected) result = index.isin(other.tolist()) tm.assert_numpy_array_equal(result, expected) def test_comparison(self): actual = Interval(0, 1) < self.index expected = np.array([False, True]) tm.assert_numpy_array_equal(actual, expected) actual = Interval(0.5, 1.5) < self.index expected = np.array([False, True]) tm.assert_numpy_array_equal(actual, expected) actual = self.index > Interval(0.5, 1.5) tm.assert_numpy_array_equal(actual, expected) actual = self.index == self.index expected = np.array([True, True]) tm.assert_numpy_array_equal(actual, expected) actual = self.index <= self.index tm.assert_numpy_array_equal(actual, expected) actual = self.index >= self.index tm.assert_numpy_array_equal(actual, expected) actual = self.index < self.index expected = np.array([False, False]) tm.assert_numpy_array_equal(actual, expected) actual = self.index > self.index tm.assert_numpy_array_equal(actual, expected) actual = self.index == IntervalIndex.from_breaks([0, 1, 2], 'left') tm.assert_numpy_array_equal(actual, expected) actual = self.index == self.index.values tm.assert_numpy_array_equal(actual, np.array([True, True])) actual = self.index.values == self.index tm.assert_numpy_array_equal(actual, np.array([True, True])) actual = self.index <= self.index.values tm.assert_numpy_array_equal(actual, np.array([True, True])) actual = self.index != self.index.values tm.assert_numpy_array_equal(actual, np.array([False, False])) actual = self.index > self.index.values tm.assert_numpy_array_equal(actual, np.array([False, False])) actual = self.index.values > self.index tm.assert_numpy_array_equal(actual, np.array([False, False])) # invalid comparisons actual = self.index == 0 tm.assert_numpy_array_equal(actual, np.array([False, False])) actual = self.index == self.index.left tm.assert_numpy_array_equal(actual, np.array([False, False])) with tm.assert_raises_regex(TypeError, 'unorderable types'): self.index > 0 with tm.assert_raises_regex(TypeError, 'unorderable types'): self.index <= 0 with pytest.raises(TypeError): self.index > np.arange(2) with pytest.raises(ValueError): self.index > np.arange(3) def test_missing_values(self, closed): idx = Index([np.nan, Interval(0, 1, closed=closed), Interval(1, 2, closed=closed)]) idx2 = IntervalIndex.from_arrays( [np.nan, 0, 1], [np.nan, 1, 2], closed=closed) assert idx.equals(idx2) with pytest.raises(ValueError): IntervalIndex.from_arrays( [np.nan, 0, 1], np.array([0, 1, 2]), closed=closed) tm.assert_numpy_array_equal(isna(idx), np.array([True, False, False])) def test_sort_values(self, closed): index = self.create_index(closed=closed) result = index.sort_values() tm.assert_index_equal(result, index) result = index.sort_values(ascending=False) tm.assert_index_equal(result, index[::-1]) # with nan index = IntervalIndex([Interval(1, 2), np.nan, Interval(0, 1)]) result = index.sort_values() expected = IntervalIndex([Interval(0, 1), Interval(1, 2), np.nan]) tm.assert_index_equal(result, expected) result = index.sort_values(ascending=False) expected = IntervalIndex([np.nan, Interval(1, 2), Interval(0, 1)]) tm.assert_index_equal(result, expected) @pytest.mark.parametrize('tz', [None, 'US/Eastern']) def test_datetime(self, tz): start = Timestamp('2000-01-01', tz=tz) dates = 
date_range(start=start, periods=10) index = IntervalIndex.from_breaks(dates) # test mid start = Timestamp('2000-01-01T12:00', tz=tz) expected = date_range(start=start, periods=9) tm.assert_index_equal(index.mid, expected) # __contains__ doesn't check individual points assert Timestamp('2000-01-01', tz=tz) not in index assert Timestamp('2000-01-01T12', tz=tz) not in index assert Timestamp('2000-01-02', tz=tz) not in index iv_true = Interval(Timestamp('2000-01-01T08', tz=tz), Timestamp('2000-01-01T18', tz=tz)) iv_false = Interval(Timestamp('1999-12-31', tz=tz), Timestamp('2000-01-01', tz=tz)) assert iv_true in index assert iv_false not in index # .contains does check individual points assert not index.contains(Timestamp('2000-01-01', tz=tz)) assert index.contains(Timestamp('2000-01-01T12', tz=tz)) assert index.contains(Timestamp('2000-01-02', tz=tz)) assert index.contains(iv_true) assert not index.contains(iv_false) # test get_indexer start = Timestamp('1999-12-31T12:00', tz=tz) target = date_range(start=start, periods=7, freq='12H') actual = index.get_indexer(target) expected = np.array([-1, -1, 0, 0, 1, 1, 2], dtype='intp') tm.assert_numpy_array_equal(actual, expected) start = Timestamp('2000-01-08T18:00', tz=tz) target = date_range(start=start, periods=7, freq='6H') actual = index.get_indexer(target) expected = np.array([7, 7, 8, 8, 8, 8, -1], dtype='intp') tm.assert_numpy_array_equal(actual, expected) def test_append(self, closed): index1 = IntervalIndex.from_arrays([0, 1], [1, 2], closed=closed) index2 = IntervalIndex.from_arrays([1, 2], [2, 3], closed=closed) result = index1.append(index2) expected = IntervalIndex.from_arrays( [0, 1, 1, 2], [1, 2, 2, 3], closed=closed) tm.assert_index_equal(result, expected) result = index1.append([index1, index2]) expected = IntervalIndex.from_arrays( [0, 1, 0, 1, 1, 2], [1, 2, 1, 2, 2, 3], closed=closed) tm.assert_index_equal(result, expected) msg = ('can only append two IntervalIndex objects that are closed ' 'on the same side') for other_closed in {'left', 'right', 'both', 'neither'} - {closed}: index_other_closed = IntervalIndex.from_arrays( [0, 1], [1, 2], closed=other_closed) with tm.assert_raises_regex(ValueError, msg): index1.append(index_other_closed) def test_is_non_overlapping_monotonic(self, closed): # Should be True in all cases tpls = [(0, 1), (2, 3), (4, 5), (6, 7)] idx = IntervalIndex.from_tuples(tpls, closed=closed) assert idx.is_non_overlapping_monotonic is True idx = IntervalIndex.from_tuples(tpls[::-1], closed=closed) assert idx.is_non_overlapping_monotonic is True # Should be False in all cases (overlapping) tpls = [(0, 2), (1, 3), (4, 5), (6, 7)] idx = IntervalIndex.from_tuples(tpls, closed=closed) assert idx.is_non_overlapping_monotonic is False idx = IntervalIndex.from_tuples(tpls[::-1], closed=closed) assert idx.is_non_overlapping_monotonic is False # Should be False in all cases (non-monotonic) tpls = [(0, 1), (2, 3), (6, 7), (4, 5)] idx = IntervalIndex.from_tuples(tpls, closed=closed) assert idx.is_non_overlapping_monotonic is False idx = IntervalIndex.from_tuples(tpls[::-1], closed=closed) assert idx.is_non_overlapping_monotonic is False # Should be False for closed='both', otherwise True (GH16560) if closed == 'both': idx = IntervalIndex.from_breaks(range(4), closed=closed) assert idx.is_non_overlapping_monotonic is False else: idx = IntervalIndex.from_breaks(range(4), closed=closed) assert idx.is_non_overlapping_monotonic is True @pytest.mark.parametrize('tuples', [ lzip(range(10), range(1, 11)), 
lzip(date_range('20170101', periods=10), date_range('20170101', periods=10)), lzip(timedelta_range('0 days', periods=10), timedelta_range('1 day', periods=10))]) def test_to_tuples(self, tuples): # GH 18756 idx = IntervalIndex.from_tuples(tuples) result = idx.to_tuples() expected = Index(com._asarray_tuplesafe(tuples)) tm.assert_index_equal(result, expected) @pytest.mark.parametrize('tuples', [ lzip(range(10), range(1, 11)) + [np.nan], lzip(date_range('20170101', periods=10), date_range('20170101', periods=10)) + [np.nan], lzip(timedelta_range('0 days', periods=10), timedelta_range('1 day', periods=10)) + [np.nan]]) @pytest.mark.parametrize('na_tuple', [True, False]) def test_to_tuples_na(self, tuples, na_tuple): # GH 18756 idx = IntervalIndex.from_tuples(tuples) result = idx.to_tuples(na_tuple=na_tuple) # check the non-NA portion expected_notna = Index(com._asarray_tuplesafe(tuples[:-1])) result_notna = result[:-1] tm.assert_index_equal(result_notna, expected_notna) # check the NA portion result_na = result[-1] if na_tuple: assert isinstance(result_na, tuple) assert len(result_na) == 2 assert all(isna(x) for x in result_na) else: assert isna(result_na) @pytest.mark.parametrize('new_closed', [ 'left', 'right', 'both', 'neither']) def test_set_closed(self, name, closed, new_closed): # GH 21670 index = interval_range(0, 5, closed=closed, name=name) result = index.set_closed(new_closed) expected = interval_range(0, 5, closed=new_closed, name=name) tm.assert_index_equal(result, expected) @pytest.mark.parametrize('bad_closed', ['foo', 10, 'LEFT', True, False]) def test_set_closed_errors(self, bad_closed): # GH 21670 index = interval_range(0, 5) msg = "invalid option for 'closed': {closed}".format(closed=bad_closed) with tm.assert_raises_regex(ValueError, msg): index.set_closed(bad_closed)
pratapvardhan/pandas
pandas/tests/indexes/interval/test_interval.py
pandas/tests/extension/base/casting.py
# -*- coding: utf-8 -*-

import numpy as np
import pytest

from pandas import Index, MultiIndex


@pytest.fixture
def idx():
    # a MultiIndex used to test the general functionality of this object
    major_axis = Index(['foo', 'bar', 'baz', 'qux'])
    minor_axis = Index(['one', 'two'])

    major_labels = np.array([0, 0, 1, 2, 3, 3])
    minor_labels = np.array([0, 1, 0, 1, 0, 1])
    index_names = ['first', 'second']
    index = MultiIndex(
        levels=[major_axis, minor_axis],
        labels=[major_labels, minor_labels],
        names=index_names,
        verify_integrity=False
    )
    return index


@pytest.fixture
def index_names():
    # names matching those in the idx fixture, for testing equality of the
    # names assigned to idx
    return ['first', 'second']


@pytest.fixture
def holder():
    # the MultiIndex constructor, used in constructor/pickle compatibility tests
    return MultiIndex


@pytest.fixture
def compat_props():
    # a MultiIndex must have these properties associated with it
    return ['shape', 'ndim', 'size']
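# ---------------------------------------------------------------------------
# Illustrative sketch, an assumption rather than code from this repo: test
# modules in the same package request these fixtures by name and pytest
# injects them. The test names below are hypothetical.
# ---------------------------------------------------------------------------
def test_names_match_fixture(idx, index_names):
    assert list(idx.names) == index_names


def test_compat_properties_present(idx, compat_props):
    for prop in compat_props:
        assert hasattr(idx, prop)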
date_range(start=start, periods=10) index = IntervalIndex.from_breaks(dates) # test mid start = Timestamp('2000-01-01T12:00', tz=tz) expected = date_range(start=start, periods=9) tm.assert_index_equal(index.mid, expected) # __contains__ doesn't check individual points assert Timestamp('2000-01-01', tz=tz) not in index assert Timestamp('2000-01-01T12', tz=tz) not in index assert Timestamp('2000-01-02', tz=tz) not in index iv_true = Interval(Timestamp('2000-01-01T08', tz=tz), Timestamp('2000-01-01T18', tz=tz)) iv_false = Interval(Timestamp('1999-12-31', tz=tz), Timestamp('2000-01-01', tz=tz)) assert iv_true in index assert iv_false not in index # .contains does check individual points assert not index.contains(Timestamp('2000-01-01', tz=tz)) assert index.contains(Timestamp('2000-01-01T12', tz=tz)) assert index.contains(Timestamp('2000-01-02', tz=tz)) assert index.contains(iv_true) assert not index.contains(iv_false) # test get_indexer start = Timestamp('1999-12-31T12:00', tz=tz) target = date_range(start=start, periods=7, freq='12H') actual = index.get_indexer(target) expected = np.array([-1, -1, 0, 0, 1, 1, 2], dtype='intp') tm.assert_numpy_array_equal(actual, expected) start = Timestamp('2000-01-08T18:00', tz=tz) target = date_range(start=start, periods=7, freq='6H') actual = index.get_indexer(target) expected = np.array([7, 7, 8, 8, 8, 8, -1], dtype='intp') tm.assert_numpy_array_equal(actual, expected) def test_append(self, closed): index1 = IntervalIndex.from_arrays([0, 1], [1, 2], closed=closed) index2 = IntervalIndex.from_arrays([1, 2], [2, 3], closed=closed) result = index1.append(index2) expected = IntervalIndex.from_arrays( [0, 1, 1, 2], [1, 2, 2, 3], closed=closed) tm.assert_index_equal(result, expected) result = index1.append([index1, index2]) expected = IntervalIndex.from_arrays( [0, 1, 0, 1, 1, 2], [1, 2, 1, 2, 2, 3], closed=closed) tm.assert_index_equal(result, expected) msg = ('can only append two IntervalIndex objects that are closed ' 'on the same side') for other_closed in {'left', 'right', 'both', 'neither'} - {closed}: index_other_closed = IntervalIndex.from_arrays( [0, 1], [1, 2], closed=other_closed) with tm.assert_raises_regex(ValueError, msg): index1.append(index_other_closed) def test_is_non_overlapping_monotonic(self, closed): # Should be True in all cases tpls = [(0, 1), (2, 3), (4, 5), (6, 7)] idx = IntervalIndex.from_tuples(tpls, closed=closed) assert idx.is_non_overlapping_monotonic is True idx = IntervalIndex.from_tuples(tpls[::-1], closed=closed) assert idx.is_non_overlapping_monotonic is True # Should be False in all cases (overlapping) tpls = [(0, 2), (1, 3), (4, 5), (6, 7)] idx = IntervalIndex.from_tuples(tpls, closed=closed) assert idx.is_non_overlapping_monotonic is False idx = IntervalIndex.from_tuples(tpls[::-1], closed=closed) assert idx.is_non_overlapping_monotonic is False # Should be False in all cases (non-monotonic) tpls = [(0, 1), (2, 3), (6, 7), (4, 5)] idx = IntervalIndex.from_tuples(tpls, closed=closed) assert idx.is_non_overlapping_monotonic is False idx = IntervalIndex.from_tuples(tpls[::-1], closed=closed) assert idx.is_non_overlapping_monotonic is False # Should be False for closed='both', otherwise True (GH16560) if closed == 'both': idx = IntervalIndex.from_breaks(range(4), closed=closed) assert idx.is_non_overlapping_monotonic is False else: idx = IntervalIndex.from_breaks(range(4), closed=closed) assert idx.is_non_overlapping_monotonic is True @pytest.mark.parametrize('tuples', [ lzip(range(10), range(1, 11)), 
lzip(date_range('20170101', periods=10), date_range('20170101', periods=10)), lzip(timedelta_range('0 days', periods=10), timedelta_range('1 day', periods=10))]) def test_to_tuples(self, tuples): # GH 18756 idx = IntervalIndex.from_tuples(tuples) result = idx.to_tuples() expected = Index(com._asarray_tuplesafe(tuples)) tm.assert_index_equal(result, expected) @pytest.mark.parametrize('tuples', [ lzip(range(10), range(1, 11)) + [np.nan], lzip(date_range('20170101', periods=10), date_range('20170101', periods=10)) + [np.nan], lzip(timedelta_range('0 days', periods=10), timedelta_range('1 day', periods=10)) + [np.nan]]) @pytest.mark.parametrize('na_tuple', [True, False]) def test_to_tuples_na(self, tuples, na_tuple): # GH 18756 idx = IntervalIndex.from_tuples(tuples) result = idx.to_tuples(na_tuple=na_tuple) # check the non-NA portion expected_notna = Index(com._asarray_tuplesafe(tuples[:-1])) result_notna = result[:-1] tm.assert_index_equal(result_notna, expected_notna) # check the NA portion result_na = result[-1] if na_tuple: assert isinstance(result_na, tuple) assert len(result_na) == 2 assert all(isna(x) for x in result_na) else: assert isna(result_na) @pytest.mark.parametrize('new_closed', [ 'left', 'right', 'both', 'neither']) def test_set_closed(self, name, closed, new_closed): # GH 21670 index = interval_range(0, 5, closed=closed, name=name) result = index.set_closed(new_closed) expected = interval_range(0, 5, closed=new_closed, name=name) tm.assert_index_equal(result, expected) @pytest.mark.parametrize('bad_closed', ['foo', 10, 'LEFT', True, False]) def test_set_closed_errors(self, bad_closed): # GH 21670 index = interval_range(0, 5) msg = "invalid option for 'closed': {closed}".format(closed=bad_closed) with tm.assert_raises_regex(ValueError, msg): index.set_closed(bad_closed)
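The tests above exercise IntervalIndex lookup (get_loc, get_indexer, slice_locs) and the set_closed API; below is a minimal illustrative sketch of those behaviours, not part of the test suite, assuming only that pandas and numpy are importable.

import numpy as np
import pandas as pd

# Two right-closed intervals: (0, 1] and (1, 2]
idx = pd.IntervalIndex.from_breaks([0, 1, 2], closed='right')

# get_loc returns the position of the interval containing a scalar
assert idx.get_loc(0.5) == 0

# get_indexer maps each target to an interval position, -1 when nothing matches
indexer = idx.get_indexer([0.5, 1.5, 3.0])
assert list(indexer) == [0, 1, -1]

# slice_locs gives the (start, stop) positions covering a label range
assert idx.slice_locs(0.5, 1.5) == (0, 2)

# set_closed (GH 21670) returns a new index with a different closed side
assert idx.set_closed('both').closed == 'both'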
pratapvardhan/pandas
pandas/tests/indexes/interval/test_interval.py
pandas/tests/indexes/multi/conftest.py
import itertools import numpy as np import pytest import pandas as pd from pandas.core.internals import ExtensionBlock from .base import BaseExtensionTests class BaseReshapingTests(BaseExtensionTests): """Tests for reshaping and concatenation.""" @pytest.mark.parametrize('in_frame', [True, False]) def test_concat(self, data, in_frame): wrapped = pd.Series(data) if in_frame: wrapped = pd.DataFrame(wrapped) result = pd.concat([wrapped, wrapped], ignore_index=True) assert len(result) == len(data) * 2 if in_frame: dtype = result.dtypes[0] else: dtype = result.dtype assert dtype == data.dtype assert isinstance(result._data.blocks[0], ExtensionBlock) @pytest.mark.parametrize('in_frame', [True, False]) def test_concat_all_na_block(self, data_missing, in_frame): valid_block = pd.Series(data_missing.take([1, 1]), index=[0, 1]) na_block = pd.Series(data_missing.take([0, 0]), index=[2, 3]) if in_frame: valid_block = pd.DataFrame({"a": valid_block}) na_block = pd.DataFrame({"a": na_block}) result = pd.concat([valid_block, na_block]) if in_frame: expected = pd.DataFrame({"a": data_missing.take([1, 1, 0, 0])}) self.assert_frame_equal(result, expected) else: expected = pd.Series(data_missing.take([1, 1, 0, 0])) self.assert_series_equal(result, expected) def test_concat_mixed_dtypes(self, data): # https://github.com/pandas-dev/pandas/issues/20762 df1 = pd.DataFrame({'A': data[:3]}) df2 = pd.DataFrame({"A": [1, 2, 3]}) df3 = pd.DataFrame({"A": ['a', 'b', 'c']}).astype('category') dfs = [df1, df2, df3] # dataframes result = pd.concat(dfs) expected = pd.concat([x.astype(object) for x in dfs]) self.assert_frame_equal(result, expected) # series result = pd.concat([x['A'] for x in dfs]) expected = pd.concat([x['A'].astype(object) for x in dfs]) self.assert_series_equal(result, expected) # simple test for just EA and one other result = pd.concat([df1, df2]) expected = pd.concat([df1.astype('object'), df2.astype('object')]) self.assert_frame_equal(result, expected) result = pd.concat([df1['A'], df2['A']]) expected = pd.concat([df1['A'].astype('object'), df2['A'].astype('object')]) self.assert_series_equal(result, expected) def test_concat_columns(self, data, na_value): df1 = pd.DataFrame({'A': data[:3]}) df2 = pd.DataFrame({'B': [1, 2, 3]}) expected = pd.DataFrame({'A': data[:3], 'B': [1, 2, 3]}) result = pd.concat([df1, df2], axis=1) self.assert_frame_equal(result, expected) result = pd.concat([df1['A'], df2['B']], axis=1) self.assert_frame_equal(result, expected) # non-aligned df2 = pd.DataFrame({'B': [1, 2, 3]}, index=[1, 2, 3]) expected = pd.DataFrame({ 'A': data._from_sequence(list(data[:3]) + [na_value], dtype=data.dtype), 'B': [np.nan, 1, 2, 3]}) result = pd.concat([df1, df2], axis=1) self.assert_frame_equal(result, expected) result = pd.concat([df1['A'], df2['B']], axis=1) self.assert_frame_equal(result, expected) def test_align(self, data, na_value): a = data[:3] b = data[2:5] r1, r2 = pd.Series(a).align(pd.Series(b, index=[1, 2, 3])) # Assumes that the ctor can take a list of scalars of the type e1 = pd.Series(data._from_sequence(list(a) + [na_value], dtype=data.dtype)) e2 = pd.Series(data._from_sequence([na_value] + list(b), dtype=data.dtype)) self.assert_series_equal(r1, e1) self.assert_series_equal(r2, e2) def test_align_frame(self, data, na_value): a = data[:3] b = data[2:5] r1, r2 = pd.DataFrame({'A': a}).align( pd.DataFrame({'A': b}, index=[1, 2, 3]) ) # Assumes that the ctor can take a list of scalars of the type e1 = pd.DataFrame({'A': data._from_sequence(list(a) + [na_value], 
dtype=data.dtype)}) e2 = pd.DataFrame({'A': data._from_sequence([na_value] + list(b), dtype=data.dtype)}) self.assert_frame_equal(r1, e1) self.assert_frame_equal(r2, e2) def test_align_series_frame(self, data, na_value): # https://github.com/pandas-dev/pandas/issues/20576 ser = pd.Series(data, name='a') df = pd.DataFrame({"col": np.arange(len(ser) + 1)}) r1, r2 = ser.align(df) e1 = pd.Series(data._from_sequence(list(data) + [na_value], dtype=data.dtype), name=ser.name) self.assert_series_equal(r1, e1) self.assert_frame_equal(r2, df) def test_set_frame_expand_regular_with_extension(self, data): df = pd.DataFrame({"A": [1] * len(data)}) df['B'] = data expected = pd.DataFrame({"A": [1] * len(data), "B": data}) self.assert_frame_equal(df, expected) def test_set_frame_expand_extension_with_regular(self, data): df = pd.DataFrame({'A': data}) df['B'] = [1] * len(data) expected = pd.DataFrame({"A": data, "B": [1] * len(data)}) self.assert_frame_equal(df, expected) def test_set_frame_overwrite_object(self, data): # https://github.com/pandas-dev/pandas/issues/20555 df = pd.DataFrame({"A": [1] * len(data)}, dtype=object) df['A'] = data assert df.dtypes['A'] == data.dtype def test_merge(self, data, na_value): # GH-20743 df1 = pd.DataFrame({'ext': data[:3], 'int1': [1, 2, 3], 'key': [0, 1, 2]}) df2 = pd.DataFrame({'int2': [1, 2, 3, 4], 'key': [0, 0, 1, 3]}) res = pd.merge(df1, df2) exp = pd.DataFrame( {'int1': [1, 1, 2], 'int2': [1, 2, 3], 'key': [0, 0, 1], 'ext': data._from_sequence([data[0], data[0], data[1]], dtype=data.dtype)}) self.assert_frame_equal(res, exp[['ext', 'int1', 'key', 'int2']]) res = pd.merge(df1, df2, how='outer') exp = pd.DataFrame( {'int1': [1, 1, 2, 3, np.nan], 'int2': [1, 2, 3, np.nan, 4], 'key': [0, 0, 1, 2, 3], 'ext': data._from_sequence( [data[0], data[0], data[1], data[2], na_value], dtype=data.dtype)}) self.assert_frame_equal(res, exp[['ext', 'int1', 'key', 'int2']]) def test_merge_on_extension_array(self, data): # GH 23020 a, b = data[:2] key = type(data)._from_sequence([a, b], dtype=data.dtype) df = pd.DataFrame({"key": key, "val": [1, 2]}) result = pd.merge(df, df, on='key') expected = pd.DataFrame({"key": key, "val_x": [1, 2], "val_y": [1, 2]}) self.assert_frame_equal(result, expected) # order result = pd.merge(df.iloc[[1, 0]], df, on='key') expected = expected.iloc[[1, 0]].reset_index(drop=True) self.assert_frame_equal(result, expected) def test_merge_on_extension_array_duplicates(self, data): # GH 23020 a, b = data[:2] key = type(data)._from_sequence([a, b, a], dtype=data.dtype) df1 = pd.DataFrame({"key": key, "val": [1, 2, 3]}) df2 = pd.DataFrame({"key": key, "val": [1, 2, 3]}) result = pd.merge(df1, df2, on='key') expected = pd.DataFrame({ "key": key.take([0, 0, 0, 0, 1]), "val_x": [1, 1, 3, 3, 2], "val_y": [1, 3, 1, 3, 2], }) self.assert_frame_equal(result, expected) @pytest.mark.parametrize("columns", [ ["A", "B"], pd.MultiIndex.from_tuples([('A', 'a'), ('A', 'b')], names=['outer', 'inner']), ]) def test_stack(self, data, columns): df = pd.DataFrame({"A": data[:5], "B": data[:5]}) df.columns = columns result = df.stack() expected = df.astype(object).stack() # we need a second astype(object), in case the constructor inferred # object -> specialized, as is done for period. expected = expected.astype(object) if isinstance(expected, pd.Series): assert result.dtype == df.iloc[:, 0].dtype else: assert all(result.dtypes == df.iloc[:, 0].dtype) result = result.astype(object) self.assert_equal(result, expected) @pytest.mark.parametrize("index", [ # Two levels, uniform. 
pd.MultiIndex.from_product(([['A', 'B'], ['a', 'b']]), names=['a', 'b']), # non-uniform pd.MultiIndex.from_tuples([('A', 'a'), ('A', 'b'), ('B', 'b')]), # three levels, non-uniform pd.MultiIndex.from_product([('A', 'B'), ('a', 'b', 'c'), (0, 1, 2)]), pd.MultiIndex.from_tuples([ ('A', 'a', 1), ('A', 'b', 0), ('A', 'a', 0), ('B', 'a', 0), ('B', 'c', 1), ]), ]) @pytest.mark.parametrize("obj", ["series", "frame"]) def test_unstack(self, data, index, obj): data = data[:len(index)] if obj == "series": ser = pd.Series(data, index=index) else: ser = pd.DataFrame({"A": data, "B": data}, index=index) n = index.nlevels levels = list(range(n)) # [0, 1, 2] # [(0,), (1,), (2,), (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] combinations = itertools.chain.from_iterable( itertools.permutations(levels, i) for i in range(1, n) ) for level in combinations: result = ser.unstack(level=level) assert all(isinstance(result[col].array, type(data)) for col in result.columns) expected = ser.astype(object).unstack(level=level) result = result.astype(object) self.assert_frame_equal(result, expected)
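BaseReshapingTests above checks that concatenation preserves an extension dtype and only falls back to object when dtypes are mixed; here is a small illustrative sketch of that contract, using a categorical column as a stand-in for an arbitrary extension array (the assumption being that any concrete EA behaves analogously).

import pandas as pd

df = pd.DataFrame({'A': pd.Categorical(['a', 'b', 'c'])})

# like-typed concat preserves the extension dtype
same = pd.concat([df, df], ignore_index=True)
assert str(same['A'].dtype) == 'category'

# mixing with an incompatible dtype upcasts to object (see GH 20762 above)
mixed = pd.concat([df, pd.DataFrame({'A': [1, 2, 3]})], ignore_index=True)
assert mixed['A'].dtype == object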
# -*- coding: utf-8 -*- import operator import numpy as np import pytest import pandas as pd from pandas import Series, compat from pandas.core.indexes.period import IncompatibleFrequency import pandas.util.testing as tm def _permute(obj): return obj.take(np.random.permutation(len(obj))) class TestSeriesFlexArithmetic(object): @pytest.mark.parametrize( 'ts', [ (lambda x: x, lambda x: x * 2, False), (lambda x: x, lambda x: x[::2], False), (lambda x: x, lambda x: 5, True), (lambda x: tm.makeFloatSeries(), lambda x: tm.makeFloatSeries(), True) ]) @pytest.mark.parametrize('opname', ['add', 'sub', 'mul', 'floordiv', 'truediv', 'div', 'pow']) def test_flex_method_equivalence(self, opname, ts): # check that Series.{opname} behaves like Series.__{opname}__, tser = tm.makeTimeSeries().rename('ts') series = ts[0](tser) other = ts[1](tser) check_reverse = ts[2] if opname == 'div' and compat.PY3: pytest.skip('div test only for Py3') op = getattr(Series, opname) if op == 'div': alt = operator.truediv else: alt = getattr(operator, opname) result = op(series, other) expected = alt(series, other) tm.assert_almost_equal(result, expected) if check_reverse: rop = getattr(Series, "r" + opname) result = rop(series, other) expected = alt(other, series) tm.assert_almost_equal(result, expected) class TestSeriesArithmetic(object): # Some of these may end up in tests/arithmetic, but are not yet sorted def test_add_series_with_period_index(self): rng = pd.period_range('1/1/2000', '1/1/2010', freq='A') ts = Series(np.random.randn(len(rng)), index=rng) result = ts + ts[::2] expected = ts + ts expected[1::2] = np.nan tm.assert_series_equal(result, expected) result = ts + _permute(ts[::2]) tm.assert_series_equal(result, expected) msg = "Input has different freq=D from PeriodIndex\\(freq=A-DEC\\)" with pytest.raises(IncompatibleFrequency, match=msg): ts + ts.asfreq('D', how="end") # ------------------------------------------------------------------ # Comparisons class TestSeriesFlexComparison(object): def test_comparison_flex_basic(self): left = pd.Series(np.random.randn(10)) right = pd.Series(np.random.randn(10)) tm.assert_series_equal(left.eq(right), left == right) tm.assert_series_equal(left.ne(right), left != right) tm.assert_series_equal(left.le(right), left < right) tm.assert_series_equal(left.lt(right), left <= right) tm.assert_series_equal(left.gt(right), left > right) tm.assert_series_equal(left.ge(right), left >= right) # axis for axis in [0, None, 'index']: tm.assert_series_equal(left.eq(right, axis=axis), left == right) tm.assert_series_equal(left.ne(right, axis=axis), left != right) tm.assert_series_equal(left.le(right, axis=axis), left < right) tm.assert_series_equal(left.lt(right, axis=axis), left <= right) tm.assert_series_equal(left.gt(right, axis=axis), left > right) tm.assert_series_equal(left.ge(right, axis=axis), left >= right) # msg = 'No axis named 1 for object type' for op in ['eq', 'ne', 'le', 'le', 'gt', 'ge']: with pytest.raises(ValueError, match=msg): getattr(left, op)(right, axis=1) class TestSeriesComparison(object): def test_comparison_different_length(self): a = Series(['a', 'b', 'c']) b = Series(['b', 'a']) with pytest.raises(ValueError): a < b a = Series([1, 2]) b = Series([2, 3, 4]) with pytest.raises(ValueError): a == b @pytest.mark.parametrize('opname', ['eq', 'ne', 'gt', 'lt', 'ge', 'le']) def test_ser_flex_cmp_return_dtypes(self, opname): # GH#15115 ser = Series([1, 3, 2], index=range(3)) const = 2 result = getattr(ser, opname)(const).get_dtype_counts() 
tm.assert_series_equal(result, Series([1], ['bool'])) @pytest.mark.parametrize('opname', ['eq', 'ne', 'gt', 'lt', 'ge', 'le']) def test_ser_flex_cmp_return_dtypes_empty(self, opname): # GH#15115 empty Series case ser = Series([1, 3, 2], index=range(3)) empty = ser.iloc[:0] const = 2 result = getattr(empty, opname)(const).get_dtype_counts() tm.assert_series_equal(result, Series([1], ['bool'])) @pytest.mark.parametrize('op', [operator.eq, operator.ne, operator.le, operator.lt, operator.ge, operator.gt]) @pytest.mark.parametrize('names', [(None, None, None), ('foo', 'bar', None), ('baz', 'baz', 'baz')]) def test_ser_cmp_result_names(self, names, op): # datetime64 dtype dti = pd.date_range('1949-06-07 03:00:00', freq='H', periods=5, name=names[0]) ser = Series(dti).rename(names[1]) result = op(ser, dti) assert result.name == names[2] # datetime64tz dtype dti = dti.tz_localize('US/Central') ser = Series(dti).rename(names[1]) result = op(ser, dti) assert result.name == names[2] # timedelta64 dtype tdi = dti - dti.shift(1) ser = Series(tdi).rename(names[1]) result = op(ser, tdi) assert result.name == names[2] # categorical if op in [operator.eq, operator.ne]: # categorical dtype comparisons raise for inequalities cidx = tdi.astype('category') ser = Series(cidx).rename(names[1]) result = op(ser, cidx) assert result.name == names[2]
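A brief illustrative sketch, not taken from the test suite, of the equivalences asserted above: the flex methods mirror the operators and align on the index; it assumes pandas.util.testing is importable, as in the file above.

import pandas as pd
import pandas.util.testing as tm

s = pd.Series([1.0, 2.0, 3.0], index=['a', 'b', 'c'])
other = pd.Series([10.0, 20.0], index=['b', 'c'])

# flex arithmetic matches the operator and aligns on the index (NaN at 'a')
tm.assert_series_equal(s.add(other), s + other)
tm.assert_series_equal(s.radd(other), other + s)

# flex comparisons match the operators and return boolean Series
tm.assert_series_equal(s.le(s), s <= s)
tm.assert_series_equal(s.ne(s), s != s)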
MJuddBooth/pandas
pandas/tests/series/test_arithmetic.py
pandas/tests/extension/base/reshaping.py
# flake8: noqa from .common import ( is_array_like, is_bool, is_bool_dtype, is_categorical, is_categorical_dtype, is_complex, is_complex_dtype, is_datetime64_any_dtype, is_datetime64_dtype, is_datetime64_ns_dtype, is_datetime64tz_dtype, is_datetimetz, is_dict_like, is_dtype_equal, is_extension_array_dtype, is_extension_type, is_file_like, is_float, is_float_dtype, is_hashable, is_int64_dtype, is_integer, is_integer_dtype, is_interval, is_interval_dtype, is_iterator, is_list_like, is_named_tuple, is_number, is_numeric_dtype, is_object_dtype, is_period, is_period_dtype, is_re, is_re_compilable, is_scalar, is_signed_integer_dtype, is_sparse, is_string_dtype, is_timedelta64_dtype, is_timedelta64_ns_dtype, is_unsigned_integer_dtype, pandas_dtype)
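These re-exports back pandas' public dtype-introspection helpers, exposed as pandas.api.types in the versions this code targets (an assumption worth checking against your install); a minimal usage sketch:

import numpy as np
import pandas as pd
from pandas.api import types

assert types.is_integer_dtype(np.dtype('int64'))          # dtype checks
assert types.is_categorical_dtype(pd.Categorical(['a', 'b']))
assert types.is_list_like([1, 2]) and not types.is_list_like('abc')
assert types.is_scalar(1.5) and not types.is_scalar([1.5])
assert types.pandas_dtype('datetime64[ns]') == np.dtype('M8[ns]')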
MJuddBooth/pandas
pandas/tests/series/test_arithmetic.py
pandas/core/dtypes/api.py
# -*- coding: utf-8 -*- from collections import defaultdict from functools import partial import itertools import operator import re import numpy as np from pandas._libs import internals as libinternals, lib from pandas.compat import map, range, zip from pandas.util._validators import validate_bool_kwarg from pandas.core.dtypes.cast import ( find_common_type, infer_dtype_from_scalar, maybe_convert_objects, maybe_promote) from pandas.core.dtypes.common import ( _NS_DTYPE, is_datetimelike_v_numeric, is_extension_array_dtype, is_extension_type, is_list_like, is_numeric_v_string_like, is_scalar) import pandas.core.dtypes.concat as _concat from pandas.core.dtypes.generic import ABCExtensionArray, ABCSeries from pandas.core.dtypes.missing import isna import pandas.core.algorithms as algos from pandas.core.arrays.sparse import _maybe_to_sparse from pandas.core.base import PandasObject from pandas.core.index import Index, MultiIndex, ensure_index from pandas.core.indexing import maybe_convert_indices from pandas.io.formats.printing import pprint_thing from .blocks import ( Block, CategoricalBlock, DatetimeTZBlock, ExtensionBlock, ObjectValuesExtensionBlock, _extend_blocks, _merge_blocks, _safe_reshape, get_block_type, make_block) from .concat import ( # all for concatenate_block_managers combine_concat_plans, concatenate_join_units, get_mgr_concatenation_plan, is_uniform_join_units) # TODO: flexible with index=None and/or items=None class BlockManager(PandasObject): """ Core internal data structure to implement DataFrame, Series, Panel, etc. Manage a bunch of labeled 2D mixed-type ndarrays. Essentially it's a lightweight blocked set of labeled data to be manipulated by the DataFrame public API class Attributes ---------- shape ndim axes values items Methods ------- set_axis(axis, new_labels) copy(deep=True) get_dtype_counts get_ftype_counts get_dtypes get_ftypes apply(func, axes, block_filter_fn) get_bool_data get_numeric_data get_slice(slice_like, axis) get(label) iget(loc) take(indexer, axis) reindex_axis(new_labels, axis) reindex_indexer(new_labels, indexer, axis) delete(label) insert(loc, label, value) set(label, value) Parameters ---------- Notes ----- This is *not* a public API class """ __slots__ = ['axes', 'blocks', '_ndim', '_shape', '_known_consolidated', '_is_consolidated', '_blknos', '_blklocs'] def __init__(self, blocks, axes, do_integrity_check=True): self.axes = [ensure_index(ax) for ax in axes] self.blocks = tuple(blocks) for block in blocks: if block.is_sparse: if len(block.mgr_locs) != 1: raise AssertionError("Sparse block refers to multiple " "items") else: if self.ndim != block.ndim: raise AssertionError( 'Number of Block dimensions ({block}) must equal ' 'number of axes ({self})'.format(block=block.ndim, self=self.ndim)) if do_integrity_check: self._verify_integrity() self._consolidate_check() self._rebuild_blknos_and_blklocs() def make_empty(self, axes=None): """ return an empty BlockManager with the items axis of len 0 """ if axes is None: axes = [ensure_index([])] + [ensure_index(a) for a in self.axes[1:]] # preserve dtype if possible if self.ndim == 1: blocks = np.array([], dtype=self.array_dtype) else: blocks = [] return self.__class__(blocks, axes) def __nonzero__(self): return True # Python3 compat __bool__ = __nonzero__ @property def shape(self): return tuple(len(ax) for ax in self.axes) @property def ndim(self): return len(self.axes) def set_axis(self, axis, new_labels): new_labels = ensure_index(new_labels) old_len = len(self.axes[axis]) new_len = len(new_labels) if 
new_len != old_len: raise ValueError( 'Length mismatch: Expected axis has {old} elements, new ' 'values have {new} elements'.format(old=old_len, new=new_len)) self.axes[axis] = new_labels def rename_axis(self, mapper, axis, copy=True, level=None): """ Rename one of axes. Parameters ---------- mapper : unary callable axis : int copy : boolean, default True level : int, default None """ obj = self.copy(deep=copy) obj.set_axis(axis, _transform_index(self.axes[axis], mapper, level)) return obj @property def _is_single_block(self): if self.ndim == 1: return True if len(self.blocks) != 1: return False blk = self.blocks[0] return (blk.mgr_locs.is_slice_like and blk.mgr_locs.as_slice == slice(0, len(self), 1)) def _rebuild_blknos_and_blklocs(self): """ Update mgr._blknos / mgr._blklocs. """ new_blknos = np.empty(self.shape[0], dtype=np.int64) new_blklocs = np.empty(self.shape[0], dtype=np.int64) new_blknos.fill(-1) new_blklocs.fill(-1) for blkno, blk in enumerate(self.blocks): rl = blk.mgr_locs new_blknos[rl.indexer] = blkno new_blklocs[rl.indexer] = np.arange(len(rl)) if (new_blknos == -1).any(): raise AssertionError("Gaps in blk ref_locs") self._blknos = new_blknos self._blklocs = new_blklocs @property def items(self): return self.axes[0] def _get_counts(self, f): """ return a dict of the counts of the function in BlockManager """ self._consolidate_inplace() counts = dict() for b in self.blocks: v = f(b) counts[v] = counts.get(v, 0) + b.shape[0] return counts def get_dtype_counts(self): return self._get_counts(lambda b: b.dtype.name) def get_ftype_counts(self): return self._get_counts(lambda b: b.ftype) def get_dtypes(self): dtypes = np.array([blk.dtype for blk in self.blocks]) return algos.take_1d(dtypes, self._blknos, allow_fill=False) def get_ftypes(self): ftypes = np.array([blk.ftype for blk in self.blocks]) return algos.take_1d(ftypes, self._blknos, allow_fill=False) def __getstate__(self): block_values = [b.values for b in self.blocks] block_items = [self.items[b.mgr_locs.indexer] for b in self.blocks] axes_array = [ax for ax in self.axes] extra_state = { '0.14.1': { 'axes': axes_array, 'blocks': [dict(values=b.values, mgr_locs=b.mgr_locs.indexer) for b in self.blocks] } } # First three elements of the state are to maintain forward # compatibility with 0.13.1. return axes_array, block_values, block_items, extra_state def __setstate__(self, state): def unpickle_block(values, mgr_locs): return make_block(values, placement=mgr_locs) if (isinstance(state, tuple) and len(state) >= 4 and '0.14.1' in state[3]): state = state[3]['0.14.1'] self.axes = [ensure_index(ax) for ax in state['axes']] self.blocks = tuple(unpickle_block(b['values'], b['mgr_locs']) for b in state['blocks']) else: # discard anything after 3rd, support beta pickling format for a # little while longer ax_arrays, bvalues, bitems = state[:3] self.axes = [ensure_index(ax) for ax in ax_arrays] if len(bitems) == 1 and self.axes[0].equals(bitems[0]): # This is a workaround for pre-0.14.1 pickles that didn't # support unpickling multi-block frames/panels with non-unique # columns/items, because given a manager with items ["a", "b", # "a"] there's no way of knowing which block's "a" is where. # # Single-block case can be supported under the assumption that # block items corresponded to manager items 1-to-1. 
all_mgr_locs = [slice(0, len(bitems[0]))] else: all_mgr_locs = [self.axes[0].get_indexer(blk_items) for blk_items in bitems] self.blocks = tuple( unpickle_block(values, mgr_locs) for values, mgr_locs in zip(bvalues, all_mgr_locs)) self._post_setstate() def _post_setstate(self): self._is_consolidated = False self._known_consolidated = False self._rebuild_blknos_and_blklocs() def __len__(self): return len(self.items) def __unicode__(self): output = pprint_thing(self.__class__.__name__) for i, ax in enumerate(self.axes): if i == 0: output += u'\nItems: {ax}'.format(ax=ax) else: output += u'\nAxis {i}: {ax}'.format(i=i, ax=ax) for block in self.blocks: output += u'\n{block}'.format(block=pprint_thing(block)) return output def _verify_integrity(self): mgr_shape = self.shape tot_items = sum(len(x.mgr_locs) for x in self.blocks) for block in self.blocks: if block._verify_integrity and block.shape[1:] != mgr_shape[1:]: construction_error(tot_items, block.shape[1:], self.axes) if len(self.items) != tot_items: raise AssertionError('Number of manager items must equal union of ' 'block items\n# manager items: {0}, # ' 'tot_items: {1}'.format( len(self.items), tot_items)) def apply(self, f, axes=None, filter=None, do_integrity_check=False, consolidate=True, **kwargs): """ iterate over the blocks, collect and create a new block manager Parameters ---------- f : the callable or function name to operate on at the block level axes : optional (if not supplied, use self.axes) filter : list, if supplied, only call the block if the filter is in the block do_integrity_check : boolean, default False. Do the block manager integrity check consolidate: boolean, default True. Join together blocks having same dtype Returns ------- Block Manager (new object) """ result_blocks = [] # filter kwarg is used in replace-* family of methods if filter is not None: filter_locs = set(self.items.get_indexer_for(filter)) if len(filter_locs) == len(self.items): # All items are included, as if there were no filtering filter = None else: kwargs['filter'] = filter_locs if consolidate: self._consolidate_inplace() if f == 'where': align_copy = True if kwargs.get('align', True): align_keys = ['other', 'cond'] else: align_keys = ['cond'] elif f == 'putmask': align_copy = False if kwargs.get('align', True): align_keys = ['new', 'mask'] else: align_keys = ['mask'] elif f == 'fillna': # fillna internally does putmask, maybe it's better to do this # at mgr, not block level? align_copy = False align_keys = ['value'] else: align_keys = [] # TODO(EA): may interfere with ExtensionBlock.setitem for blocks # with a .values attribute. aligned_args = {k: kwargs[k] for k in align_keys if hasattr(kwargs[k], 'values') and not isinstance(kwargs[k], ABCExtensionArray)} for b in self.blocks: if filter is not None: if not b.mgr_locs.isin(filter_locs).any(): result_blocks.append(b) continue if aligned_args: b_items = self.items[b.mgr_locs.indexer] for k, obj in aligned_args.items(): axis = getattr(obj, '_info_axis_number', 0) kwargs[k] = obj.reindex(b_items, axis=axis, copy=align_copy) applied = getattr(b, f)(**kwargs) result_blocks = _extend_blocks(applied, result_blocks) if len(result_blocks) == 0: return self.make_empty(axes or self.axes) bm = self.__class__(result_blocks, axes or self.axes, do_integrity_check=do_integrity_check) bm._consolidate_inplace() return bm def quantile(self, axis=0, consolidate=True, transposed=False, interpolation='linear', qs=None, numeric_only=None): """ Iterate over blocks applying quantile reduction. 
This routine is intended for reduction type operations and will do inference on the generated blocks. Parameters ---------- axis: reduction axis, default 0 consolidate: boolean, default True. Join together blocks having same dtype transposed: boolean, default False we are holding transposed data interpolation : type of interpolation, default 'linear' qs : a scalar or list of the quantiles to be computed numeric_only : ignored Returns ------- Block Manager (new object) """ # Series dispatches to DataFrame for quantile, which allows us to # simplify some of the code here and in the blocks assert self.ndim >= 2 if consolidate: self._consolidate_inplace() def get_axe(block, qs, axes): from pandas import Float64Index if is_list_like(qs): ax = Float64Index(qs) elif block.ndim == 1: ax = Float64Index([qs]) else: ax = axes[0] return ax axes, blocks = [], [] for b in self.blocks: block = b.quantile(axis=axis, qs=qs, interpolation=interpolation) axe = get_axe(b, qs, axes=self.axes) axes.append(axe) blocks.append(block) # note that some DatetimeTZ, Categorical are always ndim==1 ndim = {b.ndim for b in blocks} assert 0 not in ndim, ndim if 2 in ndim: new_axes = list(self.axes) # multiple blocks that are reduced if len(blocks) > 1: new_axes[1] = axes[0] # reset the placement to the original for b, sb in zip(blocks, self.blocks): b.mgr_locs = sb.mgr_locs else: new_axes[axis] = Index(np.concatenate( [ax.values for ax in axes])) if transposed: new_axes = new_axes[::-1] blocks = [b.make_block(b.values.T, placement=np.arange(b.shape[1]) ) for b in blocks] return self.__class__(blocks, new_axes) # single block, i.e. ndim == {1} values = _concat._concat_compat([b.values for b in blocks]) # compute the orderings of our original data if len(self.blocks) > 1: indexer = np.empty(len(self.axes[0]), dtype=np.intp) i = 0 for b in self.blocks: for j in b.mgr_locs: indexer[j] = i i = i + 1 values = values.take(indexer) return SingleBlockManager( [make_block(values, ndim=1, placement=np.arange(len(values)))], axes[0]) def isna(self, func, **kwargs): return self.apply('apply', func=func, **kwargs) def where(self, **kwargs): return self.apply('where', **kwargs) def setitem(self, **kwargs): return self.apply('setitem', **kwargs) def putmask(self, **kwargs): return self.apply('putmask', **kwargs) def diff(self, **kwargs): return self.apply('diff', **kwargs) def interpolate(self, **kwargs): return self.apply('interpolate', **kwargs) def shift(self, **kwargs): return self.apply('shift', **kwargs) def fillna(self, **kwargs): return self.apply('fillna', **kwargs) def downcast(self, **kwargs): return self.apply('downcast', **kwargs) def astype(self, dtype, **kwargs): return self.apply('astype', dtype=dtype, **kwargs) def convert(self, **kwargs): return self.apply('convert', **kwargs) def replace(self, **kwargs): return self.apply('replace', **kwargs) def replace_list(self, src_list, dest_list, inplace=False, regex=False): """ do a list replace """ inplace = validate_bool_kwarg(inplace, 'inplace') # figure out our mask a-priori to avoid repeated replacements values = self.as_array() def comp(s, regex=False): """ Generate a bool array by perform an equality check, or perform an element-wise regular expression matching """ if isna(s): return isna(values) if hasattr(s, 'asm8'): return _compare_or_regex_search(maybe_convert_objects(values), getattr(s, 'asm8'), regex) return _compare_or_regex_search(values, s, regex) masks = [comp(s, regex) for i, s in enumerate(src_list)] result_blocks = [] src_len = len(src_list) - 1 for blk in 
self.blocks: # its possible to get multiple result blocks here # replace ALWAYS will return a list rb = [blk if inplace else blk.copy()] for i, (s, d) in enumerate(zip(src_list, dest_list)): new_rb = [] for b in rb: m = masks[i][b.mgr_locs.indexer] convert = i == src_len result = b._replace_coerce(mask=m, to_replace=s, value=d, inplace=inplace, convert=convert, regex=regex) if m.any(): new_rb = _extend_blocks(result, new_rb) else: new_rb.append(b) rb = new_rb result_blocks.extend(rb) bm = self.__class__(result_blocks, self.axes) bm._consolidate_inplace() return bm def is_consolidated(self): """ Return True if more than one block with the same dtype """ if not self._known_consolidated: self._consolidate_check() return self._is_consolidated def _consolidate_check(self): ftypes = [blk.ftype for blk in self.blocks] self._is_consolidated = len(ftypes) == len(set(ftypes)) self._known_consolidated = True @property def is_mixed_type(self): # Warning, consolidation needs to get checked upstairs self._consolidate_inplace() return len(self.blocks) > 1 @property def is_numeric_mixed_type(self): # Warning, consolidation needs to get checked upstairs self._consolidate_inplace() return all(block.is_numeric for block in self.blocks) @property def is_datelike_mixed_type(self): # Warning, consolidation needs to get checked upstairs self._consolidate_inplace() return any(block.is_datelike for block in self.blocks) @property def any_extension_types(self): """Whether any of the blocks in this manager are extension blocks""" return any(block.is_extension for block in self.blocks) @property def is_view(self): """ return a boolean if we are a single block and are a view """ if len(self.blocks) == 1: return self.blocks[0].is_view # It is technically possible to figure out which blocks are views # e.g. [ b.values.base is not None for b in self.blocks ] # but then we have the case of possibly some blocks being a view # and some blocks not. setting in theory is possible on the non-view # blocks w/o causing a SettingWithCopy raise/warn. 
But this is a bit # complicated return False def get_bool_data(self, copy=False): """ Parameters ---------- copy : boolean, default False Whether to copy the blocks """ self._consolidate_inplace() return self.combine([b for b in self.blocks if b.is_bool], copy) def get_numeric_data(self, copy=False): """ Parameters ---------- copy : boolean, default False Whether to copy the blocks """ self._consolidate_inplace() return self.combine([b for b in self.blocks if b.is_numeric], copy) def combine(self, blocks, copy=True): """ return a new manager with the blocks """ if len(blocks) == 0: return self.make_empty() # FIXME: optimization potential indexer = np.sort(np.concatenate([b.mgr_locs.as_array for b in blocks])) inv_indexer = lib.get_reverse_indexer(indexer, self.shape[0]) new_blocks = [] for b in blocks: b = b.copy(deep=copy) b.mgr_locs = algos.take_1d(inv_indexer, b.mgr_locs.as_array, axis=0, allow_fill=False) new_blocks.append(b) axes = list(self.axes) axes[0] = self.items.take(indexer) return self.__class__(new_blocks, axes, do_integrity_check=False) def get_slice(self, slobj, axis=0): if axis >= self.ndim: raise IndexError("Requested axis not found in manager") if axis == 0: new_blocks = self._slice_take_blocks_ax0(slobj) else: slicer = [slice(None)] * (axis + 1) slicer[axis] = slobj slicer = tuple(slicer) new_blocks = [blk.getitem_block(slicer) for blk in self.blocks] new_axes = list(self.axes) new_axes[axis] = new_axes[axis][slobj] bm = self.__class__(new_blocks, new_axes, do_integrity_check=False) bm._consolidate_inplace() return bm def __contains__(self, item): return item in self.items @property def nblocks(self): return len(self.blocks) def copy(self, deep=True): """ Make deep or shallow copy of BlockManager Parameters ---------- deep : boolean o rstring, default True If False, return shallow copy (do not copy data) If 'all', copy data and a deep copy of the index Returns ------- copy : BlockManager """ # this preserves the notion of view copying of axes if deep: if deep == 'all': copy = lambda ax: ax.copy(deep=True) else: copy = lambda ax: ax.view() new_axes = [copy(ax) for ax in self.axes] else: new_axes = list(self.axes) return self.apply('copy', axes=new_axes, deep=deep, do_integrity_check=False) def as_array(self, transpose=False, items=None): """Convert the blockmanager data into an numpy array. Parameters ---------- transpose : boolean, default False If True, transpose the return array items : list of strings or None Names of block items that will be included in the returned array. ``None`` means that all block items will be used Returns ------- arr : ndarray """ if len(self.blocks) == 0: arr = np.empty(self.shape, dtype=float) return arr.transpose() if transpose else arr if items is not None: mgr = self.reindex_axis(items, axis=0) else: mgr = self if self._is_single_block and mgr.blocks[0].is_datetimetz: # TODO(Block.get_values): Make DatetimeTZBlock.get_values # always be object dtype. Some callers seem to want the # DatetimeArray (previously DTI) arr = mgr.blocks[0].get_values(dtype=object) elif self._is_single_block or not self.is_mixed_type: arr = np.asarray(mgr.blocks[0].get_values()) else: arr = mgr._interleave() return arr.transpose() if transpose else arr def _interleave(self): """ Return ndarray from blocks with specified item order Items must be contained in the blocks """ from pandas.core.dtypes.common import is_sparse dtype = _interleaved_dtype(self.blocks) # TODO: https://github.com/pandas-dev/pandas/issues/22791 # Give EAs some input on what happens here. 
Sparse needs this. if is_sparse(dtype): dtype = dtype.subtype elif is_extension_array_dtype(dtype): dtype = 'object' result = np.empty(self.shape, dtype=dtype) itemmask = np.zeros(self.shape[0]) for blk in self.blocks: rl = blk.mgr_locs result[rl.indexer] = blk.get_values(dtype) itemmask[rl.indexer] = 1 if not itemmask.all(): raise AssertionError('Some items were not contained in blocks') return result def to_dict(self, copy=True): """ Return a dict of str(dtype) -> BlockManager Parameters ---------- copy : boolean, default True Returns ------- values : a dict of dtype -> BlockManager Notes ----- This consolidates based on str(dtype) """ self._consolidate_inplace() bd = {} for b in self.blocks: bd.setdefault(str(b.dtype), []).append(b) return {dtype: self.combine(blocks, copy=copy) for dtype, blocks in bd.items()} def xs(self, key, axis=1, copy=True, takeable=False): if axis < 1: raise AssertionError( 'Can only take xs across axis >= 1, got {ax}'.format(ax=axis)) # take by position if takeable: loc = key else: loc = self.axes[axis].get_loc(key) slicer = [slice(None, None) for _ in range(self.ndim)] slicer[axis] = loc slicer = tuple(slicer) new_axes = list(self.axes) # could be an array indexer! if isinstance(loc, (slice, np.ndarray)): new_axes[axis] = new_axes[axis][loc] else: new_axes.pop(axis) new_blocks = [] if len(self.blocks) > 1: # we must copy here as we are mixed type for blk in self.blocks: newb = make_block(values=blk.values[slicer], klass=blk.__class__, placement=blk.mgr_locs) new_blocks.append(newb) elif len(self.blocks) == 1: block = self.blocks[0] vals = block.values[slicer] if copy: vals = vals.copy() new_blocks = [make_block(values=vals, placement=block.mgr_locs, klass=block.__class__)] return self.__class__(new_blocks, new_axes) def fast_xs(self, loc): """ get a cross sectional for a given location in the items ; handle dups return the result, is *could* be a view in the case of a single block """ if len(self.blocks) == 1: return self.blocks[0].iget((slice(None), loc)) items = self.items # non-unique (GH4726) if not items.is_unique: result = self._interleave() if self.ndim == 2: result = result.T return result[loc] # unique dtype = _interleaved_dtype(self.blocks) n = len(items) if is_extension_array_dtype(dtype): # we'll eventually construct an ExtensionArray. result = np.empty(n, dtype=object) else: result = np.empty(n, dtype=dtype) for blk in self.blocks: # Such assignment may incorrectly coerce NaT to None # result[blk.mgr_locs] = blk._slice((slice(None), loc)) for i, rl in enumerate(blk.mgr_locs): result[rl] = blk._try_coerce_result(blk.iget((i, loc))) if is_extension_array_dtype(dtype): result = dtype.construct_array_type()._from_sequence( result, dtype=dtype ) return result def consolidate(self): """ Join together blocks having same dtype Returns ------- y : BlockManager """ if self.is_consolidated(): return self bm = self.__class__(self.blocks, self.axes) bm._is_consolidated = False bm._consolidate_inplace() return bm def _consolidate_inplace(self): if not self.is_consolidated(): self.blocks = tuple(_consolidate(self.blocks)) self._is_consolidated = True self._known_consolidated = True self._rebuild_blknos_and_blklocs() def get(self, item, fastpath=True): """ Return values for selected item (ndarray or BlockManager). 
""" if self.items.is_unique: if not isna(item): loc = self.items.get_loc(item) else: indexer = np.arange(len(self.items))[isna(self.items)] # allow a single nan location indexer if not is_scalar(indexer): if len(indexer) == 1: loc = indexer.item() else: raise ValueError("cannot label index with a null key") return self.iget(loc, fastpath=fastpath) else: if isna(item): raise TypeError("cannot label index with a null key") indexer = self.items.get_indexer_for([item]) return self.reindex_indexer(new_axis=self.items[indexer], indexer=indexer, axis=0, allow_dups=True) def iget(self, i, fastpath=True): """ Return the data as a SingleBlockManager if fastpath=True and possible Otherwise return as a ndarray """ block = self.blocks[self._blknos[i]] values = block.iget(self._blklocs[i]) if not fastpath or not block._box_to_block_values or values.ndim != 1: return values # fastpath shortcut for select a single-dim from a 2-dim BM return SingleBlockManager( [block.make_block_same_class(values, placement=slice(0, len(values)), ndim=1)], self.axes[1]) def delete(self, item): """ Delete selected item (items if non-unique) in-place. """ indexer = self.items.get_loc(item) is_deleted = np.zeros(self.shape[0], dtype=np.bool_) is_deleted[indexer] = True ref_loc_offset = -is_deleted.cumsum() is_blk_deleted = [False] * len(self.blocks) if isinstance(indexer, int): affected_start = indexer else: affected_start = is_deleted.nonzero()[0][0] for blkno, _ in _fast_count_smallints(self._blknos[affected_start:]): blk = self.blocks[blkno] bml = blk.mgr_locs blk_del = is_deleted[bml.indexer].nonzero()[0] if len(blk_del) == len(bml): is_blk_deleted[blkno] = True continue elif len(blk_del) != 0: blk.delete(blk_del) bml = blk.mgr_locs blk.mgr_locs = bml.add(ref_loc_offset[bml.indexer]) # FIXME: use Index.delete as soon as it uses fastpath=True self.axes[0] = self.items[~is_deleted] self.blocks = tuple(b for blkno, b in enumerate(self.blocks) if not is_blk_deleted[blkno]) self._shape = None self._rebuild_blknos_and_blklocs() def set(self, item, value): """ Set new item in-place. Does not consolidate. 
Adds new Block if not contained in the current set of items """ # FIXME: refactor, clearly separate broadcasting & zip-like assignment # can prob also fix the various if tests for sparse/categorical # TODO(EA): Remove an is_extension_ when all extension types satisfy # the interface value_is_extension_type = (is_extension_type(value) or is_extension_array_dtype(value)) # categorical/spares/datetimetz if value_is_extension_type: def value_getitem(placement): return value else: if value.ndim == self.ndim - 1: value = _safe_reshape(value, (1,) + value.shape) def value_getitem(placement): return value else: def value_getitem(placement): return value[placement.indexer] if value.shape[1:] != self.shape[1:]: raise AssertionError('Shape of new values must be compatible ' 'with manager shape') try: loc = self.items.get_loc(item) except KeyError: # This item wasn't present, just insert at end self.insert(len(self.items), item, value) return if isinstance(loc, int): loc = [loc] blknos = self._blknos[loc] blklocs = self._blklocs[loc].copy() unfit_mgr_locs = [] unfit_val_locs = [] removed_blknos = [] for blkno, val_locs in libinternals.get_blkno_placements(blknos, self.nblocks, group=True): blk = self.blocks[blkno] blk_locs = blklocs[val_locs.indexer] if blk.should_store(value): blk.set(blk_locs, value_getitem(val_locs)) else: unfit_mgr_locs.append(blk.mgr_locs.as_array[blk_locs]) unfit_val_locs.append(val_locs) # If all block items are unfit, schedule the block for removal. if len(val_locs) == len(blk.mgr_locs): removed_blknos.append(blkno) else: self._blklocs[blk.mgr_locs.indexer] = -1 blk.delete(blk_locs) self._blklocs[blk.mgr_locs.indexer] = np.arange(len(blk)) if len(removed_blknos): # Remove blocks & update blknos accordingly is_deleted = np.zeros(self.nblocks, dtype=np.bool_) is_deleted[removed_blknos] = True new_blknos = np.empty(self.nblocks, dtype=np.int64) new_blknos.fill(-1) new_blknos[~is_deleted] = np.arange(self.nblocks - len(removed_blknos)) self._blknos = algos.take_1d(new_blknos, self._blknos, axis=0, allow_fill=False) self.blocks = tuple(blk for i, blk in enumerate(self.blocks) if i not in set(removed_blknos)) if unfit_val_locs: unfit_mgr_locs = np.concatenate(unfit_mgr_locs) unfit_count = len(unfit_mgr_locs) new_blocks = [] if value_is_extension_type: # This code (ab-)uses the fact that sparse blocks contain only # one item. new_blocks.extend( make_block(values=value.copy(), ndim=self.ndim, placement=slice(mgr_loc, mgr_loc + 1)) for mgr_loc in unfit_mgr_locs) self._blknos[unfit_mgr_locs] = (np.arange(unfit_count) + len(self.blocks)) self._blklocs[unfit_mgr_locs] = 0 else: # unfit_val_locs contains BlockPlacement objects unfit_val_items = unfit_val_locs[0].append(unfit_val_locs[1:]) new_blocks.append( make_block(values=value_getitem(unfit_val_items), ndim=self.ndim, placement=unfit_mgr_locs)) self._blknos[unfit_mgr_locs] = len(self.blocks) self._blklocs[unfit_mgr_locs] = np.arange(unfit_count) self.blocks += tuple(new_blocks) # Newly created block's dtype may already be present. self._known_consolidated = False def insert(self, loc, item, value, allow_duplicates=False): """ Insert item at selected position. Parameters ---------- loc : int item : hashable value : array_like allow_duplicates: bool If False, trying to insert non-unique item will raise """ if not allow_duplicates and item in self.items: # Should this be a different kind of error?? 
raise ValueError('cannot insert {}, already exists'.format(item)) if not isinstance(loc, int): raise TypeError("loc must be int") # insert to the axis; this could possibly raise a TypeError new_axis = self.items.insert(loc, item) block = make_block(values=value, ndim=self.ndim, placement=slice(loc, loc + 1)) for blkno, count in _fast_count_smallints(self._blknos[loc:]): blk = self.blocks[blkno] if count == len(blk.mgr_locs): blk.mgr_locs = blk.mgr_locs.add(1) else: new_mgr_locs = blk.mgr_locs.as_array.copy() new_mgr_locs[new_mgr_locs >= loc] += 1 blk.mgr_locs = new_mgr_locs if loc == self._blklocs.shape[0]: # np.append is a lot faster, let's use it if we can. self._blklocs = np.append(self._blklocs, 0) self._blknos = np.append(self._blknos, len(self.blocks)) else: self._blklocs = np.insert(self._blklocs, loc, 0) self._blknos = np.insert(self._blknos, loc, len(self.blocks)) self.axes[0] = new_axis self.blocks += (block,) self._shape = None self._known_consolidated = False if len(self.blocks) > 100: self._consolidate_inplace() def reindex_axis(self, new_index, axis, method=None, limit=None, fill_value=None, copy=True): """ Conform block manager to new index. """ new_index = ensure_index(new_index) new_index, indexer = self.axes[axis].reindex(new_index, method=method, limit=limit) return self.reindex_indexer(new_index, indexer, axis=axis, fill_value=fill_value, copy=copy) def reindex_indexer(self, new_axis, indexer, axis, fill_value=None, allow_dups=False, copy=True): """ Parameters ---------- new_axis : Index indexer : ndarray of int64 or None axis : int fill_value : object allow_dups : bool pandas-indexer with -1's only. """ if indexer is None: if new_axis is self.axes[axis] and not copy: return self result = self.copy(deep=copy) result.axes = list(self.axes) result.axes[axis] = new_axis return result self._consolidate_inplace() # some axes don't allow reindexing with dups if not allow_dups: self.axes[axis]._can_reindex(indexer) if axis >= self.ndim: raise IndexError("Requested axis not found in manager") if axis == 0: new_blocks = self._slice_take_blocks_ax0(indexer, fill_tuple=(fill_value,)) else: new_blocks = [blk.take_nd(indexer, axis=axis, fill_tuple=( fill_value if fill_value is not None else blk.fill_value,)) for blk in self.blocks] new_axes = list(self.axes) new_axes[axis] = new_axis return self.__class__(new_blocks, new_axes) def _slice_take_blocks_ax0(self, slice_or_indexer, fill_tuple=None): """ Slice/take blocks along axis=0. Overloaded for SingleBlock Returns ------- new_blocks : list of Block """ allow_fill = fill_tuple is not None sl_type, slobj, sllen = _preprocess_slice_or_indexer( slice_or_indexer, self.shape[0], allow_fill=allow_fill) if self._is_single_block: blk = self.blocks[0] if sl_type in ('slice', 'mask'): return [blk.getitem_block(slobj, new_mgr_locs=slice(0, sllen))] elif not allow_fill or self.ndim == 1: if allow_fill and fill_tuple[0] is None: _, fill_value = maybe_promote(blk.dtype) fill_tuple = (fill_value, ) return [blk.take_nd(slobj, axis=0, new_mgr_locs=slice(0, sllen), fill_tuple=fill_tuple)] if sl_type in ('slice', 'mask'): blknos = self._blknos[slobj] blklocs = self._blklocs[slobj] else: blknos = algos.take_1d(self._blknos, slobj, fill_value=-1, allow_fill=allow_fill) blklocs = algos.take_1d(self._blklocs, slobj, fill_value=-1, allow_fill=allow_fill) # When filling blknos, make sure blknos is updated before appending to # blocks list, that way new blkno is exactly len(blocks). 
# # FIXME: mgr_groupby_blknos must return mgr_locs in ascending order, # pytables serialization will break otherwise. blocks = [] for blkno, mgr_locs in libinternals.get_blkno_placements(blknos, self.nblocks, group=True): if blkno == -1: # If we've got here, fill_tuple was not None. fill_value = fill_tuple[0] blocks.append(self._make_na_block(placement=mgr_locs, fill_value=fill_value)) else: blk = self.blocks[blkno] # Otherwise, slicing along items axis is necessary. if not blk._can_consolidate: # A non-consolidatable block, it's easy, because there's # only one item and each mgr loc is a copy of that single # item. for mgr_loc in mgr_locs: newblk = blk.copy(deep=True) newblk.mgr_locs = slice(mgr_loc, mgr_loc + 1) blocks.append(newblk) else: blocks.append(blk.take_nd(blklocs[mgr_locs.indexer], axis=0, new_mgr_locs=mgr_locs, fill_tuple=None)) return blocks def _make_na_block(self, placement, fill_value=None): # TODO: infer dtypes other than float64 from fill_value if fill_value is None: fill_value = np.nan block_shape = list(self.shape) block_shape[0] = len(placement) dtype, fill_value = infer_dtype_from_scalar(fill_value) block_values = np.empty(block_shape, dtype=dtype) block_values.fill(fill_value) return make_block(block_values, placement=placement) def take(self, indexer, axis=1, verify=True, convert=True): """ Take items along any axis. """ self._consolidate_inplace() indexer = (np.arange(indexer.start, indexer.stop, indexer.step, dtype='int64') if isinstance(indexer, slice) else np.asanyarray(indexer, dtype='int64')) n = self.shape[axis] if convert: indexer = maybe_convert_indices(indexer, n) if verify: if ((indexer == -1) | (indexer >= n)).any(): raise Exception('Indices must be nonzero and less than ' 'the axis length') new_labels = self.axes[axis].take(indexer) return self.reindex_indexer(new_axis=new_labels, indexer=indexer, axis=axis, allow_dups=True) def merge(self, other, lsuffix='', rsuffix=''): # We assume at this point that the axes of self and other match. # This is only called from Panel.join, which reindexes prior # to calling to ensure this assumption holds. l, r = items_overlap_with_suffix(left=self.items, lsuffix=lsuffix, right=other.items, rsuffix=rsuffix) new_items = _concat_indexes([l, r]) new_blocks = [blk.copy(deep=False) for blk in self.blocks] offset = self.shape[0] for blk in other.blocks: blk = blk.copy(deep=False) blk.mgr_locs = blk.mgr_locs.add(offset) new_blocks.append(blk) new_axes = list(self.axes) new_axes[0] = new_items return self.__class__(_consolidate(new_blocks), new_axes) def equals(self, other): self_axes, other_axes = self.axes, other.axes if len(self_axes) != len(other_axes): return False if not all(ax1.equals(ax2) for ax1, ax2 in zip(self_axes, other_axes)): return False self._consolidate_inplace() other._consolidate_inplace() if len(self.blocks) != len(other.blocks): return False # canonicalize block order, using a tuple combining the type # name and then mgr_locs because there might be unconsolidated # blocks (say, Categorical) which can only be distinguished by # the iteration order def canonicalize(block): return (block.dtype.name, block.mgr_locs.as_array.tolist()) self_blocks = sorted(self.blocks, key=canonicalize) other_blocks = sorted(other.blocks, key=canonicalize) return all(block.equals(oblock) for block, oblock in zip(self_blocks, other_blocks)) def unstack(self, unstacker_func, fill_value): """Return a blockmanager with all blocks unstacked. 
Parameters ---------- unstacker_func : callable A (partially-applied) ``pd.core.reshape._Unstacker`` class. fill_value : Any fill_value for newly introduced missing values. Returns ------- unstacked : BlockManager """ n_rows = self.shape[-1] dummy = unstacker_func(np.empty((0, 0)), value_columns=self.items) new_columns = dummy.get_new_columns() new_index = dummy.get_new_index() new_blocks = [] columns_mask = [] for blk in self.blocks: blocks, mask = blk._unstack( partial(unstacker_func, value_columns=self.items[blk.mgr_locs.indexer]), new_columns, n_rows, fill_value ) new_blocks.extend(blocks) columns_mask.extend(mask) new_columns = new_columns[columns_mask] bm = BlockManager(new_blocks, [new_columns, new_index]) return bm class SingleBlockManager(BlockManager): """ manage a single block with """ ndim = 1 _is_consolidated = True _known_consolidated = True __slots__ = () def __init__(self, block, axis, do_integrity_check=False, fastpath=False): if isinstance(axis, list): if len(axis) != 1: raise ValueError("cannot create SingleBlockManager with more " "than 1 axis") axis = axis[0] # passed from constructor, single block, single axis if fastpath: self.axes = [axis] if isinstance(block, list): # empty block if len(block) == 0: block = [np.array([])] elif len(block) != 1: raise ValueError('Cannot create SingleBlockManager with ' 'more than 1 block') block = block[0] else: self.axes = [ensure_index(axis)] # create the block here if isinstance(block, list): # provide consolidation to the interleaved_dtype if len(block) > 1: dtype = _interleaved_dtype(block) block = [b.astype(dtype) for b in block] block = _consolidate(block) if len(block) != 1: raise ValueError('Cannot create SingleBlockManager with ' 'more than 1 block') block = block[0] if not isinstance(block, Block): block = make_block(block, placement=slice(0, len(axis)), ndim=1) self.blocks = [block] def _post_setstate(self): pass @property def _block(self): return self.blocks[0] @property def _values(self): return self._block.values @property def _blknos(self): """ compat with BlockManager """ return None @property def _blklocs(self): """ compat with BlockManager """ return None def get_slice(self, slobj, axis=0): if axis >= self.ndim: raise IndexError("Requested axis not found in manager") return self.__class__(self._block._slice(slobj), self.index[slobj], fastpath=True) @property def index(self): return self.axes[0] def convert(self, **kwargs): """ convert the whole block as one """ kwargs['by_item'] = False return self.apply('convert', **kwargs) @property def dtype(self): return self._block.dtype @property def array_dtype(self): return self._block.array_dtype @property def ftype(self): return self._block.ftype def get_dtype_counts(self): return {self.dtype.name: 1} def get_ftype_counts(self): return {self.ftype: 1} def get_dtypes(self): return np.array([self._block.dtype]) def get_ftypes(self): return np.array([self._block.ftype]) def external_values(self): return self._block.external_values() def internal_values(self): return self._block.internal_values() def formatting_values(self): """Return the internal values used by the DataFrame/SeriesFormatter""" return self._block.formatting_values() def get_values(self): """ return a dense type view """ return np.array(self._block.to_dense(), copy=False) @property def asobject(self): """ return a object dtype array. datetime/timedelta like values are boxed to Timestamp/Timedelta instances. 
""" return self._block.get_values(dtype=object) @property def _can_hold_na(self): return self._block._can_hold_na def is_consolidated(self): return True def _consolidate_check(self): pass def _consolidate_inplace(self): pass def delete(self, item): """ Delete single item from SingleBlockManager. Ensures that self.blocks doesn't become empty. """ loc = self.items.get_loc(item) self._block.delete(loc) self.axes[0] = self.axes[0].delete(loc) def fast_xs(self, loc): """ fast path for getting a cross-section return a view of the data """ return self._block.values[loc] def concat(self, to_concat, new_axis): """ Concatenate a list of SingleBlockManagers into a single SingleBlockManager. Used for pd.concat of Series objects with axis=0. Parameters ---------- to_concat : list of SingleBlockManagers new_axis : Index of the result Returns ------- SingleBlockManager """ non_empties = [x for x in to_concat if len(x) > 0] # check if all series are of the same block type: if len(non_empties) > 0: blocks = [obj.blocks[0] for obj in non_empties] if len({b.dtype for b in blocks}) == 1: new_block = blocks[0].concat_same_type(blocks) else: values = [x.values for x in blocks] values = _concat._concat_compat(values) new_block = make_block( values, placement=slice(0, len(values), 1)) else: values = [x._block.values for x in to_concat] values = _concat._concat_compat(values) new_block = make_block( values, placement=slice(0, len(values), 1)) mgr = SingleBlockManager(new_block, new_axis) return mgr # -------------------------------------------------------------------- # Constructor Helpers def create_block_manager_from_blocks(blocks, axes): try: if len(blocks) == 1 and not isinstance(blocks[0], Block): # if blocks[0] is of length 0, return empty blocks if not len(blocks[0]): blocks = [] else: # It's OK if a single block is passed as values, its placement # is basically "all items", but if there're many, don't bother # converting, it's an error anyway. blocks = [make_block(values=blocks[0], placement=slice(0, len(axes[0])))] mgr = BlockManager(blocks, axes) mgr._consolidate_inplace() return mgr except (ValueError) as e: blocks = [getattr(b, 'values', b) for b in blocks] tot_items = sum(b.shape[0] for b in blocks) construction_error(tot_items, blocks[0].shape[1:], axes, e) def create_block_manager_from_arrays(arrays, names, axes): try: blocks = form_blocks(arrays, names, axes) mgr = BlockManager(blocks, axes) mgr._consolidate_inplace() return mgr except ValueError as e: construction_error(len(arrays), arrays[0].shape, axes, e) def construction_error(tot_items, block_shape, axes, e=None): """ raise a helpful message about our construction """ passed = tuple(map(int, [tot_items] + list(block_shape))) # Correcting the user facing error message during dataframe construction if len(passed) <= 2: passed = passed[::-1] implied = tuple(len(ax) for ax in axes) # Correcting the user facing error message during dataframe construction if len(implied) <= 2: implied = implied[::-1] if passed == implied and e is not None: raise e if block_shape[0] == 0: raise ValueError("Empty data passed with indices specified.") raise ValueError("Shape of passed values is {0}, indices imply {1}".format( passed, implied)) # ----------------------------------------------------------------------- def form_blocks(arrays, names, axes): # put "leftover" items in float bucket, where else? # generalize? 
items_dict = defaultdict(list) extra_locs = [] names_idx = ensure_index(names) if names_idx.equals(axes[0]): names_indexer = np.arange(len(names_idx)) else: assert names_idx.intersection(axes[0]).is_unique names_indexer = names_idx.get_indexer_for(axes[0]) for i, name_idx in enumerate(names_indexer): if name_idx == -1: extra_locs.append(i) continue k = names[name_idx] v = arrays[name_idx] block_type = get_block_type(v) items_dict[block_type.__name__].append((i, k, v)) blocks = [] if len(items_dict['FloatBlock']): float_blocks = _multi_blockify(items_dict['FloatBlock']) blocks.extend(float_blocks) if len(items_dict['ComplexBlock']): complex_blocks = _multi_blockify(items_dict['ComplexBlock']) blocks.extend(complex_blocks) if len(items_dict['TimeDeltaBlock']): timedelta_blocks = _multi_blockify(items_dict['TimeDeltaBlock']) blocks.extend(timedelta_blocks) if len(items_dict['IntBlock']): int_blocks = _multi_blockify(items_dict['IntBlock']) blocks.extend(int_blocks) if len(items_dict['DatetimeBlock']): datetime_blocks = _simple_blockify(items_dict['DatetimeBlock'], _NS_DTYPE) blocks.extend(datetime_blocks) if len(items_dict['DatetimeTZBlock']): dttz_blocks = [make_block(array, klass=DatetimeTZBlock, placement=[i]) for i, _, array in items_dict['DatetimeTZBlock']] blocks.extend(dttz_blocks) if len(items_dict['BoolBlock']): bool_blocks = _simple_blockify(items_dict['BoolBlock'], np.bool_) blocks.extend(bool_blocks) if len(items_dict['ObjectBlock']) > 0: object_blocks = _simple_blockify(items_dict['ObjectBlock'], np.object_) blocks.extend(object_blocks) if len(items_dict['SparseBlock']) > 0: sparse_blocks = _sparse_blockify(items_dict['SparseBlock']) blocks.extend(sparse_blocks) if len(items_dict['CategoricalBlock']) > 0: cat_blocks = [make_block(array, klass=CategoricalBlock, placement=[i]) for i, _, array in items_dict['CategoricalBlock']] blocks.extend(cat_blocks) if len(items_dict['ExtensionBlock']): external_blocks = [ make_block(array, klass=ExtensionBlock, placement=[i]) for i, _, array in items_dict['ExtensionBlock'] ] blocks.extend(external_blocks) if len(items_dict['ObjectValuesExtensionBlock']): external_blocks = [ make_block(array, klass=ObjectValuesExtensionBlock, placement=[i]) for i, _, array in items_dict['ObjectValuesExtensionBlock'] ] blocks.extend(external_blocks) if len(extra_locs): shape = (len(extra_locs),) + tuple(len(x) for x in axes[1:]) # empty items -> dtype object block_values = np.empty(shape, dtype=object) block_values.fill(np.nan) na_block = make_block(block_values, placement=extra_locs) blocks.append(na_block) return blocks def _simple_blockify(tuples, dtype): """ return a single array of a block that has a single dtype; if dtype is not None, coerce to this dtype """ values, placement = _stack_arrays(tuples, dtype) # CHECK DTYPE? 
if dtype is not None and values.dtype != dtype: # pragma: no cover values = values.astype(dtype) block = make_block(values, placement=placement) return [block] def _multi_blockify(tuples, dtype=None): """ return an array of blocks that potentially have different dtypes """ # group by dtype grouper = itertools.groupby(tuples, lambda x: x[2].dtype) new_blocks = [] for dtype, tup_block in grouper: values, placement = _stack_arrays(list(tup_block), dtype) block = make_block(values, placement=placement) new_blocks.append(block) return new_blocks def _sparse_blockify(tuples, dtype=None): """ return an array of blocks that potentially have different dtypes (and are sparse) """ new_blocks = [] for i, names, array in tuples: array = _maybe_to_sparse(array) block = make_block(array, placement=[i]) new_blocks.append(block) return new_blocks def _stack_arrays(tuples, dtype): # fml def _asarray_compat(x): if isinstance(x, ABCSeries): return x._values else: return np.asarray(x) def _shape_compat(x): if isinstance(x, ABCSeries): return len(x), else: return x.shape placement, names, arrays = zip(*tuples) first = arrays[0] shape = (len(arrays),) + _shape_compat(first) stacked = np.empty(shape, dtype=dtype) for i, arr in enumerate(arrays): stacked[i] = _asarray_compat(arr) return stacked, placement def _interleaved_dtype(blocks): # type: (List[Block]) -> Optional[Union[np.dtype, ExtensionDtype]] """Find the common dtype for `blocks`. Parameters ---------- blocks : List[Block] Returns ------- dtype : Optional[Union[np.dtype, ExtensionDtype]] None is returned when `blocks` is empty. """ if not len(blocks): return None return find_common_type([b.dtype for b in blocks]) def _consolidate(blocks): """ Merge blocks having same dtype, exclude non-consolidating blocks """ # sort by _can_consolidate, dtype gkey = lambda x: x._consolidate_key grouper = itertools.groupby(sorted(blocks, key=gkey), gkey) new_blocks = [] for (_can_consolidate, dtype), group_blocks in grouper: merged_blocks = _merge_blocks(list(group_blocks), dtype=dtype, _can_consolidate=_can_consolidate) new_blocks = _extend_blocks(merged_blocks, new_blocks) return new_blocks def _compare_or_regex_search(a, b, regex=False): """ Compare two array_like inputs of the same shape or two scalar values Calls operator.eq or re.search, depending on regex argument. If regex is True, perform an element-wise regex matching. 
Parameters ---------- a : array_like or scalar b : array_like or scalar regex : bool, default False Returns ------- mask : array_like of bool """ if not regex: op = lambda x: operator.eq(x, b) else: op = np.vectorize(lambda x: bool(re.search(b, x)) if isinstance(x, str) else False) is_a_array = isinstance(a, np.ndarray) is_b_array = isinstance(b, np.ndarray) # numpy deprecation warning to have i8 vs integer comparisons if is_datetimelike_v_numeric(a, b): result = False # numpy deprecation warning if comparing numeric vs string-like elif is_numeric_v_string_like(a, b): result = False else: result = op(a) if is_scalar(result) and (is_a_array or is_b_array): type_names = [type(a).__name__, type(b).__name__] if is_a_array: type_names[0] = 'ndarray(dtype={dtype})'.format(dtype=a.dtype) if is_b_array: type_names[1] = 'ndarray(dtype={dtype})'.format(dtype=b.dtype) raise TypeError( "Cannot compare types {a!r} and {b!r}".format(a=type_names[0], b=type_names[1])) return result def _concat_indexes(indexes): return indexes[0].append(indexes[1:]) def items_overlap_with_suffix(left, lsuffix, right, rsuffix): """ If two indices overlap, add suffixes to overlapping entries. If corresponding suffix is empty, the entry is simply converted to string. """ to_rename = left.intersection(right) if len(to_rename) == 0: return left, right else: if not lsuffix and not rsuffix: raise ValueError('columns overlap but no suffix specified: ' '{rename}'.format(rename=to_rename)) def renamer(x, suffix): """Rename the left and right indices. If there is overlap, and suffix is not None, add suffix, otherwise, leave it as-is. Parameters ---------- x : original column name suffix : str or None Returns ------- x : renamed column name """ if x in to_rename and suffix is not None: return '{x}{suffix}'.format(x=x, suffix=suffix) return x lrenamer = partial(renamer, suffix=lsuffix) rrenamer = partial(renamer, suffix=rsuffix) return (_transform_index(left, lrenamer), _transform_index(right, rrenamer)) def _transform_index(index, func, level=None): """ Apply function to all values found in index. This includes transforming multiindex entries separately. Only apply function to one level of the MultiIndex if level is specified. """ if isinstance(index, MultiIndex): if level is not None: items = [tuple(func(y) if i == level else y for i, y in enumerate(x)) for x in index] else: items = [tuple(func(y) for y in x) for x in index] return MultiIndex.from_tuples(items, names=index.names) else: items = [func(x) for x in index] return Index(items, name=index.name, tupleize_cols=False) def _fast_count_smallints(arr): """Faster version of set(arr) for sequences of small numbers.""" counts = np.bincount(arr.astype(np.int_)) nz = counts.nonzero()[0] return np.c_[nz, counts[nz]] def _preprocess_slice_or_indexer(slice_or_indexer, length, allow_fill): if isinstance(slice_or_indexer, slice): return ('slice', slice_or_indexer, libinternals.slice_len(slice_or_indexer, length)) elif (isinstance(slice_or_indexer, np.ndarray) and slice_or_indexer.dtype == np.bool_): return 'mask', slice_or_indexer, slice_or_indexer.sum() else: indexer = np.asanyarray(slice_or_indexer, dtype=np.int64) if not allow_fill: indexer = maybe_convert_indices(indexer, length) return 'fancy', indexer, len(indexer) def concatenate_block_managers(mgrs_indexers, axes, concat_axis, copy): """ Concatenate block managers into one. 
Parameters ---------- mgrs_indexers : list of (BlockManager, {axis: indexer,...}) tuples axes : list of Index concat_axis : int copy : bool """ concat_plans = [get_mgr_concatenation_plan(mgr, indexers) for mgr, indexers in mgrs_indexers] concat_plan = combine_concat_plans(concat_plans, concat_axis) blocks = [] for placement, join_units in concat_plan: if len(join_units) == 1 and not join_units[0].indexers: b = join_units[0].block values = b.values if copy: values = values.copy() elif not copy: values = values.view() b = b.make_block_same_class(values, placement=placement) elif is_uniform_join_units(join_units): b = join_units[0].block.concat_same_type( [ju.block for ju in join_units], placement=placement) else: b = make_block( concatenate_join_units(join_units, concat_axis, copy=copy), placement=placement) blocks.append(b) return BlockManager(blocks, axes)
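A quick way to see the block consolidation implemented above in action is to build a mixed-dtype frame and inspect its manager directly. This is a minimal sketch, assuming a pandas version from the same era as this module, where the manager is reachable through the private DataFrame._data attribute (later versions rename it _mgr); the column names and values are arbitrary.

import numpy as np
import pandas as pd

df = pd.DataFrame({
    'a': np.arange(3, dtype='int64'),
    'b': np.arange(3, dtype='int64'),        # same dtype as 'a' -> shares a block
    'c': np.linspace(0.0, 1.0, 3),           # float64 -> its own block
    'd': pd.Categorical(['x', 'y', 'x']),    # extension dtype -> non-consolidatable block
})

mgr = df._data  # BlockManager (private, version-dependent attribute)
for blk in mgr.blocks:
    # Each block holds a single dtype plus the column positions (mgr_locs) it owns.
    print(blk.dtype, blk.mgr_locs.as_array)

The two int64 columns end up in one shared block, which is exactly the grouping that form_blocks and _consolidate above arrange.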
# -*- coding: utf-8 -*- import operator import numpy as np import pytest import pandas as pd from pandas import Series, compat from pandas.core.indexes.period import IncompatibleFrequency import pandas.util.testing as tm def _permute(obj): return obj.take(np.random.permutation(len(obj))) class TestSeriesFlexArithmetic(object): @pytest.mark.parametrize( 'ts', [ (lambda x: x, lambda x: x * 2, False), (lambda x: x, lambda x: x[::2], False), (lambda x: x, lambda x: 5, True), (lambda x: tm.makeFloatSeries(), lambda x: tm.makeFloatSeries(), True) ]) @pytest.mark.parametrize('opname', ['add', 'sub', 'mul', 'floordiv', 'truediv', 'div', 'pow']) def test_flex_method_equivalence(self, opname, ts): # check that Series.{opname} behaves like Series.__{opname}__, tser = tm.makeTimeSeries().rename('ts') series = ts[0](tser) other = ts[1](tser) check_reverse = ts[2] if opname == 'div' and compat.PY3: pytest.skip('div test only for Py3') op = getattr(Series, opname) if op == 'div': alt = operator.truediv else: alt = getattr(operator, opname) result = op(series, other) expected = alt(series, other) tm.assert_almost_equal(result, expected) if check_reverse: rop = getattr(Series, "r" + opname) result = rop(series, other) expected = alt(other, series) tm.assert_almost_equal(result, expected) class TestSeriesArithmetic(object): # Some of these may end up in tests/arithmetic, but are not yet sorted def test_add_series_with_period_index(self): rng = pd.period_range('1/1/2000', '1/1/2010', freq='A') ts = Series(np.random.randn(len(rng)), index=rng) result = ts + ts[::2] expected = ts + ts expected[1::2] = np.nan tm.assert_series_equal(result, expected) result = ts + _permute(ts[::2]) tm.assert_series_equal(result, expected) msg = "Input has different freq=D from PeriodIndex\\(freq=A-DEC\\)" with pytest.raises(IncompatibleFrequency, match=msg): ts + ts.asfreq('D', how="end") # ------------------------------------------------------------------ # Comparisons class TestSeriesFlexComparison(object): def test_comparison_flex_basic(self): left = pd.Series(np.random.randn(10)) right = pd.Series(np.random.randn(10)) tm.assert_series_equal(left.eq(right), left == right) tm.assert_series_equal(left.ne(right), left != right) tm.assert_series_equal(left.le(right), left < right) tm.assert_series_equal(left.lt(right), left <= right) tm.assert_series_equal(left.gt(right), left > right) tm.assert_series_equal(left.ge(right), left >= right) # axis for axis in [0, None, 'index']: tm.assert_series_equal(left.eq(right, axis=axis), left == right) tm.assert_series_equal(left.ne(right, axis=axis), left != right) tm.assert_series_equal(left.le(right, axis=axis), left < right) tm.assert_series_equal(left.lt(right, axis=axis), left <= right) tm.assert_series_equal(left.gt(right, axis=axis), left > right) tm.assert_series_equal(left.ge(right, axis=axis), left >= right) # msg = 'No axis named 1 for object type' for op in ['eq', 'ne', 'le', 'le', 'gt', 'ge']: with pytest.raises(ValueError, match=msg): getattr(left, op)(right, axis=1) class TestSeriesComparison(object): def test_comparison_different_length(self): a = Series(['a', 'b', 'c']) b = Series(['b', 'a']) with pytest.raises(ValueError): a < b a = Series([1, 2]) b = Series([2, 3, 4]) with pytest.raises(ValueError): a == b @pytest.mark.parametrize('opname', ['eq', 'ne', 'gt', 'lt', 'ge', 'le']) def test_ser_flex_cmp_return_dtypes(self, opname): # GH#15115 ser = Series([1, 3, 2], index=range(3)) const = 2 result = getattr(ser, opname)(const).get_dtype_counts() 
tm.assert_series_equal(result, Series([1], ['bool'])) @pytest.mark.parametrize('opname', ['eq', 'ne', 'gt', 'lt', 'ge', 'le']) def test_ser_flex_cmp_return_dtypes_empty(self, opname): # GH#15115 empty Series case ser = Series([1, 3, 2], index=range(3)) empty = ser.iloc[:0] const = 2 result = getattr(empty, opname)(const).get_dtype_counts() tm.assert_series_equal(result, Series([1], ['bool'])) @pytest.mark.parametrize('op', [operator.eq, operator.ne, operator.le, operator.lt, operator.ge, operator.gt]) @pytest.mark.parametrize('names', [(None, None, None), ('foo', 'bar', None), ('baz', 'baz', 'baz')]) def test_ser_cmp_result_names(self, names, op): # datetime64 dtype dti = pd.date_range('1949-06-07 03:00:00', freq='H', periods=5, name=names[0]) ser = Series(dti).rename(names[1]) result = op(ser, dti) assert result.name == names[2] # datetime64tz dtype dti = dti.tz_localize('US/Central') ser = Series(dti).rename(names[1]) result = op(ser, dti) assert result.name == names[2] # timedelta64 dtype tdi = dti - dti.shift(1) ser = Series(tdi).rename(names[1]) result = op(ser, tdi) assert result.name == names[2] # categorical if op in [operator.eq, operator.ne]: # categorical dtype comparisons raise for inequalities cidx = tdi.astype('category') ser = Series(cidx).rename(names[1]) result = op(ser, cidx) assert result.name == names[2]
MJuddBooth/pandas
pandas/tests/series/test_arithmetic.py
pandas/core/internals/managers.py
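The tests above depend on the flex arithmetic and comparison methods mirroring the plain operators. A small sketch of that equivalence using only public Series API; the sample values are arbitrary.

import numpy as np
import pandas as pd

s = pd.Series([1.0, 2.0, 3.0])
other = pd.Series([3.0, 2.0, 1.0])

# Series.add / Series.radd mirror + with the operands in either order.
assert s.add(other).equals(s + other)
assert s.radd(other).equals(other + s)

# Flex comparisons mirror the comparison operators and come back as bool dtype.
assert s.gt(other).equals(s > other)
assert s.eq(2).dtype == np.dtype(bool)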
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors. # All rights reserved. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- ''' A guide renderer for displaying grid lines on Bokeh plots. ''' #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- import logging # isort:skip log = logging.getLogger(__name__) #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Bokeh imports from ..core.properties import ( Auto, Either, Float, Include, Instance, Int, Override, Seq, String, Tuple, ) from ..core.property_mixins import ScalarFillProps, ScalarHatchProps, ScalarLineProps from .axes import Axis from .renderers import GuideRenderer from .tickers import FixedTicker, Ticker #----------------------------------------------------------------------------- # Globals and constants #----------------------------------------------------------------------------- __all__ = ( 'Grid', ) #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- class Grid(GuideRenderer): ''' Display horizontal or vertical grid lines at locations given by a supplied ``Ticker``. ''' dimension = Int(0, help=""" Which dimension the Axis Grid lines will intersect. The x-axis is dimension 0 (vertical Grid lines) and the y-axis is dimension 1 (horizontal Grid lines). """) bounds = Either(Auto, Tuple(Float, Float), help=""" Bounds for the rendered grid lines. By default, a grid will look for a corresponding axis to ask for bounds. If one cannot be found, the grid will span the entire visible range. """) # Note: we must allow the possibility of setting both # range names be cause if a grid line is "traced" along # a path, ranges in both dimensions will matter. x_range_name = String('default', help=""" A particular (named) x-range to use for computing screen locations when rendering a grid on the plot. If unset, use the default x-range. """) y_range_name = String('default', help=""" A particular (named) y-range to use for computing screen locations when rendering a grid on the plot. If unset, use the default y-range. """) axis = Instance(Axis, help=""" An Axis to delegate ticking to. If the ticker property is None, then the Grid will use the ticker on the specified axis for computing where to draw grid lines. Otherwise, it ticker is not None, it will take precedence over any Axis. """) ticker = Instance(Ticker, help=""" A Ticker to use for computing locations for the Grid lines. """).accepts(Seq(Float), lambda ticks: FixedTicker(ticks=ticks)) grid_props = Include(ScalarLineProps, help=""" The %s of the Grid lines. """) grid_line_color = Override(default='#e5e5e5') minor_grid_props = Include(ScalarLineProps, help=""" The %s of the minor Grid lines. """) minor_grid_line_color = Override(default=None) band_fill_props = Include(ScalarFillProps, use_prefix="band", help=""" The %s of alternating bands between Grid lines. 
""") band_fill_alpha = Override(default=0) band_fill_color = Override(default=None) band_hatch_props = Include(ScalarHatchProps, use_prefix="band", help=""" The %s of alternating bands between Grid lines. """) level = Override(default="underlay") #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #-----------------------------------------------------------------------------
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors. # All rights reserved. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- import pytest ; pytest #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Standard library imports import inspect # Module under test # isort:skip #----------------------------------------------------------------------------- # Setup #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- def get_prop_set(class_object): # all this does is get a list of every property implemented by the object that is not present in the baseclasses of said object # note it wont detect overridden properties! base_classes = list(inspect.getmro(class_object)) base_classes.remove(class_object) base_properties = [] for base_class in base_classes: base_properties.extend(dir(base_class)) class_properties = set(dir(class_object)).difference(set(base_properties)) return class_properties class TestPanel(object): def setup_method(self): from bokeh.models import Panel self.panelCls = Panel def test_expectedprops(self) -> None: expected_properties = set(['title', 'child']) actual_properties = get_prop_set(self.panelCls) assert expected_properties.issubset(actual_properties) def test_prop_defaults(self) -> None: p1 = self.panelCls() p2 = self.panelCls() assert p1.title == "" assert p2.title == "" assert p1.child == None class TestTabs(object): def setup_method(self): from bokeh.models import Tabs, Panel self.tabsCls = Tabs self.panelCls = Panel def test_expected_props(self) -> None: expected_properties = set(['tabs', 'active']) actual_properties = get_prop_set(self.tabsCls) assert expected_properties.issubset(actual_properties) def test_props_defaults(self) -> None: tab = self.tabsCls() assert tab.tabs == [] assert tab.active == 0 #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #-----------------------------------------------------------------------------
ericmjl/bokeh
tests/unit/bokeh/test_widgets.py
bokeh/models/grids.py
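As a usage sketch for the Grid model defined above: extra grid lines can be attached to a plot by hand, with a FixedTicker supplying the locations and the band fill properties shading the space between them. The tick values and colors below are arbitrary examples, not values from the Bokeh sources.

from bokeh.models import FixedTicker, Grid
from bokeh.plotting import figure

p = figure()
p.line([1, 2, 3, 4, 5], [2, 5, 3, 6, 4])

extra_grid = Grid(
    dimension=0,                                    # 0 -> vertical lines (x dimension)
    ticker=FixedTicker(ticks=[1.5, 2.5, 3.5, 4.5]),
    grid_line_color='#e5e5e5',
    band_fill_color='navy',
    band_fill_alpha=0.05,
)
p.add_layout(extra_grid)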
import os import pytest from cfme.fixtures.terminalreporter import reporter from cfme.utils.datafile import data_path_for_filename from cfme.utils.datafile import load_data_file from cfme.utils.path import data_path from cfme.utils.path import log_path # Collection for storing unique combinations of data file paths # and filenames for usage reporting after a completed test run seen_data_files = set() @pytest.fixture(scope="module") def datafile(request): """datafile(filename, replacements) datafile fixture, with templating support Args: filename: filename to load from the data dir replacements: template replacements Returns: Path to the loaded datafile Usage: Given a filename, it will attempt to open the given file from the test's corresponding data dir. For example, this: datafile('testfile') # in tests/subdir/test_module_name.py Would return a file object representing this file: /path/to/cfme_tests/data/subdir/test_module_name/testfile Given a filename with a leading slash, it will attempt to load the file relative to the root of the data dir. For example, this: datafile('/common/testfile') # in tests/subdir/test_module_name.py Would return a file object representing this file: /path/to/cfme_tests/data/common/testfile Note that the test module name is not used with the leading slash. .. rubric:: Templates: This fixture can also handle template replacements. If the datafile being loaded is a python template, the dictionary of replacements can be passed as the 'replacements' keyword argument. In this case, the returned data file will be a NamedTemporaryFile prepopulated with the interpolated result from combining the template with the replacements mapping. * http://docs.python.org/2/library/string.html#template-strings * http://docs.python.org/2/library/tempfile.html#tempfile.NamedTemporaryFile """ return _FixtureDataFile(request) def pytest_addoption(parser): group = parser.getgroup('cfme') group.addoption('--udf-report', action='store_true', default=False, dest='udf_report', help='flag to generate an unused data files report') def pytest_sessionfinish(session, exitstatus): udf_log_file = log_path.join('unused_data_files.log') if udf_log_file.check(): # Clean up old udf log if it exists udf_log_file.remove() if session.config.option.udf_report is False: # Short out here if not making a report return # Output an unused data files log after a test run data_files = set() for dirpath, dirnames, filenames in os.walk(str(data_path)): for filename in filenames: filepath = os.path.join(dirpath, filename) data_files.add(filepath) unused_data_files = data_files - seen_data_files if unused_data_files: # Write the log of unused data files out, minus the data dir prefix udf_log = ''.join( (line[len(str(data_path)):] + '\n' for line in unused_data_files) ) udf_log_file.write(udf_log + '\n') # Throw a notice into the terminal reporter to check the log tr = reporter() tr.write_line('') tr.write_sep( '-', '%d unused data files after test run, check %s' % ( len(unused_data_files), udf_log_file.basename ) ) class _FixtureDataFile(object): def __init__(self, request): self.base_path = str(request.session.fspath) self.testmod_path = str(request.fspath) def __call__(self, filename, replacements=None): if filename.startswith('/'): complete_path = data_path_for_filename( filename.strip('/'), self.base_path) else: complete_path = data_path_for_filename( filename, self.base_path, self.testmod_path) seen_data_files.add(complete_path) return load_data_file(complete_path, replacements)
# pylint: skip-file """Manual tests""" import pytest from cfme import test_requirements pytestmark = [pytest.mark.ignore_stream('upstream')] @pytest.mark.manual @test_requirements.satellite def test_no_rbac_warnings_in_logs_when_viewing_satellite_provider(): """ RBAC-related warnings logged when viewing Satellite provider in web UI Bugzilla: 1565266 1.) Add Satellite provider. 2.) Click on items under Providers accordion. 3.) View evm.log. No WARN-level messages should be logged. [----] W, [2018-04-09T14:09:19.654859 #13384:84e658] WARN -- : MIQ(Rbac::Filterer#lookup_method_for_descendant_class) could not find method name for ConfiguredSystem::ConfiguredSystem Polarion: assignee: tpapaioa casecomponent: Provisioning caseimportance: medium initialEstimate: 1/15h """ pass @pytest.mark.manual @test_requirements.satellite def test_satellite_host_groups_show_up_as_configuration_profiles_satellite_62(): """ For the Satellite provider satellite_62, both the centos and fedora- cloud configuration profiles show up in Configuration > Manage, in the accordion menu under All Configuration Manager Providers > Red Hat Satellite Providers > satellite_62 Configuration Manager. Polarion: assignee: tpapaioa casecomponent: Configuration caseimportance: medium initialEstimate: 1/15h """ pass @pytest.mark.manual @test_requirements.satellite def test_satellite_credential_validation_times_out_with_error_message(): """ Bug 1564601 - Satellite credential validation times out with no error message Bugzilla: 1564601 When adding a new Satellite configuration provider, if the URL cannot be accessed because of a firewall dropping packets, then credential validation should time out after 2 minutes with a flash message. Polarion: assignee: tpapaioa casecomponent: Provisioning caseimportance: medium initialEstimate: 1/6h """ pass
izapolsk/integration_tests
cfme/tests/satellite/test_satellite_manual.py
cfme/fixtures/datafile.py
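A hedged sketch of how the datafile fixture above is meant to be consumed from a test module; the module path, template name, and replacement keys here are invented for illustration.

# Hypothetical file cfme/tests/subdir/test_example.py; per the fixture docstring it
# would resolve 'config.template' from data/subdir/test_example/.
def test_rendered_template(datafile):
    rendered = datafile('config.template', replacements={'hostname': 'example.com'})
    contents = rendered.read()
    # The interpolated value from the replacements mapping ends up in the temp file.
    assert 'example.com' in str(contents)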
import attr from riggerlib import recursive_update from cfme.cloud.instance import Instance from cfme.cloud.instance import InstanceCollection @attr.s class GCEInstance(Instance): # CFME & provider power control options START = "Start" POWER_ON = START # For compatibility with the infra objects. STOP = "Stop" DELETE = "Delete" TERMINATE = 'Delete' # CFME-only power control options SOFT_REBOOT = "Soft Reboot" # Provider-only power control options RESTART = "Restart" # CFME power states STATE_ON = "on" STATE_OFF = "off" STATE_SUSPENDED = "suspended" STATE_TERMINATED = "terminated" STATE_ARCHIVED = "archived" STATE_UNKNOWN = "unknown" @property def ui_powerstates_available(self): return { 'on': [self.STOP, self.SOFT_REBOOT, self.TERMINATE], 'off': [self.START, self.TERMINATE]} @property def ui_powerstates_unavailable(self): return { 'on': [self.START], 'off': [self.STOP, self.SOFT_REBOOT]} @property def vm_default_args(self): """Represents dictionary used for Vm/Instance provision with GCE mandatory default args""" inst_args = super(GCEInstance, self).vm_default_args provisioning = self.provider.data['provisioning'] inst_args['properties']['boot_disk_size'] = provisioning.get('boot_disk_size', '10 GB') return inst_args @property def vm_default_args_rest(self): inst_args = super(GCEInstance, self).vm_default_args_rest provisioning = self.provider.data['provisioning'] recursive_update(inst_args, { 'vm_fields': { 'boot_disk_size': provisioning['boot_disk_size'].replace(' ', '.')}}) return inst_args @attr.s class GCEInstanceCollection(InstanceCollection): ENTITY = GCEInstance

izapolsk/integration_tests
cfme/tests/satellite/test_satellite_manual.py
cfme/cloud/instance/gce.py
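The vm_default_args_rest property above leans on riggerlib's recursive_update to deep-merge the GCE-specific fields into the defaults inherited from the parent class. A minimal sketch of that merge, with made-up field values:

from riggerlib import recursive_update

inst_args = {'vm_fields': {'vm_name': 'test-gce-vm', 'instance_type': 'n1-standard-1'}}
recursive_update(inst_args, {'vm_fields': {'boot_disk_size': '10.GB'}})

# Nested mappings are merged in place rather than replaced wholesale.
assert inst_args['vm_fields']['vm_name'] == 'test-gce-vm'
assert inst_args['vm_fields']['boot_disk_size'] == '10.GB'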
"""Module handling report menus contents""" from contextlib import contextmanager import attr from navmazing import NavigateToAttribute from widgetastic.widget import Text from widgetastic_patternfly import Button from cfme.intelligence.reports import CloudIntelReportsView from cfme.intelligence.reports import ReportsMultiBoxSelect from cfme.modeling.base import BaseCollection from cfme.modeling.base import BaseEntity from cfme.utils.appliance.implementations.ui import CFMENavigateStep from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.appliance.implementations.ui import navigator from widgetastic_manageiq import FolderManager from widgetastic_manageiq import ManageIQTree class AllReportMenusView(CloudIntelReportsView): title = Text("#explorer_title_text") reports_tree = ManageIQTree("menu_roles_treebox") @property def is_displayed(self): return ( self.in_intel_reports and self.title.text == "All EVM Groups" and self.edit_report_menus.is_opened and self.edit_report_menus.tree.currently_selected == ["All EVM Groups"] ) class EditReportMenusView(AllReportMenusView): # Buttons save_button = Button("Save") reset_button = Button("Reset") default_button = Button("Default") cancel_button = Button("Cancel") commit_button = Button("Commit") discard_button = Button("Discard") manager = FolderManager(".//div[@id='folder_lists']/table") report_select = ReportsMultiBoxSelect( move_into="Move selected reports right", move_from="Move selected reports left", available_items="available_reports", chosen_items="selected_reports" ) @property def is_displayed(self): return ( self.in_intel_reports and self.title.text == 'Editing EVM Group "{}"'.format(self.context["object"].group) and self.edit_report_menus.is_opened and self.edit_report_menus.tree.currently_selected == [ "All EVM Groups", self.context["object"].group ] ) @attr.s class ReportMenu(BaseEntity): """ This is a fake class mainly needed for navmazing navigation. """ group = None def go_to_group(self, group_name): self.group = group_name view = navigate_to(self, "Edit") assert view.is_displayed return view def get_folders(self, group): """Returns list of folders for given user group. Args: group: User group to check. """ view = self.go_to_group(group) view.reports_tree.click_path("Top Level") fields = view.manager.fields view.discard_button.click() return fields def get_subfolders(self, group, folder): """Returns list of sub-folders for given user group and folder. Args: group: User group to check. folder: Folder to read. """ view = self.go_to_group(group) view.reports_tree.click_path("Top Level", folder) fields = view.manager.fields view.discard_button.click() return fields def _action(self, action, manager, folder_name): with manager as folder_manager: getattr(folder_manager, action)(folder_name) def add_folder(self, group, folder): """Adds a folder under top-level. Args: group: User group. folder: Name of the new folder. """ self._action("add", self.manage_folder(group), folder) def add_subfolder(self, group, folder, subfolder): """Adds a subfolder under specified folder. Args: group: User group. folder: Name of the folder. subfolder: Name of the new subfolder. """ self._action("add", self.manage_folder(group, folder), subfolder) def remove_folder(self, group, folder): """Removes a folder under top-level. Args: group: User group. folder: Name of the folder. """ self._action("delete", self.manage_folder(group), folder) def remove_subfolder(self, group, folder, subfolder): """Removes a subfolder under specified folder. 
Args: group: User group. folder: Name of the folder. subfolder: Name of the subfolder. """ self._action("delete", self.manage_folder(group, folder), subfolder) def reset_to_default(self, group): """Clicks the `Default` button. Args: group: Group to set to Default """ view = self.go_to_group(group) view.default_button.click() view.save_button.click() flash_view = self.create_view(AllReportMenusView) assert flash_view.flash.assert_message( 'Report Menu for role "{}" was saved'.format(group) ) @contextmanager def manage_subfolder(self, group, folder, subfolder): """Context manager to use when modifying the subfolder contents. You can use manager's :py:meth:`FolderManager.bail_out` classmethod to end and discard the changes done inside the with block. Args: group: User group. folder: Parent folder name. subfolder: Subfolder name to manage. Returns: Context-managed :py:class: `widgetastic_manageiq.MultiBoxSelect` instance """ view = self.go_to_group(group) view.reports_tree.click_path("Top Level", folder, subfolder) try: yield view.report_select except FolderManager._BailOut: view.discard_button.click() except Exception: # In case of any exception, nothing will be saved view.discard_button.click() raise # And reraise the exception else: # If no exception happens, save! view.commit_button.click() view.save_button.click() flash_view = self.create_view(AllReportMenusView) flash_view.flash.assert_message( 'Report Menu for role "{}" was saved'.format(group) ) @contextmanager def manage_folder(self, group, folder=None): """Context manager to use when modifying the folder contents. You can use manager's :py:meth:`FolderManager.bail_out` classmethod to end and discard the changes done inside the with block. This context manager does not give the manager as a value to the with block so you have to import and use the :py:class:`FolderManager` class manually. Args: group: User group. folder: Which folder to manage. If None, top-level will be managed. Returns: Context-managed :py:class:`widgetastic_manageiq.FolderManager` instance """ view = self.go_to_group(group) if folder is None: view.reports_tree.click_path("Top Level") else: view.reports_tree.click_path("Top Level", folder) try: yield view.manager except FolderManager._BailOut: view.manager.discard() except Exception: # In case of any exception, nothing will be saved view.manager.discard() raise # And reraise the exception else: # If no exception happens, save! view.manager.commit() view.save_button.click() flash_view = self.create_view(AllReportMenusView) flash_view.flash.assert_message( 'Report Menu for role "{}" was saved'.format(group) ) def move_reports(self, group, folder, subfolder, *reports): """ Moves a list of reports to a given menu Args: group: User group folder: Parent of the subfolder where reports are to be moved. subfolder: Subfolder under which the reports are to be moved. reports: List of reports that are to be moved. """ reports = list(reports) cancel_view = "" with self.manage_subfolder(group, folder, subfolder) as selected_menu: selected_options = selected_menu.parent_view.report_select.all_options diff = set(selected_options) & set(reports) if diff and (len(diff) == len(reports)): cancel_view = self.create_view(AllReportMenusView) # If all the reports to be moved are already present, raise an exception to exit. raise FolderManager._BailOut # fill method replaces all the options in all_options with the value passed as argument # We do not want to replace any value, we just want to move the new reports to a given # menu. 
            # This is a work-around for that purpose.
            reports.extend(selected_options)
            selected_menu.parent_view.report_select.fill(reports)
        if cancel_view:
            cancel_view.flash.assert_message(
                'Edit of Report Menu for role "{}" was cancelled by the user'.format(
                    group
                )
            )


@attr.s
class ReportMenusCollection(BaseCollection):
    """Collection object for the :py:class:'cfme.intelligence.reports.ReportMenu'."""
    ENTITY = ReportMenu


@navigator.register(ReportMenu, "Edit")
class EditReportMenus(CFMENavigateStep):
    VIEW = EditReportMenusView
    prerequisite = NavigateToAttribute(
        "appliance.collections.intel_report_menus", "All"
    )

    def step(self, *args, **kwargs):
        self.prerequisite_view.edit_report_menus.tree.click_path(
            "All EVM Groups", self.obj.group
        )


@navigator.register(ReportMenusCollection, "All")
class ReportMenus(CFMENavigateStep):
    VIEW = AllReportMenusView
    prerequisite = NavigateToAttribute("appliance.server", "CloudIntelReports")

    def step(self, *args, **kwargs):
        self.prerequisite_view.edit_report_menus.tree.click_path("All EVM Groups")
izapolsk/integration_tests
cfme/tests/satellite/test_satellite_manual.py
cfme/intelligence/reports/menus.py
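A hedged sketch of driving the ReportMenu entity above from a test; the appliance fixture, the group name, and the folder names are assumptions for illustration, not values taken from the suite.

def test_report_menu_folders(appliance):
    menu = appliance.collections.intel_report_menus.instantiate()
    group = 'EvmGroup-administrator'          # assumed group name
    menu.add_folder(group, 'My Folder')
    menu.add_subfolder(group, 'My Folder', 'My Subfolder')
    assert 'My Folder' in menu.get_folders(group)
    # manage_folder commits its changes on a clean exit from the with block.
    with menu.manage_folder(group, 'My Folder') as folder_manager:
        folder_manager.delete('My Subfolder')
    menu.reset_to_default(group)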
import attr import importscan import sentaku from cfme.generic_objects.definition.button_groups import GenericObjectButtonGroupsCollection from cfme.generic_objects.definition.button_groups import GenericObjectButtonsCollection from cfme.generic_objects.instance import GenericObjectInstanceCollection from cfme.modeling.base import BaseCollection from cfme.modeling.base import BaseEntity from cfme.utils.update import Updateable @attr.s class GenericObjectDefinition(BaseEntity, Updateable, sentaku.modeling.ElementMixin): """Generic Objects Definition class to context switch between UI and REST. Read/Update/Delete functionality. """ _collections = { 'generic_objects': GenericObjectInstanceCollection, 'generic_object_groups_buttons': GenericObjectButtonGroupsCollection, 'generic_object_buttons': GenericObjectButtonsCollection } update = sentaku.ContextualMethod() delete = sentaku.ContextualMethod() exists = sentaku.ContextualProperty() add_button = sentaku.ContextualMethod() add_button_group = sentaku.ContextualMethod() generic_objects = sentaku.ContextualProperty() generic_object_buttons = sentaku.ContextualProperty() instance_count = sentaku.ContextualProperty() name = attr.ib() description = attr.ib() attributes = attr.ib(default=None) # e.g. {'address': 'string'} associations = attr.ib(default=None) # e.g. {'services': 'Service'} methods = attr.ib(default=None) # e.g. ['method1', 'method2'] custom_image_file_path = attr.ib(default=None) rest_response = attr.ib(default=None, init=False) @attr.s class GenericObjectDefinitionCollection(BaseCollection, sentaku.modeling.ElementMixin): ENTITY = GenericObjectDefinition create = sentaku.ContextualMethod() all = sentaku.ContextualMethod() from cfme.generic_objects.definition import rest, ui # NOQA last for import cycles importscan.scan(rest) importscan.scan(ui)
izapolsk/integration_tests
cfme/tests/satellite/test_satellite_manual.py
cfme/generic_objects/definition/__init__.py
from os import path from urllib.error import URLError import attr from cached_property import cached_property from wrapanapi.systems.container import Openshift from cfme.common import Taggable from cfme.common.provider import DefaultEndpoint from cfme.common.vm_console import ConsoleMixin from cfme.containers.provider import ContainersProvider from cfme.containers.provider import ContainersProviderDefaultEndpoint from cfme.containers.provider import ContainersProviderEndpointsForm from cfme.control.explorer.alert_profiles import NodeAlertProfile from cfme.control.explorer.alert_profiles import ProviderAlertProfile from cfme.utils import ssh from cfme.utils.appliance.implementations.ui import navigate_to from cfme.utils.log import logger from cfme.utils.ocp_cli import OcpCli from cfme.utils.varmeth import variable from cfme.utils.wait import TimedOutError from cfme.utils.wait import wait_for class CustomAttribute(object): def __init__(self, name, value, field_type=None, href=None): self.name = name self.value = value self.field_type = field_type self.href = href class OpenshiftDefaultEndpoint(ContainersProviderDefaultEndpoint): """Represents Openshift default endpoint""" @staticmethod def get_ca_cert(connection_info): """Getting OpenShift's certificate from the master machine. Args: connection_info (dict): username, password and hostname for OCP returns: certificate's content. """ with ssh.SSHClient(**connection_info) as provider_ssh: _, stdout, _ = provider_ssh.exec_command("cat /etc/origin/master/ca.crt") return str("".join(stdout.readlines())) class ServiceBasedEndpoint(DefaultEndpoint): @property def view_value_mapping(self): out = {'hostname': self.hostname, 'api_port': self.api_port, 'sec_protocol': self.sec_protocol} if out['sec_protocol'] and self.sec_protocol.lower() == 'ssl trusting custom ca': out['trusted_ca_certificates'] = OpenshiftDefaultEndpoint.get_ca_cert( {"username": self.ssh_creds.principal, "password": self.ssh_creds.secret, "hostname": self.master_hostname}) return out class VirtualizationEndpoint(ServiceBasedEndpoint): """Represents virtualization Endpoint""" name = 'virtualization' @property def view_value_mapping(self): # values like host, port are taken from Default endpoint # and not editable in Virtualization endpoint, only token can be added return {'kubevirt_token': self.token} class MetricsEndpoint(ServiceBasedEndpoint): """Represents metrics Endpoint""" name = 'metrics' class AlertsEndpoint(ServiceBasedEndpoint): """Represents Alerts Endpoint""" name = 'alerts' @attr.s(cmp=False) class OpenshiftProvider(ContainersProvider, ConsoleMixin, Taggable): num_route = ['num_route'] STATS_TO_MATCH = ContainersProvider.STATS_TO_MATCH + num_route type_name = "openshift" mgmt_class = Openshift db_types = ["Openshift::ContainerManager"] endpoints_form = ContainersProviderEndpointsForm settings_key = 'ems_openshift' ems_pretty_name = 'OpenShift Container Platform' http_proxy = attr.ib(default=None) adv_http = attr.ib(default=None) adv_https = attr.ib(default=None) no_proxy = attr.ib(default=None) image_repo = attr.ib(default=None) image_reg = attr.ib(default=None) image_tag = attr.ib(default=None) cve_loc = attr.ib(default=None) virt_type = attr.ib(default=None) provider = attr.ib(default=None) def create(self, **kwargs): # Enable alerts collection before adding the provider to avoid missing active # alert after adding the provider # For more info: https://bugzilla.redhat.com/show_bug.cgi?id=1514950 if getattr(self, "alerts_type") == "Prometheus": alert_profiles = 
self.appliance.collections.alert_profiles provider_profile = alert_profiles.instantiate(ProviderAlertProfile, "Prometheus Provider Profile") node_profile = alert_profiles.instantiate(NodeAlertProfile, "Prometheus node Profile") for profile in [provider_profile, node_profile]: profile.assign_to("The Enterprise") super(OpenshiftProvider, self).create(**kwargs) @cached_property def cli(self): return OcpCli(self) def href(self): return self.appliance.rest_api.collections.providers\ .find_by(name=self.name).resources[0].href @property def view_value_mapping(self): mapping = {'name': self.name, 'zone': self.zone, 'prov_type': ('OpenShift Container Platform' if self.appliance.is_downstream else 'OpenShift')} mapping['metrics_type'] = self.metrics_type mapping['alerts_type'] = self.alerts_type mapping['proxy'] = { 'http_proxy': self.http_proxy } mapping['advanced'] = { 'adv_http': self.adv_http, 'adv_https': self.adv_https, 'no_proxy': self.no_proxy, 'image_repo': self.image_repo, 'image_reg': self.image_reg, 'image_tag': self.image_tag, 'cve_loc': self.cve_loc } mapping['virt_type'] = self.virt_type return mapping @property def is_provider_enabled(self): return self.appliance.rest_api.collections.providers.get(name=self.name).enabled @variable(alias='db') def num_route(self): return self._num_db_generic('container_routes') @num_route.variant('ui') def num_route_ui(self): view = navigate_to(self, "Details") return int(view.entities.summary("Relationships").get_text_of('Container Routes')) @variable(alias='db') def num_template(self): return self._num_db_generic('container_templates') @num_template.variant('ui') def num_template_ui(self): view = navigate_to(self, "Details") return int(view.entities.summary("Relationships").get_text_of("Container Templates")) @classmethod def from_config(cls, prov_config, prov_key, appliance=None): appliance = appliance or cls.appliance endpoints = {} token_creds = cls.process_credential_yaml_key(prov_config['credentials'], cred_type='token') master_hostname = prov_config['endpoints']['default'].hostname ssh_creds = cls.process_credential_yaml_key(prov_config['ssh_creds']) for endp in prov_config['endpoints']: # Add ssh_password for each endpoint, so get_ca_cert # will be able to get SSL cert form OCP for each endpoint setattr(prov_config['endpoints'][endp], "master_hostname", master_hostname) setattr(prov_config['endpoints'][endp], "ssh_creds", ssh_creds) if OpenshiftDefaultEndpoint.name == endp: prov_config['endpoints'][endp]['token'] = token_creds.token endpoints[endp] = OpenshiftDefaultEndpoint(**prov_config['endpoints'][endp]) elif MetricsEndpoint.name == endp: endpoints[endp] = MetricsEndpoint(**prov_config['endpoints'][endp]) elif AlertsEndpoint.name == endp: endpoints[endp] = AlertsEndpoint(**prov_config['endpoints'][endp]) else: raise Exception('Unsupported endpoint type "{}".'.format(endp)) settings = prov_config.get('settings', {}) advanced = settings.get('advanced', {}) http_proxy = settings.get('proxy', {}).get('http_proxy') adv_http, adv_https, no_proxy, image_repo, image_reg, image_tag, cve_loc = [ advanced.get(field) for field in ('adv_http', 'adv_https', 'no_proxy', 'image_repo', 'image_reg', 'image_tag', 'cve_loc') ] return appliance.collections.containers_providers.instantiate( prov_class=cls, name=prov_config.get('name'), key=prov_key, zone=prov_config.get('server_zone'), metrics_type=prov_config.get('metrics_type'), alerts_type=prov_config.get('alerts_type'), endpoints=endpoints, provider_data=prov_config, http_proxy=http_proxy, 
adv_http=adv_http, adv_https=adv_https, no_proxy=no_proxy, image_repo=image_repo, image_reg=image_reg, image_tag=image_tag, cve_loc=cve_loc, virt_type=prov_config.get('virt_type')) def custom_attributes(self): """returns custom attributes""" response = self.appliance.rest_api.get( path.join(self.href(), 'custom_attributes')) out = [] for attr_dict in response['resources']: attr = self.appliance.rest_api.get(attr_dict['href']) out.append( CustomAttribute( attr['name'], attr['value'], (attr['field_type'] if 'field_type' in attr else None), attr_dict['href'] ) ) return out def add_custom_attributes(self, *custom_attributes): """Adding static custom attributes to provider. Args: custom_attributes: The custom attributes to add. returns: response. """ if not custom_attributes: raise TypeError('{} takes at least 1 argument.' .format(self.add_custom_attributes.__name__)) for c_attr in custom_attributes: if not isinstance(c_attr, CustomAttribute): raise TypeError('All arguments should be of type {}. ({} != {})' .format(CustomAttribute, type(c_attr), CustomAttribute)) payload = { "action": "add", "resources": [{ "name": ca.name, "value": str(ca.value) } for ca in custom_attributes]} for i, fld_tp in enumerate([c_attr.field_type for c_attr in custom_attributes]): if fld_tp: payload['resources'][i]['field_type'] = fld_tp return self.appliance.rest_api.post( path.join(self.href(), 'custom_attributes'), **payload) def edit_custom_attributes(self, *custom_attributes): """Editing static custom attributes in provider. Args: custom_attributes: The custom attributes to edit. returns: response. """ if not custom_attributes: raise TypeError('{} takes at least 1 argument.' .format(self.edit_custom_attributes.__name__)) for c_attr in custom_attributes: if not isinstance(c_attr, CustomAttribute): raise TypeError('All arguments should be of type {}. ({} != {})' .format(CustomAttribute, type(c_attr), CustomAttribute)) attribs = self.custom_attributes() payload = { "action": "edit", "resources": [{ "href": [c_attr for c_attr in attribs if c_attr.name == ca.name][-1].href, "value": ca.value } for ca in custom_attributes]} return self.appliance.rest_api.post( path.join(self.href(), 'custom_attributes'), **payload) def delete_custom_attributes(self, *custom_attributes): """Deleting static custom attributes from provider. Args: custom_attributes: The custom attributes to delete. (Could be also names (str)) Returns: response. """ names = [] for c_attr in custom_attributes: attr_type = type(c_attr) if attr_type in (str, CustomAttribute): names.append(c_attr if attr_type is str else c_attr.name) else: raise TypeError('Type of arguments should be either' 'str or CustomAttribute. 
({} not in [str, CustomAttribute])' .format(type(c_attr))) attribs = self.custom_attributes() if not names: names = [attrib.name for attrib in attribs] payload = { "action": "delete", "resources": [{ "href": attrib.href, } for attrib in attribs if attrib.name in names]} return self.appliance.rest_api.post( path.join(self.href(), 'custom_attributes'), **payload) def sync_ssl_certificate(self): """ fixture which sync SSL certificate between CFME and OCP Args: provider (OpenShiftProvider): OCP system to sync cert from appliance (IPAppliance): CFME appliance to sync cert with Returns: None """ def _copy_certificate(): is_succeed = True try: # Copy certificate to the appliance provider_ssh.get_file("/etc/origin/master/ca.crt", "/tmp/ca.crt") appliance_ssh.put_file("/tmp/ca.crt", "/etc/pki/ca-trust/source/anchors/{crt}".format( crt=cert_name)) except URLError: logger.debug("Fail to deploy certificate from Openshift to CFME") is_succeed = False finally: return is_succeed provider_ssh = self.cli.ssh_client appliance_ssh = self.appliance.ssh_client() # Connection to the applince in case of dead connection if not appliance_ssh.connected: appliance_ssh.connect() # Checking if SSL is already configured between appliance and provider, # by send a HTTPS request (using SSL) from the appliance to the provider, # hiding the output and sending back the return code of the action _, stdout, stderr = \ appliance_ssh.exec_command( "curl https://{provider}:8443 -sS > /dev/null;echo $?".format( provider=self.provider_data.hostname)) # Do in case of failure (return code is not 0) if stdout.readline().replace('\n', "") != "0": cert_name = "{provider_name}.ca.crt".format( provider_name=self.provider_data.hostname.split(".")[0]) wait_for(_copy_certificate, num_sec=600, delay=30, message="Copy certificate from OCP to CFME") appliance_ssh.exec_command("update-ca-trust") # restarting evemserverd to apply the new SSL certificate self.appliance.evmserverd.restart() self.appliance.evmserverd.wait_for_running() self.appliance.wait_for_web_ui() def get_system_id(self): mgmt_systems_tbl = self.appliance.db.client['ext_management_systems'] return self.appliance.db.client.session.query(mgmt_systems_tbl).filter( mgmt_systems_tbl.name == self.name).first().id def get_metrics(self, **kwargs): """"Returns all the collected metrics for this provider Args: filters: list of dicts with column name and values e.g [{"resource_type": "Container"}, {"parent_ems_id": "1L"}] metrics_table: Metrics table name, there are few metrics table e.g metrics, metric_rollups, etc Returns: Query object with the relevant records """ filters = kwargs.get("filters", {}) metrics_table = kwargs.get("metrics_table", "metric_rollups") metrics_tbl = self.appliance.db.client[metrics_table] mgmt_system_id = self.get_system_id() logger.info("Getting metrics for {name} (parent_ems_id == {id})".format( name=self.name, id=mgmt_system_id)) if filters: logger.info("Filtering by: {f}".format(f=filters)) filters["parent_ems_id"] = mgmt_system_id return self.appliance.db.client.session.query(metrics_tbl).filter_by(**filters) def wait_for_collected_metrics(self, timeout="50m", table_name="metrics"): """Check the db if gathering collection data Args: timeout: timeout in minutes Return: Bool: is collected metrics count is greater than 0 """ def is_collected(): metrics_count = self.get_metrics(table=table_name).count() logger.info("Current metrics found count is {count}".format(count=metrics_count)) return metrics_count > 0 logger.info("Monitoring DB for metrics collection") 
result = True try: wait_for(is_collected, timeout=timeout, delay=30) except TimedOutError: logger.error( "Timeout exceeded, No metrics found in MIQ DB for the provider \"{name}\"".format( name=self.name)) result = False finally: return result def pause(self): """ Pause the OCP provider. Returns: API response. """ return self.appliance.rest_api.collections.providers.get(name=self.name).action.pause() def resume(self): """ Resume the OCP provider. Returns: API response. """ return self.appliance.rest_api.collections.providers.get(name=self.name).action.resume()
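The custom-attribute helpers defined on OpenshiftProvider above post directly against the provider's custom_attributes REST sub-collection. Below is a short sketch of the full round trip using only the CustomAttribute class and methods implemented in this module; the provider object itself is assumed to come from the usual setup fixtures.

def exercise_custom_attributes(provider):
    """Add, edit and delete a static custom attribute on an OCP provider."""
    attribute = CustomAttribute("environment", "staging")

    # "add" action: posts name/value pairs, with an optional field_type
    provider.add_custom_attributes(attribute)

    # "edit" action: matches the existing attribute by name and re-posts its href
    attribute.value = "production"
    provider.edit_custom_attributes(attribute)

    # "delete" action: accepts CustomAttribute objects or plain attribute names
    provider.delete_custom_attributes("environment")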
# pylint: skip-file """Manual tests""" import pytest from cfme import test_requirements pytestmark = [pytest.mark.ignore_stream('upstream')] @pytest.mark.manual @test_requirements.satellite def test_no_rbac_warnings_in_logs_when_viewing_satellite_provider(): """ RBAC-related warnings logged when viewing Satellite provider in web UI Bugzilla: 1565266 1.) Add Satellite provider. 2.) Click on items under Providers accordion. 3.) View evm.log. No WARN-level messages should be logged. [----] W, [2018-04-09T14:09:19.654859 #13384:84e658] WARN -- : MIQ(Rbac::Filterer#lookup_method_for_descendant_class) could not find method name for ConfiguredSystem::ConfiguredSystem Polarion: assignee: tpapaioa casecomponent: Provisioning caseimportance: medium initialEstimate: 1/15h """ pass @pytest.mark.manual @test_requirements.satellite def test_satellite_host_groups_show_up_as_configuration_profiles_satellite_62(): """ For the Satellite provider satellite_62, both the centos and fedora- cloud configuration profiles show up in Configuration > Manage, in the accordion menu under All Configuration Manager Providers > Red Hat Satellite Providers > satellite_62 Configuration Manager. Polarion: assignee: tpapaioa casecomponent: Configuration caseimportance: medium initialEstimate: 1/15h """ pass @pytest.mark.manual @test_requirements.satellite def test_satellite_credential_validation_times_out_with_error_message(): """ Bug 1564601 - Satellite credential validation times out with no error message Bugzilla: 1564601 When adding a new Satellite configuration provider, if the URL cannot be accessed because of a firewall dropping packets, then credential validation should time out after 2 minutes with a flash message. Polarion: assignee: tpapaioa casecomponent: Provisioning caseimportance: medium initialEstimate: 1/6h """ pass
izapolsk/integration_tests
cfme/tests/satellite/test_satellite_manual.py
cfme/containers/provider/openshift.py
"""An example config:: artifactor: log_dir: /home/test/workspace/cfme_tests/artiout per_run: test #test, run, None reuse_dir: True squash_exceptions: False threaded: False server_address: 127.0.0.1 server_port: 21212 server_enabled: True plugins: ``log_dir`` is the destination for all artifacts ``per_run`` denotes if the test artifacts should be group by run, test, or None ``reuse_dir`` if this is False and Artifactor comes across a dir that has already been used, it will die """ import atexit import os import subprocess from threading import RLock import diaper import pytest from artifactor import ArtifactorClient from cfme.fixtures.pytest_store import store from cfme.fixtures.pytest_store import write_line from cfme.markers.polarion import extract_polarion_ids from cfme.utils.appliance import find_appliance from cfme.utils.blockers import Blocker from cfme.utils.blockers import BZ from cfme.utils.conf import credentials from cfme.utils.conf import env from cfme.utils.log import logger from cfme.utils.net import net_check from cfme.utils.net import random_port from cfme.utils.wait import wait_for UNDER_TEST = False # set to true for artifactor using tests # Create a list of all our passwords for use with the sanitize request later in this module # Filter out all Nones as it will mess the output up. words = [word for word in {v.get('password') for v in credentials.values()} if word is not None] def get_test_idents(item): try: return item.location[2], item.location[0] except AttributeError: try: return item.fspath.strpath, None except AttributeError: return (None, None) def get_name(obj): return (getattr(obj, '_param_name', None) or getattr(obj, 'name', None) or str(obj)) class DummyClient(object): def fire_hook(self, *args, **kwargs): return def terminate(self): return def task_status(self): return def __bool__(self): # DummyClient is always False, # so it's easy to see if we have an artiactor client return False def get_client(art_config, pytest_config): if art_config and not UNDER_TEST: port = getattr(pytest_config.option, 'artifactor_port', None) or \ art_config.get('server_port') or random_port() pytest_config.option.artifactor_port = port art_config['server_port'] = port return ArtifactorClient( art_config['server_address'], art_config['server_port']) else: return DummyClient() def spawn_server(config, art_client): if store.slave_manager or UNDER_TEST: return None import subprocess cmd = ['miq-artifactor-server', '--port', str(art_client.port)] if config.getvalue('run_id'): cmd.append('--run-id') cmd.append(str(config.getvalue('run_id'))) proc = subprocess.Popen(cmd) return proc session_ver = None session_build = None session_stream = None session_fw_version = None def pytest_addoption(parser): parser.addoption("--run-id", action="store", default=None, help="A run id to assist in logging") @pytest.hookimpl(tryfirst=True) def pytest_configure(config): if config.getoption('--help'): return art_client = get_client( art_config=env.get('artifactor', {}), pytest_config=config) # just in case if not store.slave_manager: with diaper: atexit.register(shutdown, config) if art_client: config._art_proc = spawn_server(config, art_client) wait_for( net_check, func_args=[art_client.port, '127.0.0.1'], func_kwargs={'force': True}, num_sec=10, message="wait for artifactor to start") art_client.ready = True else: config._art_proc = None from cfme.utils.log import artifactor_handler artifactor_handler.artifactor = art_client if store.slave_manager: artifactor_handler.slaveid = store.slaveid 
config._art_client = art_client def fire_art_hook(config, hook, **hook_args): client = getattr(config, '_art_client', None) if client is None: assert UNDER_TEST, 'missing artifactor is only valid for inprocess tests' else: return client.fire_hook(hook, **hook_args) def fire_art_test_hook(node, hook, **hook_args): name, location = get_test_idents(node) return fire_art_hook( node.config, hook, test_name=name, test_location=location, **hook_args) @pytest.hookimpl(hookwrapper=True) def pytest_runtest_protocol(item): global session_ver global session_build global session_stream appliance = find_appliance(item) if not session_ver: session_ver = str(appliance.version) session_build = appliance.build session_stream = appliance.version.stream() if str(session_ver) not in session_build: session_build = "{}-{}".format(str(session_ver), session_build) session_fw_version = None try: proc = subprocess.Popen(['git', 'describe', '--tags'], stdout=subprocess.PIPE) proc.wait() session_fw_version = proc.stdout.read().strip() except Exception: pass # already set session_fw_version to None fire_art_hook( item.config, 'session_info', version=session_ver, build=session_build, stream=session_stream, fw_version=session_fw_version ) tier = item.get_closest_marker('tier') if tier: tier = tier.args[0] requirement = item.get_closest_marker('requirement') if requirement: requirement = requirement.args[0] param_dict = {} try: params = item.callspec.params param_dict = {p: get_name(v) for p, v in params.items()} except Exception: pass # already set param_dict ip = appliance.hostname # This pre_start_test hook is needed so that filedump is able to make get the test # object set up before the logger starts logging. As the logger fires a nested hook # to the filedumper, and we can't specify order inriggerlib. 
meta = item.get_closest_marker('meta') if meta and 'blockers' in meta.kwargs: blocker_spec = meta.kwargs['blockers'] blockers = [] for blocker in blocker_spec: if isinstance(blocker, int): blockers.append(BZ(blocker).url) else: blockers.append(Blocker.parse(blocker).url) else: blockers = [] fire_art_test_hook( item, 'pre_start_test', slaveid=store.slaveid, ip=ip) fire_art_test_hook( item, 'start_test', slaveid=store.slaveid, ip=ip, tier=tier, requirement=requirement, param_dict=param_dict, issues=blockers) yield def pytest_runtest_teardown(item, nextitem): name, location = get_test_idents(item) app = find_appliance(item) ip = app.hostname fire_art_test_hook( item, 'finish_test', slaveid=store.slaveid, ip=ip, wait_for_task=True) fire_art_test_hook(item, 'sanitize', words=words) jenkins_data = { 'build_url': os.environ.get('BUILD_URL'), 'build_number': os.environ.get('BUILD_NUMBER'), 'git_commit': os.environ.get('GIT_COMMIT'), 'job_name': os.environ.get('JOB_NAME') } param_dict = None try: caps = app.browser.widgetastic.selenium.capabilities param_dict = { 'browserName': caps.get('browserName', 'Unknown'), 'browserPlatform': caps.get('platformName', caps.get('platform', 'Unknown')), 'browserVersion': caps.get('browserVersion', caps.get('version', 'Unknown')) } except Exception: logger.exception("Couldn't grab browser env_vars") pass # already set param_dict fire_art_test_hook( item, 'ostriz_send', env_params=param_dict, slaveid=store.slaveid, polarion_ids=extract_polarion_ids(item), jenkins=jenkins_data) def pytest_runtest_logreport(report): if store.slave_manager: return # each node does its own reporting config = store.config # tech debt name, location = get_test_idents(report) xfail = hasattr(report, 'wasxfail') if hasattr(report, 'skipped'): if report.skipped: fire_art_hook( config, 'filedump', test_location=location, test_name=name, description="Short traceback", contents=report.longreprtext, file_type="short_tb", group_id="skipped") fire_art_hook( config, 'report_test', test_location=location, test_name=name, test_xfail=xfail, test_when=report.when, test_outcome=report.outcome, test_phase_duration=report.duration) fire_art_hook(config, 'build_report') @pytest.hookimpl(hookwrapper=True) def pytest_unconfigure(config): yield shutdown(config) lock = RLock() def shutdown(config): app = find_appliance(config, require=False) if app is not None: with lock: proc = config._art_proc if proc and proc.returncode is None: if not store.slave_manager: write_line('collecting artifacts') fire_art_hook(config, 'finish_session') if not store.slave_manager: config._art_client.terminate() proc.wait()
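Everything in this plugin funnels through fire_art_hook/fire_art_test_hook, so other fixtures can push ad-hoc artifacts the same way. A hedged sketch of such a fixture, modelled on the filedump calls already made in pytest_runtest_logreport above; the fixture itself is illustrative and not part of this plugin.

@pytest.fixture
def dump_debug_artifact(request):
    """Return a callable that files arbitrary text as an artifact of the current test."""

    def _dump(description, contents, file_type="debug", group_id="debug"):
        # Same keyword arguments the "filedump" hook already receives above.
        fire_art_test_hook(
            request.node,
            "filedump",
            description=description,
            contents=contents,
            file_type=file_type,
            group_id=group_id,
        )

    return _dump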
# pylint: skip-file """Manual tests""" import pytest from cfme import test_requirements pytestmark = [pytest.mark.ignore_stream('upstream')] @pytest.mark.manual @test_requirements.satellite def test_no_rbac_warnings_in_logs_when_viewing_satellite_provider(): """ RBAC-related warnings logged when viewing Satellite provider in web UI Bugzilla: 1565266 1.) Add Satellite provider. 2.) Click on items under Providers accordion. 3.) View evm.log. No WARN-level messages should be logged. [----] W, [2018-04-09T14:09:19.654859 #13384:84e658] WARN -- : MIQ(Rbac::Filterer#lookup_method_for_descendant_class) could not find method name for ConfiguredSystem::ConfiguredSystem Polarion: assignee: tpapaioa casecomponent: Provisioning caseimportance: medium initialEstimate: 1/15h """ pass @pytest.mark.manual @test_requirements.satellite def test_satellite_host_groups_show_up_as_configuration_profiles_satellite_62(): """ For the Satellite provider satellite_62, both the centos and fedora- cloud configuration profiles show up in Configuration > Manage, in the accordion menu under All Configuration Manager Providers > Red Hat Satellite Providers > satellite_62 Configuration Manager. Polarion: assignee: tpapaioa casecomponent: Configuration caseimportance: medium initialEstimate: 1/15h """ pass @pytest.mark.manual @test_requirements.satellite def test_satellite_credential_validation_times_out_with_error_message(): """ Bug 1564601 - Satellite credential validation times out with no error message Bugzilla: 1564601 When adding a new Satellite configuration provider, if the URL cannot be accessed because of a firewall dropping packets, then credential validation should time out after 2 minutes with a flash message. Polarion: assignee: tpapaioa casecomponent: Provisioning caseimportance: medium initialEstimate: 1/6h """ pass
izapolsk/integration_tests
cfme/tests/satellite/test_satellite_manual.py
cfme/fixtures/artifactor_plugin.py
"""Util to handle processes.""" import subprocess def kill_subprocess(process: subprocess.Popen) -> None: """Force kill a subprocess and wait for it to exit.""" process.kill() process.communicate() process.wait() del process
"""The tests for Home Assistant frontend.""" from datetime import timedelta import re import pytest from homeassistant.components.frontend import ( CONF_EXTRA_HTML_URL, CONF_EXTRA_HTML_URL_ES5, CONF_JS_VERSION, CONF_THEMES, DOMAIN, EVENT_PANELS_UPDATED, THEMES_STORAGE_KEY, ) from homeassistant.components.websocket_api.const import TYPE_RESULT from homeassistant.const import HTTP_NOT_FOUND from homeassistant.loader import async_get_integration from homeassistant.setup import async_setup_component from homeassistant.util import dt from tests.async_mock import patch from tests.common import async_capture_events, async_fire_time_changed CONFIG_THEMES = { DOMAIN: { CONF_THEMES: { "happy": {"primary-color": "red"}, "dark": {"primary-color": "black"}, } } } @pytest.fixture def mock_http_client(hass, aiohttp_client): """Start the Home Assistant HTTP component.""" hass.loop.run_until_complete(async_setup_component(hass, "frontend", {})) return hass.loop.run_until_complete(aiohttp_client(hass.http.app)) @pytest.fixture def mock_http_client_with_themes(hass, aiohttp_client): """Start the Home Assistant HTTP component.""" hass.loop.run_until_complete( async_setup_component( hass, "frontend", {DOMAIN: {CONF_THEMES: {"happy": {"primary-color": "red"}}}}, ) ) return hass.loop.run_until_complete(aiohttp_client(hass.http.app)) @pytest.fixture def mock_http_client_with_urls(hass, aiohttp_client): """Start the Home Assistant HTTP component.""" hass.loop.run_until_complete( async_setup_component( hass, "frontend", { DOMAIN: { CONF_JS_VERSION: "auto", CONF_EXTRA_HTML_URL: ["https://domain.com/my_extra_url.html"], CONF_EXTRA_HTML_URL_ES5: [ "https://domain.com/my_extra_url_es5.html" ], } }, ) ) return hass.loop.run_until_complete(aiohttp_client(hass.http.app)) @pytest.fixture def mock_onboarded(): """Mock that we're onboarded.""" with patch( "homeassistant.components.onboarding.async_is_onboarded", return_value=True ): yield async def test_frontend_and_static(mock_http_client, mock_onboarded): """Test if we can get the frontend.""" resp = await mock_http_client.get("") assert resp.status == 200 assert "cache-control" not in resp.headers text = await resp.text() # Test we can retrieve frontend.js frontendjs = re.search(r"(?P<app>\/frontend_es5\/app.[A-Za-z0-9]{8}.js)", text) assert frontendjs is not None, text resp = await mock_http_client.get(frontendjs.groups(0)[0]) assert resp.status == 200 assert "public" in resp.headers.get("cache-control") async def test_dont_cache_service_worker(mock_http_client): """Test that we don't cache the service worker.""" resp = await mock_http_client.get("/service_worker.js") assert resp.status == 200 assert "cache-control" not in resp.headers async def test_404(mock_http_client): """Test for HTTP 404 error.""" resp = await mock_http_client.get("/not-existing") assert resp.status == HTTP_NOT_FOUND async def test_we_cannot_POST_to_root(mock_http_client): """Test that POST is not allow to root.""" resp = await mock_http_client.post("/") assert resp.status == 405 async def test_themes_api(hass, hass_ws_client): """Test that /api/themes returns correct data.""" assert await async_setup_component(hass, "frontend", CONFIG_THEMES) client = await hass_ws_client(hass) await client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["default_theme"] == "default" assert msg["result"]["default_dark_theme"] is None assert msg["result"]["themes"] == { "happy": {"primary-color": "red"}, "dark": {"primary-color": "black"}, } # safe mode 
hass.config.safe_mode = True await client.send_json({"id": 6, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["default_theme"] == "safe_mode" assert msg["result"]["themes"] == { "safe_mode": {"primary-color": "#db4437", "accent-color": "#ffca28"} } async def test_themes_persist(hass, hass_ws_client, hass_storage): """Test that theme settings are restores after restart.""" hass_storage[THEMES_STORAGE_KEY] = { "key": THEMES_STORAGE_KEY, "version": 1, "data": { "frontend_default_theme": "happy", "frontend_default_dark_theme": "dark", }, } assert await async_setup_component(hass, "frontend", CONFIG_THEMES) client = await hass_ws_client(hass) await client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["default_theme"] == "happy" assert msg["result"]["default_dark_theme"] == "dark" async def test_themes_save_storage(hass, hass_storage): """Test that theme settings are restores after restart.""" hass_storage[THEMES_STORAGE_KEY] = { "key": THEMES_STORAGE_KEY, "version": 1, "data": {}, } assert await async_setup_component(hass, "frontend", CONFIG_THEMES) await hass.services.async_call( DOMAIN, "set_theme", {"name": "happy"}, blocking=True ) await hass.services.async_call( DOMAIN, "set_theme", {"name": "dark", "mode": "dark"}, blocking=True ) # To trigger the call_later async_fire_time_changed(hass, dt.utcnow() + timedelta(seconds=60)) # To execute the save await hass.async_block_till_done() assert hass_storage[THEMES_STORAGE_KEY]["data"] == { "frontend_default_theme": "happy", "frontend_default_dark_theme": "dark", } async def test_themes_set_theme(hass, hass_ws_client): """Test frontend.set_theme service.""" assert await async_setup_component(hass, "frontend", CONFIG_THEMES) client = await hass_ws_client(hass) await hass.services.async_call( DOMAIN, "set_theme", {"name": "happy"}, blocking=True ) await client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["default_theme"] == "happy" await hass.services.async_call( DOMAIN, "set_theme", {"name": "default"}, blocking=True ) await client.send_json({"id": 6, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["default_theme"] == "default" await hass.services.async_call( DOMAIN, "set_theme", {"name": "happy"}, blocking=True ) await hass.services.async_call(DOMAIN, "set_theme", {"name": "none"}, blocking=True) await client.send_json({"id": 7, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["default_theme"] == "default" async def test_themes_set_theme_wrong_name(hass, hass_ws_client): """Test frontend.set_theme service called with wrong name.""" assert await async_setup_component(hass, "frontend", CONFIG_THEMES) client = await hass_ws_client(hass) await hass.services.async_call( DOMAIN, "set_theme", {"name": "wrong"}, blocking=True ) await client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["default_theme"] == "default" async def test_themes_set_dark_theme(hass, hass_ws_client): """Test frontend.set_theme service called with dark mode.""" assert await async_setup_component(hass, "frontend", CONFIG_THEMES) client = await hass_ws_client(hass) await hass.services.async_call( DOMAIN, "set_theme", {"name": "dark", "mode": "dark"}, blocking=True ) await client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await client.receive_json() assert 
msg["result"]["default_dark_theme"] == "dark" await hass.services.async_call( DOMAIN, "set_theme", {"name": "default", "mode": "dark"}, blocking=True ) await client.send_json({"id": 6, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["default_dark_theme"] == "default" await hass.services.async_call( DOMAIN, "set_theme", {"name": "none", "mode": "dark"}, blocking=True ) await client.send_json({"id": 7, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["default_dark_theme"] is None async def test_themes_set_dark_theme_wrong_name(hass, hass_ws_client): """Test frontend.set_theme service called with mode dark and wrong name.""" assert await async_setup_component(hass, "frontend", CONFIG_THEMES) client = await hass_ws_client(hass) await hass.services.async_call( DOMAIN, "set_theme", {"name": "wrong", "mode": "dark"}, blocking=True ) await client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["default_dark_theme"] is None async def test_themes_reload_themes(hass, hass_ws_client): """Test frontend.reload_themes service.""" assert await async_setup_component(hass, "frontend", CONFIG_THEMES) client = await hass_ws_client(hass) with patch( "homeassistant.components.frontend.async_hass_config_yaml", return_value={DOMAIN: {CONF_THEMES: {"sad": {"primary-color": "blue"}}}}, ): await hass.services.async_call( DOMAIN, "set_theme", {"name": "happy"}, blocking=True ) await hass.services.async_call(DOMAIN, "reload_themes", blocking=True) await client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["result"]["themes"] == {"sad": {"primary-color": "blue"}} assert msg["result"]["default_theme"] == "default" async def test_missing_themes(hass, hass_ws_client): """Test that themes API works when themes are not defined.""" await async_setup_component(hass, "frontend", {}) client = await hass_ws_client(hass) await client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await client.receive_json() assert msg["id"] == 5 assert msg["type"] == TYPE_RESULT assert msg["success"] assert msg["result"]["default_theme"] == "default" assert msg["result"]["themes"] == {} async def test_get_panels(hass, hass_ws_client, mock_http_client): """Test get_panels command.""" events = async_capture_events(hass, EVENT_PANELS_UPDATED) resp = await mock_http_client.get("/map") assert resp.status == HTTP_NOT_FOUND hass.components.frontend.async_register_built_in_panel( "map", "Map", "mdi:tooltip-account", require_admin=True ) resp = await mock_http_client.get("/map") assert resp.status == 200 assert len(events) == 1 client = await hass_ws_client(hass) await client.send_json({"id": 5, "type": "get_panels"}) msg = await client.receive_json() assert msg["id"] == 5 assert msg["type"] == TYPE_RESULT assert msg["success"] assert msg["result"]["map"]["component_name"] == "map" assert msg["result"]["map"]["url_path"] == "map" assert msg["result"]["map"]["icon"] == "mdi:tooltip-account" assert msg["result"]["map"]["title"] == "Map" assert msg["result"]["map"]["require_admin"] is True hass.components.frontend.async_remove_panel("map") resp = await mock_http_client.get("/map") assert resp.status == HTTP_NOT_FOUND assert len(events) == 2 async def test_get_panels_non_admin(hass, hass_ws_client, hass_admin_user): """Test get_panels command.""" hass_admin_user.groups = [] await async_setup_component(hass, "frontend", {}) 
hass.components.frontend.async_register_built_in_panel( "map", "Map", "mdi:tooltip-account", require_admin=True ) hass.components.frontend.async_register_built_in_panel( "history", "History", "mdi:history" ) client = await hass_ws_client(hass) await client.send_json({"id": 5, "type": "get_panels"}) msg = await client.receive_json() assert msg["id"] == 5 assert msg["type"] == TYPE_RESULT assert msg["success"] assert "history" in msg["result"] assert "map" not in msg["result"] async def test_get_translations(hass, hass_ws_client): """Test get_translations command.""" await async_setup_component(hass, "frontend", {}) client = await hass_ws_client(hass) with patch( "homeassistant.components.frontend.async_get_translations", side_effect=lambda hass, lang, category, integration, config_flow: { "lang": lang }, ): await client.send_json( { "id": 5, "type": "frontend/get_translations", "language": "nl", "category": "lang", } ) msg = await client.receive_json() assert msg["id"] == 5 assert msg["type"] == TYPE_RESULT assert msg["success"] assert msg["result"] == {"resources": {"lang": "nl"}} async def test_auth_load(mock_http_client, mock_onboarded): """Test auth component loaded by default.""" resp = await mock_http_client.get("/auth/providers") assert resp.status == 200 async def test_onboarding_load(mock_http_client): """Test onboarding component loaded by default.""" resp = await mock_http_client.get("/api/onboarding") assert resp.status == 200 async def test_auth_authorize(mock_http_client): """Test the authorize endpoint works.""" resp = await mock_http_client.get( "/auth/authorize?response_type=code&client_id=https://localhost/&" "redirect_uri=https://localhost/&state=123%23456" ) assert resp.status == 200 # No caching of auth page. assert "cache-control" not in resp.headers text = await resp.text() # Test we can retrieve authorize.js authorizejs = re.search( r"(?P<app>\/frontend_latest\/authorize.[A-Za-z0-9]{8}.js)", text ) assert authorizejs is not None, text resp = await mock_http_client.get(authorizejs.groups(0)[0]) assert resp.status == 200 assert "public" in resp.headers.get("cache-control") async def test_get_version(hass, hass_ws_client): """Test get_version command.""" frontend = await async_get_integration(hass, "frontend") cur_version = next( req.split("==", 1)[1] for req in frontend.requirements if req.startswith("home-assistant-frontend==") ) await async_setup_component(hass, "frontend", {}) client = await hass_ws_client(hass) await client.send_json({"id": 5, "type": "frontend/get_version"}) msg = await client.receive_json() assert msg["id"] == 5 assert msg["type"] == TYPE_RESULT assert msg["success"] assert msg["result"] == {"version": cur_version} async def test_static_paths(hass, mock_http_client): """Test static paths.""" resp = await mock_http_client.get( "/.well-known/change-password", allow_redirects=False ) assert resp.status == 302 assert resp.headers["location"] == "/profile"
GenericStudent/home-assistant
tests/components/frontend/test_init.py
homeassistant/util/process.py
# flake8: noqa __docformat__ = "restructuredtext" # Let users know if they're missing any of our hard dependencies hard_dependencies = ("numpy", "pytz", "dateutil") missing_dependencies = [] for dependency in hard_dependencies: try: __import__(dependency) except ImportError as e: missing_dependencies.append(f"{dependency}: {e}") if missing_dependencies: raise ImportError( "Unable to import required dependencies:\n" + "\n".join(missing_dependencies) ) del hard_dependencies, dependency, missing_dependencies # numpy compat from pandas.compat import ( np_version_under1p18 as _np_version_under1p18, is_numpy_dev as _is_numpy_dev, ) try: from pandas._libs import hashtable as _hashtable, lib as _lib, tslib as _tslib except ImportError as e: # pragma: no cover # hack but overkill to use re module = str(e).replace("cannot import name ", "") raise ImportError( f"C extension: {module} not built. If you want to import " "pandas from the source directory, you may need to run " "'python setup.py build_ext --force' to build the C extensions first." ) from e from pandas._config import ( get_option, set_option, reset_option, describe_option, option_context, options, ) # let init-time option registration happen import pandas.core.config_init from pandas.core.api import ( # dtype Int8Dtype, Int16Dtype, Int32Dtype, Int64Dtype, UInt8Dtype, UInt16Dtype, UInt32Dtype, UInt64Dtype, Float32Dtype, Float64Dtype, CategoricalDtype, PeriodDtype, IntervalDtype, DatetimeTZDtype, StringDtype, BooleanDtype, # missing NA, isna, isnull, notna, notnull, # indexes Index, CategoricalIndex, Int64Index, UInt64Index, RangeIndex, Float64Index, MultiIndex, IntervalIndex, TimedeltaIndex, DatetimeIndex, PeriodIndex, IndexSlice, # tseries NaT, Period, period_range, Timedelta, timedelta_range, Timestamp, date_range, bdate_range, Interval, interval_range, DateOffset, # conversion to_numeric, to_datetime, to_timedelta, # misc Flags, Grouper, factorize, unique, value_counts, NamedAgg, array, Categorical, set_eng_float_format, Series, DataFrame, ) from pandas.core.arrays.sparse import SparseDtype from pandas.tseries.api import infer_freq from pandas.tseries import offsets from pandas.core.computation.api import eval from pandas.core.reshape.api import ( concat, lreshape, melt, wide_to_long, merge, merge_asof, merge_ordered, crosstab, pivot, pivot_table, get_dummies, cut, qcut, ) import pandas.api from pandas.util._print_versions import show_versions from pandas.io.api import ( # excel ExcelFile, ExcelWriter, read_excel, # parsers read_csv, read_fwf, read_table, # pickle read_pickle, to_pickle, # pytables HDFStore, read_hdf, # sql read_sql, read_sql_query, read_sql_table, # misc read_clipboard, read_parquet, read_orc, read_feather, read_gbq, read_html, read_xml, read_json, read_stata, read_sas, read_spss, ) from pandas.io.json import _json_normalize as json_normalize from pandas.util._tester import test import pandas.testing import pandas.arrays # use the closest tagged version if possible from pandas._version import get_versions v = get_versions() __version__ = v.get("closest-tag", v["version"]) __git_version__ = v.get("full-revisionid") del get_versions, v # GH 27101 def __getattr__(name): import warnings if name == "datetime": warnings.warn( "The pandas.datetime class is deprecated " "and will be removed from pandas in a future version. 
" "Import from datetime module instead.", FutureWarning, stacklevel=2, ) from datetime import datetime as dt return dt elif name == "np": warnings.warn( "The pandas.np module is deprecated " "and will be removed from pandas in a future version. " "Import numpy directly instead", FutureWarning, stacklevel=2, ) import numpy as np return np elif name in {"SparseSeries", "SparseDataFrame"}: warnings.warn( f"The {name} class is removed from pandas. Accessing it from " "the top-level namespace will also be removed in the next version", FutureWarning, stacklevel=2, ) return type(name, (), {}) elif name == "SparseArray": warnings.warn( "The pandas.SparseArray class is deprecated " "and will be removed from pandas in a future version. " "Use pandas.arrays.SparseArray instead.", FutureWarning, stacklevel=2, ) from pandas.core.arrays.sparse import SparseArray as _SparseArray return _SparseArray raise AttributeError(f"module 'pandas' has no attribute '{name}'") # module level doc-string __doc__ = """ pandas - a powerful data analysis and manipulation library for Python ===================================================================== **pandas** is a Python package providing fast, flexible, and expressive data structures designed to make working with "relational" or "labeled" data both easy and intuitive. It aims to be the fundamental high-level building block for doing practical, **real world** data analysis in Python. Additionally, it has the broader goal of becoming **the most powerful and flexible open source data analysis / manipulation tool available in any language**. It is already well on its way toward this goal. Main Features ------------- Here are just a few of the things that pandas does well: - Easy handling of missing data in floating point as well as non-floating point data. - Size mutability: columns can be inserted and deleted from DataFrame and higher dimensional objects - Automatic and explicit data alignment: objects can be explicitly aligned to a set of labels, or the user can simply ignore the labels and let `Series`, `DataFrame`, etc. automatically align the data for you in computations. - Powerful, flexible group by functionality to perform split-apply-combine operations on data sets, for both aggregating and transforming data. - Make it easy to convert ragged, differently-indexed data in other Python and NumPy data structures into DataFrame objects. - Intelligent label-based slicing, fancy indexing, and subsetting of large data sets. - Intuitive merging and joining data sets. - Flexible reshaping and pivoting of data sets. - Hierarchical labeling of axes (possible to have multiple labels per tick). - Robust IO tools for loading data from flat files (CSV and delimited), Excel files, databases, and saving/loading data from the ultrafast HDF5 format. - Time series-specific functionality: date range generation and frequency conversion, moving window statistics, date shifting and lagging. """
""" Tests parsers ability to read and parse non-local files and hence require a network connection to be read. """ from io import ( BytesIO, StringIO, ) import logging import numpy as np import pytest import pandas.util._test_decorators as td from pandas import DataFrame import pandas._testing as tm from pandas.io.feather_format import read_feather from pandas.io.parsers import read_csv @pytest.mark.network @pytest.mark.parametrize( "compress_type, extension", [("gzip", ".gz"), ("bz2", ".bz2"), ("zip", ".zip"), ("xz", ".xz")], ) @pytest.mark.parametrize("mode", ["explicit", "infer"]) @pytest.mark.parametrize("engine", ["python", "c"]) def test_compressed_urls(salaries_table, compress_type, extension, mode, engine): check_compressed_urls(salaries_table, compress_type, extension, mode, engine) @tm.network def check_compressed_urls(salaries_table, compression, extension, mode, engine): # test reading compressed urls with various engines and # extension inference base_url = ( "https://github.com/pandas-dev/pandas/raw/master/" "pandas/tests/io/parser/data/salaries.csv" ) url = base_url + extension if mode != "explicit": compression = mode url_table = read_csv(url, sep="\t", compression=compression, engine=engine) tm.assert_frame_equal(url_table, salaries_table) @tm.network("https://raw.githubusercontent.com/", check_before_test=True) def test_url_encoding_csv(): """ read_csv should honor the requested encoding for URLs. GH 10424 """ path = ( "https://raw.githubusercontent.com/pandas-dev/pandas/master/" + "pandas/tests/io/parser/data/unicode_series.csv" ) df = read_csv(path, encoding="latin-1", header=None) assert df.loc[15, 1] == "Á köldum klaka (Cold Fever) (1994)" @pytest.fixture def tips_df(datapath): """DataFrame with the tips dataset.""" return read_csv(datapath("io", "data", "csv", "tips.csv")) @pytest.mark.usefixtures("s3_resource") @td.skip_if_not_us_locale() class TestS3: @td.skip_if_no("s3fs") def test_parse_public_s3_bucket(self, tips_df, s3so): # more of an integration test due to the not-public contents portion # can probably mock this though. 
for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) # Read public file from bucket with not-public contents df = read_csv("s3://cant_get_it/tips.csv", storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3n_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3n" URL df = read_csv("s3n://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3a_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3a" URL df = read_csv("s3a://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_nrows(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_chunked(self, tips_df, s3so): # Read with a chunksize chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them # properly. df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_chunked_python(self, tips_df, s3so): # Read with a chunksize using the Python parser chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, engine="python", storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them properly. 
df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_infer_s3_compression(self, tips_df, s3so): for ext in ["", ".gz", ".bz2"]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression="infer", storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3_bucket_nrows_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_read_s3_fails(self, s3so): msg = "The specified bucket does not exist" with pytest.raises(IOError, match=msg): read_csv("s3://nyqpug/asdf.csv", storage_options=s3so) # Receive a permission error when trying to read a private bucket. # It's irrelevant here that this isn't actually a table. with pytest.raises(IOError, match=msg): read_csv("s3://cant_get_it/file.csv") @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) def test_write_s3_csv_fails(self, tips_df, s3so): # GH 32486 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_csv( "s3://an_s3_bucket_data_doesnt_exit/not_real.csv", storage_options=s3so ) @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) @td.skip_if_no("pyarrow") def test_write_s3_parquet_fails(self, tips_df, s3so): # GH 27679 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_parquet( "s3://an_s3_bucket_data_doesnt_exit/not_real.parquet", storage_options=s3so, ) def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file): # see gh-16135 s3_object = s3_resource.meta.client.get_object( Bucket="pandas-test", Key="tips.csv" ) with BytesIO(s3_object["Body"].read()) as buffer: result = read_csv(buffer, encoding="utf8") assert isinstance(result, DataFrame) assert not result.empty expected = read_csv(tips_file) tm.assert_frame_equal(result, expected) def test_read_csv_chunked_download(self, s3_resource, caplog, s3so): # 8 MB, S3FS uses 5MB chunks import s3fs df = DataFrame(np.random.randn(100000, 4), columns=list("abcd")) str_buf = StringIO() df.to_csv(str_buf) buf = BytesIO(str_buf.getvalue().encode("utf-8")) s3_resource.Bucket("pandas-test").put_object(Key="large-file.csv", Body=buf) # Possibly some state leaking in between tests. 
# If we don't clear this cache, we saw `GetObject operation: Forbidden`. # Presumably the s3fs instance is being cached, with the directory listing # from *before* we add the large-file.csv in the pandas-test bucket. s3fs.S3FileSystem.clear_instance_cache() with caplog.at_level(logging.DEBUG, logger="s3fs"): read_csv("s3://pandas-test/large-file.csv", nrows=5, storage_options=s3so) # log of fetch_range (start, stop) assert (0, 5505024) in (x.args[-2:] for x in caplog.records) def test_read_s3_with_hash_in_key(self, tips_df, s3so): # GH 25945 result = read_csv("s3://pandas-test/tips#1.csv", storage_options=s3so) tm.assert_frame_equal(tips_df, result) @td.skip_if_no("pyarrow") def test_read_feather_s3_file_path(self, feather_file, s3so): # GH 29055 expected = read_feather(feather_file) res = read_feather( "s3://pandas-test/simple_dataset.feather", storage_options=s3so ) tm.assert_frame_equal(expected, res)
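The s3so fixture used throughout TestS3 is defined in the io-level conftest rather than in this file; it points s3fs at a local moto S3 endpoint so no real AWS credentials are needed. A hedged sketch of its likely shape and of how storage_options is forwarded to the filesystem constructor; the host and port below are illustrative assumptions.

import pandas as pd

# Assumed shape of the fixture's return value: keyword arguments passed
# straight through to s3fs.S3FileSystem via fsspec.
s3so = {"client_kwargs": {"endpoint_url": "http://127.0.0.1:5555/"}}

df = pd.read_csv("s3://pandas-test/tips.csv", storage_options=s3so)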
datapythonista/pandas
pandas/tests/io/parser/test_network.py
pandas/__init__.py
import numpy as np import pandas as pd from pandas import ( Categorical, DataFrame, Index, Series, Timestamp, ) import pandas._testing as tm from pandas.core.arrays import IntervalArray class TestGetNumericData: def test_get_numeric_data_preserve_dtype(self): # get the numeric data obj = DataFrame({"A": [1, "2", 3.0]}) result = obj._get_numeric_data() expected = DataFrame(index=[0, 1, 2], dtype=object) tm.assert_frame_equal(result, expected) def test_get_numeric_data(self): datetime64name = np.dtype("M8[ns]").name objectname = np.dtype(np.object_).name df = DataFrame( {"a": 1.0, "b": 2, "c": "foo", "f": Timestamp("20010102")}, index=np.arange(10), ) result = df.dtypes expected = Series( [ np.dtype("float64"), np.dtype("int64"), np.dtype(objectname), np.dtype(datetime64name), ], index=["a", "b", "c", "f"], ) tm.assert_series_equal(result, expected) df = DataFrame( { "a": 1.0, "b": 2, "c": "foo", "d": np.array([1.0] * 10, dtype="float32"), "e": np.array([1] * 10, dtype="int32"), "f": np.array([1] * 10, dtype="int16"), "g": Timestamp("20010102"), }, index=np.arange(10), ) result = df._get_numeric_data() expected = df.loc[:, ["a", "b", "d", "e", "f"]] tm.assert_frame_equal(result, expected) only_obj = df.loc[:, ["c", "g"]] result = only_obj._get_numeric_data() expected = df.loc[:, []] tm.assert_frame_equal(result, expected) df = DataFrame.from_dict({"a": [1, 2], "b": ["foo", "bar"], "c": [np.pi, np.e]}) result = df._get_numeric_data() expected = DataFrame.from_dict({"a": [1, 2], "c": [np.pi, np.e]}) tm.assert_frame_equal(result, expected) df = result.copy() result = df._get_numeric_data() expected = df tm.assert_frame_equal(result, expected) def test_get_numeric_data_mixed_dtype(self): # numeric and object columns df = DataFrame( { "a": [1, 2, 3], "b": [True, False, True], "c": ["foo", "bar", "baz"], "d": [None, None, None], "e": [3.14, 0.577, 2.773], } ) result = df._get_numeric_data() tm.assert_index_equal(result.columns, Index(["a", "b", "e"])) def test_get_numeric_data_extension_dtype(self): # GH#22290 df = DataFrame( { "A": pd.array([-10, np.nan, 0, 10, 20, 30], dtype="Int64"), "B": Categorical(list("abcabc")), "C": pd.array([0, 1, 2, 3, np.nan, 5], dtype="UInt8"), "D": IntervalArray.from_breaks(range(7)), } ) result = df._get_numeric_data() expected = df.loc[:, ["A", "C"]] tm.assert_frame_equal(result, expected)
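A compact illustration of the behaviour the tests above assert: _get_numeric_data keeps numeric and boolean columns and drops object ones. It is a private DataFrame helper, so this is only a demonstration of the behaviour under test, not a recommendation to call it directly.

import pandas as pd

df = pd.DataFrame(
    {
        "ints": [1, 2, 3],
        "floats": [0.1, 0.2, 0.3],
        "bools": [True, False, True],
        "strings": ["a", "b", "c"],
    }
)

numeric = df._get_numeric_data()
assert list(numeric.columns) == ["ints", "floats", "bools"]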
""" Tests parsers ability to read and parse non-local files and hence require a network connection to be read. """ from io import ( BytesIO, StringIO, ) import logging import numpy as np import pytest import pandas.util._test_decorators as td from pandas import DataFrame import pandas._testing as tm from pandas.io.feather_format import read_feather from pandas.io.parsers import read_csv @pytest.mark.network @pytest.mark.parametrize( "compress_type, extension", [("gzip", ".gz"), ("bz2", ".bz2"), ("zip", ".zip"), ("xz", ".xz")], ) @pytest.mark.parametrize("mode", ["explicit", "infer"]) @pytest.mark.parametrize("engine", ["python", "c"]) def test_compressed_urls(salaries_table, compress_type, extension, mode, engine): check_compressed_urls(salaries_table, compress_type, extension, mode, engine) @tm.network def check_compressed_urls(salaries_table, compression, extension, mode, engine): # test reading compressed urls with various engines and # extension inference base_url = ( "https://github.com/pandas-dev/pandas/raw/master/" "pandas/tests/io/parser/data/salaries.csv" ) url = base_url + extension if mode != "explicit": compression = mode url_table = read_csv(url, sep="\t", compression=compression, engine=engine) tm.assert_frame_equal(url_table, salaries_table) @tm.network("https://raw.githubusercontent.com/", check_before_test=True) def test_url_encoding_csv(): """ read_csv should honor the requested encoding for URLs. GH 10424 """ path = ( "https://raw.githubusercontent.com/pandas-dev/pandas/master/" + "pandas/tests/io/parser/data/unicode_series.csv" ) df = read_csv(path, encoding="latin-1", header=None) assert df.loc[15, 1] == "Á köldum klaka (Cold Fever) (1994)" @pytest.fixture def tips_df(datapath): """DataFrame with the tips dataset.""" return read_csv(datapath("io", "data", "csv", "tips.csv")) @pytest.mark.usefixtures("s3_resource") @td.skip_if_not_us_locale() class TestS3: @td.skip_if_no("s3fs") def test_parse_public_s3_bucket(self, tips_df, s3so): # more of an integration test due to the not-public contents portion # can probably mock this though. 
for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) # Read public file from bucket with not-public contents df = read_csv("s3://cant_get_it/tips.csv", storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3n_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3n" URL df = read_csv("s3n://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3a_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3a" URL df = read_csv("s3a://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_nrows(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_chunked(self, tips_df, s3so): # Read with a chunksize chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them # properly. df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_chunked_python(self, tips_df, s3so): # Read with a chunksize using the Python parser chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, engine="python", storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them properly. 
df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_infer_s3_compression(self, tips_df, s3so): for ext in ["", ".gz", ".bz2"]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression="infer", storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3_bucket_nrows_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_read_s3_fails(self, s3so): msg = "The specified bucket does not exist" with pytest.raises(IOError, match=msg): read_csv("s3://nyqpug/asdf.csv", storage_options=s3so) # Receive a permission error when trying to read a private bucket. # It's irrelevant here that this isn't actually a table. with pytest.raises(IOError, match=msg): read_csv("s3://cant_get_it/file.csv") @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) def test_write_s3_csv_fails(self, tips_df, s3so): # GH 32486 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_csv( "s3://an_s3_bucket_data_doesnt_exit/not_real.csv", storage_options=s3so ) @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) @td.skip_if_no("pyarrow") def test_write_s3_parquet_fails(self, tips_df, s3so): # GH 27679 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_parquet( "s3://an_s3_bucket_data_doesnt_exit/not_real.parquet", storage_options=s3so, ) def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file): # see gh-16135 s3_object = s3_resource.meta.client.get_object( Bucket="pandas-test", Key="tips.csv" ) with BytesIO(s3_object["Body"].read()) as buffer: result = read_csv(buffer, encoding="utf8") assert isinstance(result, DataFrame) assert not result.empty expected = read_csv(tips_file) tm.assert_frame_equal(result, expected) def test_read_csv_chunked_download(self, s3_resource, caplog, s3so): # 8 MB, S3FS uses 5MB chunks import s3fs df = DataFrame(np.random.randn(100000, 4), columns=list("abcd")) str_buf = StringIO() df.to_csv(str_buf) buf = BytesIO(str_buf.getvalue().encode("utf-8")) s3_resource.Bucket("pandas-test").put_object(Key="large-file.csv", Body=buf) # Possibly some state leaking in between tests. 
# If we don't clear this cache, we saw `GetObject operation: Forbidden`. # Presumably the s3fs instance is being cached, with the directory listing # from *before* we add the large-file.csv in the pandas-test bucket. s3fs.S3FileSystem.clear_instance_cache() with caplog.at_level(logging.DEBUG, logger="s3fs"): read_csv("s3://pandas-test/large-file.csv", nrows=5, storage_options=s3so) # log of fetch_range (start, stop) assert (0, 5505024) in (x.args[-2:] for x in caplog.records) def test_read_s3_with_hash_in_key(self, tips_df, s3so): # GH 25945 result = read_csv("s3://pandas-test/tips#1.csv", storage_options=s3so) tm.assert_frame_equal(tips_df, result) @td.skip_if_no("pyarrow") def test_read_feather_s3_file_path(self, feather_file, s3so): # GH 29055 expected = read_feather(feather_file) res = read_feather( "s3://pandas-test/simple_dataset.feather", storage_options=s3so ) tm.assert_frame_equal(expected, res)
datapythonista/pandas
pandas/tests/io/parser/test_network.py
pandas/tests/frame/methods/test_get_numeric_data.py
import warnings

import pytest

import pandas as pd
import pandas._testing as tm
from pandas.tests.extension.base.base import BaseExtensionTests


class BaseReduceTests(BaseExtensionTests):
    """
    Reduction specific tests. Generally these only
    make sense for numeric/boolean operations.
    """

    def check_reduce(self, s, op_name, skipna):
        result = getattr(s, op_name)(skipna=skipna)
        expected = getattr(s.astype("float64"), op_name)(skipna=skipna)
        tm.assert_almost_equal(result, expected)


class BaseNoReduceTests(BaseReduceTests):
    """ we don't define any reductions """

    @pytest.mark.parametrize("skipna", [True, False])
    def test_reduce_series_numeric(self, data, all_numeric_reductions, skipna):
        op_name = all_numeric_reductions
        s = pd.Series(data)

        msg = (
            "[Cc]annot perform|Categorical is not ordered for operation|"
            "'Categorical' does not implement reduction|"
        )
        with pytest.raises(TypeError, match=msg):
            getattr(s, op_name)(skipna=skipna)

    @pytest.mark.parametrize("skipna", [True, False])
    def test_reduce_series_boolean(self, data, all_boolean_reductions, skipna):
        op_name = all_boolean_reductions
        s = pd.Series(data)

        msg = (
            "[Cc]annot perform|Categorical is not ordered for operation|"
            "'Categorical' does not implement reduction|"
        )
        with pytest.raises(TypeError, match=msg):
            getattr(s, op_name)(skipna=skipna)


class BaseNumericReduceTests(BaseReduceTests):
    @pytest.mark.parametrize("skipna", [True, False])
    def test_reduce_series(self, data, all_numeric_reductions, skipna):
        op_name = all_numeric_reductions
        s = pd.Series(data)

        # min/max with empty produce numpy warnings
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", RuntimeWarning)
            self.check_reduce(s, op_name, skipna)


class BaseBooleanReduceTests(BaseReduceTests):
    @pytest.mark.parametrize("skipna", [True, False])
    def test_reduce_series(self, data, all_boolean_reductions, skipna):
        op_name = all_boolean_reductions
        s = pd.Series(data)
        self.check_reduce(s, op_name, skipna)
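Concrete extension-array test modules consume these base classes by subclassing them and supplying a `data` fixture; the `all_numeric_reductions` / `all_boolean_reductions` fixtures come from pandas' own conftest. A minimal sketch of that pattern — the fixture value here is an arbitrary assumption, not taken from any particular pandas test module:

import pytest

import pandas as pd
from pandas.tests.extension import base


@pytest.fixture
def data():
    # Hypothetical extension-array fixture: a nullable-integer array is enough
    # for the numeric and boolean reduction checks to run.
    return pd.array(list(range(1, 101)), dtype="Int64")


class TestNumericReduce(base.BaseNumericReduceTests):
    pass


class TestBooleanReduce(base.BaseBooleanReduceTests):
    pass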
""" Tests parsers ability to read and parse non-local files and hence require a network connection to be read. """ from io import ( BytesIO, StringIO, ) import logging import numpy as np import pytest import pandas.util._test_decorators as td from pandas import DataFrame import pandas._testing as tm from pandas.io.feather_format import read_feather from pandas.io.parsers import read_csv @pytest.mark.network @pytest.mark.parametrize( "compress_type, extension", [("gzip", ".gz"), ("bz2", ".bz2"), ("zip", ".zip"), ("xz", ".xz")], ) @pytest.mark.parametrize("mode", ["explicit", "infer"]) @pytest.mark.parametrize("engine", ["python", "c"]) def test_compressed_urls(salaries_table, compress_type, extension, mode, engine): check_compressed_urls(salaries_table, compress_type, extension, mode, engine) @tm.network def check_compressed_urls(salaries_table, compression, extension, mode, engine): # test reading compressed urls with various engines and # extension inference base_url = ( "https://github.com/pandas-dev/pandas/raw/master/" "pandas/tests/io/parser/data/salaries.csv" ) url = base_url + extension if mode != "explicit": compression = mode url_table = read_csv(url, sep="\t", compression=compression, engine=engine) tm.assert_frame_equal(url_table, salaries_table) @tm.network("https://raw.githubusercontent.com/", check_before_test=True) def test_url_encoding_csv(): """ read_csv should honor the requested encoding for URLs. GH 10424 """ path = ( "https://raw.githubusercontent.com/pandas-dev/pandas/master/" + "pandas/tests/io/parser/data/unicode_series.csv" ) df = read_csv(path, encoding="latin-1", header=None) assert df.loc[15, 1] == "Á köldum klaka (Cold Fever) (1994)" @pytest.fixture def tips_df(datapath): """DataFrame with the tips dataset.""" return read_csv(datapath("io", "data", "csv", "tips.csv")) @pytest.mark.usefixtures("s3_resource") @td.skip_if_not_us_locale() class TestS3: @td.skip_if_no("s3fs") def test_parse_public_s3_bucket(self, tips_df, s3so): # more of an integration test due to the not-public contents portion # can probably mock this though. 
for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) # Read public file from bucket with not-public contents df = read_csv("s3://cant_get_it/tips.csv", storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3n_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3n" URL df = read_csv("s3n://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3a_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3a" URL df = read_csv("s3a://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_nrows(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_chunked(self, tips_df, s3so): # Read with a chunksize chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them # properly. df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_chunked_python(self, tips_df, s3so): # Read with a chunksize using the Python parser chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, engine="python", storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them properly. 
df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_infer_s3_compression(self, tips_df, s3so): for ext in ["", ".gz", ".bz2"]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression="infer", storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3_bucket_nrows_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_read_s3_fails(self, s3so): msg = "The specified bucket does not exist" with pytest.raises(IOError, match=msg): read_csv("s3://nyqpug/asdf.csv", storage_options=s3so) # Receive a permission error when trying to read a private bucket. # It's irrelevant here that this isn't actually a table. with pytest.raises(IOError, match=msg): read_csv("s3://cant_get_it/file.csv") @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) def test_write_s3_csv_fails(self, tips_df, s3so): # GH 32486 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_csv( "s3://an_s3_bucket_data_doesnt_exit/not_real.csv", storage_options=s3so ) @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) @td.skip_if_no("pyarrow") def test_write_s3_parquet_fails(self, tips_df, s3so): # GH 27679 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_parquet( "s3://an_s3_bucket_data_doesnt_exit/not_real.parquet", storage_options=s3so, ) def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file): # see gh-16135 s3_object = s3_resource.meta.client.get_object( Bucket="pandas-test", Key="tips.csv" ) with BytesIO(s3_object["Body"].read()) as buffer: result = read_csv(buffer, encoding="utf8") assert isinstance(result, DataFrame) assert not result.empty expected = read_csv(tips_file) tm.assert_frame_equal(result, expected) def test_read_csv_chunked_download(self, s3_resource, caplog, s3so): # 8 MB, S3FS uses 5MB chunks import s3fs df = DataFrame(np.random.randn(100000, 4), columns=list("abcd")) str_buf = StringIO() df.to_csv(str_buf) buf = BytesIO(str_buf.getvalue().encode("utf-8")) s3_resource.Bucket("pandas-test").put_object(Key="large-file.csv", Body=buf) # Possibly some state leaking in between tests. 
# If we don't clear this cache, we saw `GetObject operation: Forbidden`. # Presumably the s3fs instance is being cached, with the directory listing # from *before* we add the large-file.csv in the pandas-test bucket. s3fs.S3FileSystem.clear_instance_cache() with caplog.at_level(logging.DEBUG, logger="s3fs"): read_csv("s3://pandas-test/large-file.csv", nrows=5, storage_options=s3so) # log of fetch_range (start, stop) assert (0, 5505024) in (x.args[-2:] for x in caplog.records) def test_read_s3_with_hash_in_key(self, tips_df, s3so): # GH 25945 result = read_csv("s3://pandas-test/tips#1.csv", storage_options=s3so) tm.assert_frame_equal(tips_df, result) @td.skip_if_no("pyarrow") def test_read_feather_s3_file_path(self, feather_file, s3so): # GH 29055 expected = read_feather(feather_file) res = read_feather( "s3://pandas-test/simple_dataset.feather", storage_options=s3so ) tm.assert_frame_equal(expected, res)
datapythonista/pandas
pandas/tests/io/parser/test_network.py
pandas/tests/extension/base/reduce.py
from __future__ import annotations from contextlib import suppress from typing import ( TYPE_CHECKING, Any, Hashable, Sequence, ) import warnings import numpy as np from pandas._libs.indexing import NDFrameIndexerBase from pandas._libs.lib import item_from_zerodim from pandas.errors import ( AbstractMethodError, InvalidIndexError, ) from pandas.util._decorators import doc from pandas.core.dtypes.common import ( is_array_like, is_bool_dtype, is_hashable, is_integer, is_iterator, is_list_like, is_numeric_dtype, is_object_dtype, is_scalar, is_sequence, ) from pandas.core.dtypes.concat import concat_compat from pandas.core.dtypes.generic import ( ABCDataFrame, ABCSeries, ) from pandas.core.dtypes.missing import ( infer_fill_value, isna, ) import pandas.core.common as com from pandas.core.construction import array as pd_array from pandas.core.indexers import ( check_array_indexer, is_empty_indexer, is_exact_shape_match, is_list_like_indexer, length_of_indexer, ) from pandas.core.indexes.api import ( Index, MultiIndex, ) if TYPE_CHECKING: from pandas import ( DataFrame, Series, ) # "null slice" _NS = slice(None, None) # the public IndexSlicerMaker class _IndexSlice: """ Create an object to more easily perform multi-index slicing. See Also -------- MultiIndex.remove_unused_levels : New MultiIndex with no unused levels. Notes ----- See :ref:`Defined Levels <advanced.shown_levels>` for further info on slicing a MultiIndex. Examples -------- >>> midx = pd.MultiIndex.from_product([['A0','A1'], ['B0','B1','B2','B3']]) >>> columns = ['foo', 'bar'] >>> dfmi = pd.DataFrame(np.arange(16).reshape((len(midx), len(columns))), ... index=midx, columns=columns) Using the default slice command: >>> dfmi.loc[(slice(None), slice('B0', 'B1')), :] foo bar A0 B0 0 1 B1 2 3 A1 B0 8 9 B1 10 11 Using the IndexSlice class for a more intuitive command: >>> idx = pd.IndexSlice >>> dfmi.loc[idx[:, 'B0':'B1'], :] foo bar A0 B0 0 1 B1 2 3 A1 B0 8 9 B1 10 11 """ def __getitem__(self, arg): return arg IndexSlice = _IndexSlice() class IndexingError(Exception): pass class IndexingMixin: """ Mixin for adding .loc/.iloc/.at/.iat to Dataframes and Series. """ @property def iloc(self) -> _iLocIndexer: """ Purely integer-location based indexing for selection by position. ``.iloc[]`` is primarily integer position based (from ``0`` to ``length-1`` of the axis), but may also be used with a boolean array. Allowed inputs are: - An integer, e.g. ``5``. - A list or array of integers, e.g. ``[4, 3, 0]``. - A slice object with ints, e.g. ``1:7``. - A boolean array. - A ``callable`` function with one argument (the calling Series or DataFrame) and that returns valid output for indexing (one of the above). This is useful in method chains, when you don't have a reference to the calling object, but would like to base your selection on some value. ``.iloc`` will raise ``IndexError`` if a requested indexer is out-of-bounds, except *slice* indexers which allow out-of-bounds indexing (this conforms with python/numpy *slice* semantics). See more at :ref:`Selection by Position <indexing.integer>`. See Also -------- DataFrame.iat : Fast integer location scalar accessor. DataFrame.loc : Purely label-location based indexer for selection by label. Series.iloc : Purely integer-location based indexing for selection by position. Examples -------- >>> mydict = [{'a': 1, 'b': 2, 'c': 3, 'd': 4}, ... {'a': 100, 'b': 200, 'c': 300, 'd': 400}, ... 
{'a': 1000, 'b': 2000, 'c': 3000, 'd': 4000 }] >>> df = pd.DataFrame(mydict) >>> df a b c d 0 1 2 3 4 1 100 200 300 400 2 1000 2000 3000 4000 **Indexing just the rows** With a scalar integer. >>> type(df.iloc[0]) <class 'pandas.core.series.Series'> >>> df.iloc[0] a 1 b 2 c 3 d 4 Name: 0, dtype: int64 With a list of integers. >>> df.iloc[[0]] a b c d 0 1 2 3 4 >>> type(df.iloc[[0]]) <class 'pandas.core.frame.DataFrame'> >>> df.iloc[[0, 1]] a b c d 0 1 2 3 4 1 100 200 300 400 With a `slice` object. >>> df.iloc[:3] a b c d 0 1 2 3 4 1 100 200 300 400 2 1000 2000 3000 4000 With a boolean mask the same length as the index. >>> df.iloc[[True, False, True]] a b c d 0 1 2 3 4 2 1000 2000 3000 4000 With a callable, useful in method chains. The `x` passed to the ``lambda`` is the DataFrame being sliced. This selects the rows whose index label even. >>> df.iloc[lambda x: x.index % 2 == 0] a b c d 0 1 2 3 4 2 1000 2000 3000 4000 **Indexing both axes** You can mix the indexer types for the index and columns. Use ``:`` to select the entire axis. With scalar integers. >>> df.iloc[0, 1] 2 With lists of integers. >>> df.iloc[[0, 2], [1, 3]] b d 0 2 4 2 2000 4000 With `slice` objects. >>> df.iloc[1:3, 0:3] a b c 1 100 200 300 2 1000 2000 3000 With a boolean array whose length matches the columns. >>> df.iloc[:, [True, False, True, False]] a c 0 1 3 1 100 300 2 1000 3000 With a callable function that expects the Series or DataFrame. >>> df.iloc[:, lambda df: [0, 2]] a c 0 1 3 1 100 300 2 1000 3000 """ return _iLocIndexer("iloc", self) @property def loc(self) -> _LocIndexer: """ Access a group of rows and columns by label(s) or a boolean array. ``.loc[]`` is primarily label based, but may also be used with a boolean array. Allowed inputs are: - A single label, e.g. ``5`` or ``'a'``, (note that ``5`` is interpreted as a *label* of the index, and **never** as an integer position along the index). - A list or array of labels, e.g. ``['a', 'b', 'c']``. - A slice object with labels, e.g. ``'a':'f'``. .. warning:: Note that contrary to usual python slices, **both** the start and the stop are included - A boolean array of the same length as the axis being sliced, e.g. ``[True, False, True]``. - An alignable boolean Series. The index of the key will be aligned before masking. - An alignable Index. The Index of the returned selection will be the input. - A ``callable`` function with one argument (the calling Series or DataFrame) and that returns valid output for indexing (one of the above) See more at :ref:`Selection by Label <indexing.label>`. Raises ------ KeyError If any items are not found. IndexingError If an indexed key is passed and its index is unalignable to the frame index. See Also -------- DataFrame.at : Access a single value for a row/column label pair. DataFrame.iloc : Access group of rows and columns by integer position(s). DataFrame.xs : Returns a cross-section (row(s) or column(s)) from the Series/DataFrame. Series.loc : Access group of values using labels. Examples -------- **Getting values** >>> df = pd.DataFrame([[1, 2], [4, 5], [7, 8]], ... index=['cobra', 'viper', 'sidewinder'], ... columns=['max_speed', 'shield']) >>> df max_speed shield cobra 1 2 viper 4 5 sidewinder 7 8 Single label. Note this returns the row as a Series. >>> df.loc['viper'] max_speed 4 shield 5 Name: viper, dtype: int64 List of labels. Note using ``[[]]`` returns a DataFrame. 
>>> df.loc[['viper', 'sidewinder']] max_speed shield viper 4 5 sidewinder 7 8 Single label for row and column >>> df.loc['cobra', 'shield'] 2 Slice with labels for row and single label for column. As mentioned above, note that both the start and stop of the slice are included. >>> df.loc['cobra':'viper', 'max_speed'] cobra 1 viper 4 Name: max_speed, dtype: int64 Boolean list with the same length as the row axis >>> df.loc[[False, False, True]] max_speed shield sidewinder 7 8 Alignable boolean Series: >>> df.loc[pd.Series([False, True, False], ... index=['viper', 'sidewinder', 'cobra'])] max_speed shield sidewinder 7 8 Index (same behavior as ``df.reindex``) >>> df.loc[pd.Index(["cobra", "viper"], name="foo")] max_speed shield foo cobra 1 2 viper 4 5 Conditional that returns a boolean Series >>> df.loc[df['shield'] > 6] max_speed shield sidewinder 7 8 Conditional that returns a boolean Series with column labels specified >>> df.loc[df['shield'] > 6, ['max_speed']] max_speed sidewinder 7 Callable that returns a boolean Series >>> df.loc[lambda df: df['shield'] == 8] max_speed shield sidewinder 7 8 **Setting values** Set value for all items matching the list of labels >>> df.loc[['viper', 'sidewinder'], ['shield']] = 50 >>> df max_speed shield cobra 1 2 viper 4 50 sidewinder 7 50 Set value for an entire row >>> df.loc['cobra'] = 10 >>> df max_speed shield cobra 10 10 viper 4 50 sidewinder 7 50 Set value for an entire column >>> df.loc[:, 'max_speed'] = 30 >>> df max_speed shield cobra 30 10 viper 30 50 sidewinder 30 50 Set value for rows matching callable condition >>> df.loc[df['shield'] > 35] = 0 >>> df max_speed shield cobra 30 10 viper 0 0 sidewinder 0 0 **Getting values on a DataFrame with an index that has integer labels** Another example using integers for the index >>> df = pd.DataFrame([[1, 2], [4, 5], [7, 8]], ... index=[7, 8, 9], columns=['max_speed', 'shield']) >>> df max_speed shield 7 1 2 8 4 5 9 7 8 Slice with integer labels for rows. As mentioned above, note that both the start and stop of the slice are included. >>> df.loc[7:9] max_speed shield 7 1 2 8 4 5 9 7 8 **Getting values with a MultiIndex** A number of examples using a DataFrame with a MultiIndex >>> tuples = [ ... ('cobra', 'mark i'), ('cobra', 'mark ii'), ... ('sidewinder', 'mark i'), ('sidewinder', 'mark ii'), ... ('viper', 'mark ii'), ('viper', 'mark iii') ... ] >>> index = pd.MultiIndex.from_tuples(tuples) >>> values = [[12, 2], [0, 4], [10, 20], ... [1, 4], [7, 1], [16, 36]] >>> df = pd.DataFrame(values, columns=['max_speed', 'shield'], index=index) >>> df max_speed shield cobra mark i 12 2 mark ii 0 4 sidewinder mark i 10 20 mark ii 1 4 viper mark ii 7 1 mark iii 16 36 Single label. Note this returns a DataFrame with a single index. >>> df.loc['cobra'] max_speed shield mark i 12 2 mark ii 0 4 Single index tuple. Note this returns a Series. >>> df.loc[('cobra', 'mark ii')] max_speed 0 shield 4 Name: (cobra, mark ii), dtype: int64 Single label for row and column. Similar to passing in a tuple, this returns a Series. >>> df.loc['cobra', 'mark i'] max_speed 12 shield 2 Name: (cobra, mark i), dtype: int64 Single tuple. Note using ``[[]]`` returns a DataFrame. 
>>> df.loc[[('cobra', 'mark ii')]] max_speed shield cobra mark ii 0 4 Single tuple for the index with a single label for the column >>> df.loc[('cobra', 'mark i'), 'shield'] 2 Slice from index tuple to single label >>> df.loc[('cobra', 'mark i'):'viper'] max_speed shield cobra mark i 12 2 mark ii 0 4 sidewinder mark i 10 20 mark ii 1 4 viper mark ii 7 1 mark iii 16 36 Slice from index tuple to index tuple >>> df.loc[('cobra', 'mark i'):('viper', 'mark ii')] max_speed shield cobra mark i 12 2 mark ii 0 4 sidewinder mark i 10 20 mark ii 1 4 viper mark ii 7 1 """ return _LocIndexer("loc", self) @property def at(self) -> _AtIndexer: """ Access a single value for a row/column label pair. Similar to ``loc``, in that both provide label-based lookups. Use ``at`` if you only need to get or set a single value in a DataFrame or Series. Raises ------ KeyError If 'label' does not exist in DataFrame. See Also -------- DataFrame.iat : Access a single value for a row/column pair by integer position. DataFrame.loc : Access a group of rows and columns by label(s). Series.at : Access a single value using a label. Examples -------- >>> df = pd.DataFrame([[0, 2, 3], [0, 4, 1], [10, 20, 30]], ... index=[4, 5, 6], columns=['A', 'B', 'C']) >>> df A B C 4 0 2 3 5 0 4 1 6 10 20 30 Get value at specified row/column pair >>> df.at[4, 'B'] 2 Set value at specified row/column pair >>> df.at[4, 'B'] = 10 >>> df.at[4, 'B'] 10 Get value within a Series >>> df.loc[5].at['B'] 4 """ return _AtIndexer("at", self) @property def iat(self) -> _iAtIndexer: """ Access a single value for a row/column pair by integer position. Similar to ``iloc``, in that both provide integer-based lookups. Use ``iat`` if you only need to get or set a single value in a DataFrame or Series. Raises ------ IndexError When integer position is out of bounds. See Also -------- DataFrame.at : Access a single value for a row/column label pair. DataFrame.loc : Access a group of rows and columns by label(s). DataFrame.iloc : Access a group of rows and columns by integer position(s). Examples -------- >>> df = pd.DataFrame([[0, 2, 3], [0, 4, 1], [10, 20, 30]], ... columns=['A', 'B', 'C']) >>> df A B C 0 0 2 3 1 0 4 1 2 10 20 30 Get value at specified row/column pair >>> df.iat[1, 2] 1 Set value at specified row/column pair >>> df.iat[1, 2] = 10 >>> df.iat[1, 2] 10 Get value within a series >>> df.loc[0].iat[1] 2 """ return _iAtIndexer("iat", self) class _LocationIndexer(NDFrameIndexerBase): _valid_types: str axis = None def __call__(self, axis=None): # we need to return a copy of ourselves new_self = type(self)(self.name, self.obj) if axis is not None: axis = self.obj._get_axis_number(axis) new_self.axis = axis return new_self def _get_setitem_indexer(self, key): """ Convert a potentially-label-based key into a positional indexer. """ if self.name == "loc": self._ensure_listlike_indexer(key) if self.axis is not None: return self._convert_tuple(key, is_setter=True) ax = self.obj._get_axis(0) if isinstance(ax, MultiIndex) and self.name != "iloc": with suppress(TypeError, KeyError, InvalidIndexError): # TypeError e.g. 
passed a bool return ax.get_loc(key) if isinstance(key, tuple): with suppress(IndexingError): return self._convert_tuple(key, is_setter=True) if isinstance(key, range): return list(key) try: return self._convert_to_indexer(key, axis=0, is_setter=True) except TypeError as e: # invalid indexer type vs 'other' indexing errors if "cannot do" in str(e): raise elif "unhashable type" in str(e): raise raise IndexingError(key) from e def _ensure_listlike_indexer(self, key, axis=None, value=None): """ Ensure that a list-like of column labels are all present by adding them if they do not already exist. Parameters ---------- key : list-like of column labels Target labels. axis : key axis if known """ column_axis = 1 # column only exists in 2-dimensional DataFrame if self.ndim != 2: return if isinstance(key, tuple) and len(key) > 1: # key may be a tuple if we are .loc # if length of key is > 1 set key to column part key = key[column_axis] axis = column_axis if ( axis == column_axis and not isinstance(self.obj.columns, MultiIndex) and is_list_like_indexer(key) and not com.is_bool_indexer(key) and all(is_hashable(k) for k in key) ): # GH#38148 keys = self.obj.columns.union(key, sort=False) self.obj._mgr = self.obj._mgr.reindex_axis( keys, axis=0, consolidate=False, only_slice=True ) def __setitem__(self, key, value): if isinstance(key, tuple): key = tuple(list(x) if is_iterator(x) else x for x in key) key = tuple(com.apply_if_callable(x, self.obj) for x in key) else: key = com.apply_if_callable(key, self.obj) indexer = self._get_setitem_indexer(key) self._has_valid_setitem_indexer(key) iloc = self if self.name == "iloc" else self.obj.iloc iloc._setitem_with_indexer(indexer, value, self.name) def _validate_key(self, key, axis: int): """ Ensure that key is valid for current indexer. Parameters ---------- key : scalar, slice or list-like Key requested. axis : int Dimension on which the indexing is being made. Raises ------ TypeError If the key (or some element of it) has wrong type. IndexError If the key (or some element of it) is out of bounds. KeyError If the key was not found. """ raise AbstractMethodError(self) def _has_valid_tuple(self, key: tuple): """ Check the key for valid keys across my indexer. """ self._validate_key_length(key) for i, k in enumerate(key): try: self._validate_key(k, i) except ValueError as err: raise ValueError( "Location based indexing can only have " f"[{self._valid_types}] types" ) from err def _is_nested_tuple_indexer(self, tup: tuple) -> bool: """ Returns ------- bool """ if any(isinstance(ax, MultiIndex) for ax in self.obj.axes): return any(is_nested_tuple(tup, ax) for ax in self.obj.axes) return False def _convert_tuple(self, key, is_setter: bool = False): keyidx = [] if self.axis is not None: axis = self.obj._get_axis_number(self.axis) for i in range(self.ndim): if i == axis: keyidx.append( self._convert_to_indexer(key, axis=axis, is_setter=is_setter) ) else: keyidx.append(slice(None)) else: self._validate_key_length(key) for i, k in enumerate(key): idx = self._convert_to_indexer(k, axis=i, is_setter=is_setter) keyidx.append(idx) return tuple(keyidx) def _validate_key_length(self, key: Sequence[Any]) -> None: if len(key) > self.ndim: raise IndexingError("Too many indexers") def _getitem_tuple_same_dim(self, tup: tuple): """ Index with indexers that should return an object of the same dimension as self.obj. This is only called after a failed call to _getitem_lowerdim. 
""" retval = self.obj for i, key in enumerate(tup): if com.is_null_slice(key): continue retval = getattr(retval, self.name)._getitem_axis(key, axis=i) # We should never have retval.ndim < self.ndim, as that should # be handled by the _getitem_lowerdim call above. assert retval.ndim == self.ndim return retval def _getitem_lowerdim(self, tup: tuple): # we can directly get the axis result since the axis is specified if self.axis is not None: axis = self.obj._get_axis_number(self.axis) return self._getitem_axis(tup, axis=axis) # we may have a nested tuples indexer here if self._is_nested_tuple_indexer(tup): return self._getitem_nested_tuple(tup) # we maybe be using a tuple to represent multiple dimensions here ax0 = self.obj._get_axis(0) # ...but iloc should handle the tuple as simple integer-location # instead of checking it as multiindex representation (GH 13797) if isinstance(ax0, MultiIndex) and self.name != "iloc": with suppress(IndexingError): return self._handle_lowerdim_multi_index_axis0(tup) self._validate_key_length(tup) for i, key in enumerate(tup): if is_label_like(key): # We don't need to check for tuples here because those are # caught by the _is_nested_tuple_indexer check above. section = self._getitem_axis(key, axis=i) # We should never have a scalar section here, because # _getitem_lowerdim is only called after a check for # is_scalar_access, which that would be. if section.ndim == self.ndim: # we're in the middle of slicing through a MultiIndex # revise the key wrt to `section` by inserting an _NS new_key = tup[:i] + (_NS,) + tup[i + 1 :] else: # Note: the section.ndim == self.ndim check above # rules out having DataFrame here, so we dont need to worry # about transposing. new_key = tup[:i] + tup[i + 1 :] if len(new_key) == 1: new_key = new_key[0] # Slices should return views, but calling iloc/loc with a null # slice returns a new object. if com.is_null_slice(new_key): return section # This is an elided recursive call to iloc/loc return getattr(section, self.name)[new_key] raise IndexingError("not applicable") def _getitem_nested_tuple(self, tup: tuple): # we have a nested tuple so have at least 1 multi-index level # we should be able to match up the dimensionality here # we have too many indexers for our dim, but have at least 1 # multi-index dimension, try to see if we have something like # a tuple passed to a series with a multi-index if len(tup) > self.ndim: if self.name != "loc": # This should never be reached, but lets be explicit about it raise ValueError("Too many indices") if isinstance(self.obj, ABCSeries) and any( isinstance(k, tuple) for k in tup ): # GH#35349 Raise if tuple in tuple for series raise ValueError("Too many indices") if self.ndim == 1 or not any(isinstance(x, slice) for x in tup): # GH#10521 Series should reduce MultiIndex dimensions instead of # DataFrame, IndexingError is not raised when slice(None,None,None) # with one row. 
with suppress(IndexingError): return self._handle_lowerdim_multi_index_axis0(tup) # this is a series with a multi-index specified a tuple of # selectors axis = self.axis or 0 return self._getitem_axis(tup, axis=axis) # handle the multi-axis by taking sections and reducing # this is iterative obj = self.obj # GH#41369 Loop in reverse order ensures indexing along columns before rows # which selects only necessary blocks which avoids dtype conversion if possible axis = len(tup) - 1 for key in tup[::-1]: if com.is_null_slice(key): axis -= 1 continue obj = getattr(obj, self.name)._getitem_axis(key, axis=axis) axis -= 1 # if we have a scalar, we are done if is_scalar(obj) or not hasattr(obj, "ndim"): break return obj def _convert_to_indexer(self, key, axis: int, is_setter: bool = False): raise AbstractMethodError(self) def __getitem__(self, key): if type(key) is tuple: key = tuple(list(x) if is_iterator(x) else x for x in key) key = tuple(com.apply_if_callable(x, self.obj) for x in key) if self._is_scalar_access(key): with suppress(KeyError, IndexError, AttributeError): # AttributeError for IntervalTree get_value return self.obj._get_value(*key, takeable=self._takeable) return self._getitem_tuple(key) else: # we by definition only have the 0th axis axis = self.axis or 0 maybe_callable = com.apply_if_callable(key, self.obj) return self._getitem_axis(maybe_callable, axis=axis) def _is_scalar_access(self, key: tuple): raise NotImplementedError() def _getitem_tuple(self, tup: tuple): raise AbstractMethodError(self) def _getitem_axis(self, key, axis: int): raise NotImplementedError() def _has_valid_setitem_indexer(self, indexer) -> bool: raise AbstractMethodError(self) def _getbool_axis(self, key, axis: int): # caller is responsible for ensuring non-None axis labels = self.obj._get_axis(axis) key = check_bool_indexer(labels, key) inds = key.nonzero()[0] return self.obj._take_with_is_copy(inds, axis=axis) @doc(IndexingMixin.loc) class _LocIndexer(_LocationIndexer): _takeable: bool = False _valid_types = ( "labels (MUST BE IN THE INDEX), slices of labels (BOTH " "endpoints included! 
Can be slices of integers if the " "index is integers), listlike of labels, boolean" ) # ------------------------------------------------------------------- # Key Checks @doc(_LocationIndexer._validate_key) def _validate_key(self, key, axis: int): # valid for a collection of labels (we check their presence later) # slice of labels (where start-end in labels) # slice of integers (only if in the labels) # boolean not in slice and with boolean index if isinstance(key, bool) and not is_bool_dtype(self.obj.index): raise KeyError( f"{key}: boolean label can not be used without a boolean index" ) if isinstance(key, slice) and ( isinstance(key.start, bool) or isinstance(key.stop, bool) ): raise TypeError(f"{key}: boolean values can not be used in a slice") def _has_valid_setitem_indexer(self, indexer) -> bool: return True def _is_scalar_access(self, key: tuple) -> bool: """ Returns ------- bool """ # this is a shortcut accessor to both .loc and .iloc # that provide the equivalent access of .at and .iat # a) avoid getting things via sections and (to minimize dtype changes) # b) provide a performant path if len(key) != self.ndim: return False for i, k in enumerate(key): if not is_scalar(k): return False ax = self.obj.axes[i] if isinstance(ax, MultiIndex): return False if isinstance(k, str) and ax._supports_partial_string_indexing: # partial string indexing, df.loc['2000', 'A'] # should not be considered scalar return False if not ax.is_unique: return False return True # ------------------------------------------------------------------- # MultiIndex Handling def _multi_take_opportunity(self, tup: tuple) -> bool: """ Check whether there is the possibility to use ``_multi_take``. Currently the limit is that all axes being indexed, must be indexed with list-likes. Parameters ---------- tup : tuple Tuple of indexers, one per axis. Returns ------- bool Whether the current indexing, can be passed through `_multi_take`. """ if not all(is_list_like_indexer(x) for x in tup): return False # just too complicated return not any(com.is_bool_indexer(x) for x in tup) def _multi_take(self, tup: tuple): """ Create the indexers for the passed tuple of keys, and executes the take operation. This allows the take operation to be executed all at once, rather than once for each dimension. Improving efficiency. Parameters ---------- tup : tuple Tuple of indexers, one per axis. Returns ------- values: same type as the object being indexed """ # GH 836 d = { axis: self._get_listlike_indexer(key, axis) for (key, axis) in zip(tup, self.obj._AXIS_ORDERS) } return self.obj._reindex_with_indexers(d, copy=True, allow_dups=True) # ------------------------------------------------------------------- def _getitem_iterable(self, key, axis: int): """ Index current object with an iterable collection of keys. Parameters ---------- key : iterable Targeted labels. axis : int Dimension on which the indexing is being made. Raises ------ KeyError If no key was found. Will change in the future to raise if not all keys were found. Returns ------- scalar, DataFrame, or Series: indexed value(s). """ # we assume that not com.is_bool_indexer(key), as that is # handled before we get here. 
self._validate_key(key, axis) # A collection of keys keyarr, indexer = self._get_listlike_indexer(key, axis) return self.obj._reindex_with_indexers( {axis: [keyarr, indexer]}, copy=True, allow_dups=True ) def _getitem_tuple(self, tup: tuple): with suppress(IndexingError): return self._getitem_lowerdim(tup) # no multi-index, so validate all of the indexers self._has_valid_tuple(tup) # ugly hack for GH #836 if self._multi_take_opportunity(tup): return self._multi_take(tup) return self._getitem_tuple_same_dim(tup) def _get_label(self, label, axis: int): # GH#5667 this will fail if the label is not present in the axis. return self.obj.xs(label, axis=axis) def _handle_lowerdim_multi_index_axis0(self, tup: tuple): # we have an axis0 multi-index, handle or raise axis = self.axis or 0 try: # fast path for series or for tup devoid of slices return self._get_label(tup, axis=axis) except (TypeError, InvalidIndexError): # slices are unhashable pass except KeyError as ek: # raise KeyError if number of indexers match # else IndexingError will be raised if self.ndim < len(tup) <= self.obj.index.nlevels: raise ek raise IndexingError("No label returned") def _getitem_axis(self, key, axis: int): key = item_from_zerodim(key) if is_iterator(key): key = list(key) labels = self.obj._get_axis(axis) key = labels._get_partial_string_timestamp_match_key(key) if isinstance(key, slice): self._validate_key(key, axis) return self._get_slice_axis(key, axis=axis) elif com.is_bool_indexer(key): return self._getbool_axis(key, axis=axis) elif is_list_like_indexer(key): # an iterable multi-selection if not (isinstance(key, tuple) and isinstance(labels, MultiIndex)): if hasattr(key, "ndim") and key.ndim > 1: raise ValueError("Cannot index with multidimensional key") return self._getitem_iterable(key, axis=axis) # nested tuple slicing if is_nested_tuple(key, labels): locs = labels.get_locs(key) indexer = [slice(None)] * self.ndim indexer[axis] = locs return self.obj.iloc[tuple(indexer)] # fall thru to straight lookup self._validate_key(key, axis) return self._get_label(key, axis=axis) def _get_slice_axis(self, slice_obj: slice, axis: int): """ This is pretty simple as we just have to deal with labels. """ # caller is responsible for ensuring non-None axis obj = self.obj if not need_slice(slice_obj): return obj.copy(deep=False) labels = obj._get_axis(axis) indexer = labels.slice_indexer(slice_obj.start, slice_obj.stop, slice_obj.step) if isinstance(indexer, slice): return self.obj._slice(indexer, axis=axis) else: # DatetimeIndex overrides Index.slice_indexer and may # return a DatetimeIndex instead of a slice object. return self.obj.take(indexer, axis=axis) def _convert_to_indexer(self, key, axis: int, is_setter: bool = False): """ Convert indexing key into something we can use to do actual fancy indexing on a ndarray. Examples ix[:5] -> slice(0, 5) ix[[1,2,3]] -> [1,2,3] ix[['foo', 'bar', 'baz']] -> [i, j, k] (indices of foo, bar, baz) Going by Zen of Python? 'In the face of ambiguity, refuse the temptation to guess.' raise AmbiguousIndexError with integer labels? 
- No, prefer label-based indexing """ labels = self.obj._get_axis(axis) if isinstance(key, slice): return labels._convert_slice_indexer(key, kind="loc") # see if we are positional in nature is_int_index = labels.is_integer() is_int_positional = is_integer(key) and not is_int_index if is_scalar(key) or isinstance(labels, MultiIndex): # Otherwise get_loc will raise InvalidIndexError # if we are a label return me try: return labels.get_loc(key) except LookupError: if isinstance(key, tuple) and isinstance(labels, MultiIndex): if len(key) == labels.nlevels: return {"key": key} raise except InvalidIndexError: # GH35015, using datetime as column indices raises exception if not isinstance(labels, MultiIndex): raise except TypeError: pass except ValueError: if not is_int_positional: raise # a positional if is_int_positional: # if we are setting and its not a valid location # its an insert which fails by definition # always valid return {"key": key} if is_nested_tuple(key, labels): if isinstance(self.obj, ABCSeries) and any( isinstance(k, tuple) for k in key ): # GH#35349 Raise if tuple in tuple for series raise ValueError("Too many indices") return labels.get_locs(key) elif is_list_like_indexer(key): if is_iterator(key): key = list(key) if com.is_bool_indexer(key): key = check_bool_indexer(labels, key) (inds,) = key.nonzero() return inds else: return self._get_listlike_indexer(key, axis)[1] else: try: return labels.get_loc(key) except LookupError: # allow a not found key only if we are a setter if not is_list_like_indexer(key): return {"key": key} raise def _get_listlike_indexer(self, key, axis: int): """ Transform a list-like of keys into a new index and an indexer. Parameters ---------- key : list-like Targeted labels. axis: int Dimension on which the indexing is being made. Raises ------ KeyError If at least one key was requested but none was found. Returns ------- keyarr: Index New index (coinciding with 'key' if the axis is unique). values : array-like Indexer for the return object, -1 denotes keys not found. """ ax = self.obj._get_axis(axis) # Have the index compute an indexer or return None # if it cannot handle: indexer, keyarr = ax._convert_listlike_indexer(key) # We only act on all found values: if indexer is not None and (indexer != -1).all(): # _validate_read_indexer is a no-op if no -1s, so skip return ax[indexer], indexer if ax._index_as_unique: indexer = ax.get_indexer_for(keyarr) keyarr = ax.reindex(keyarr)[0] else: keyarr, indexer, new_indexer = ax._reindex_non_unique(keyarr) self._validate_read_indexer(keyarr, indexer, axis) return keyarr, indexer def _validate_read_indexer(self, key, indexer, axis: int): """ Check that indexer can be used to return a result. e.g. at least one element was found, unless the list of keys was actually empty. Parameters ---------- key : list-like Targeted labels (only used to show correct error message). indexer: array-like of booleans Indices corresponding to the key, (with -1 indicating not found). axis : int Dimension on which the indexing is being made. Raises ------ KeyError If at least one key was requested but none was found. 
""" if len(key) == 0: return # Count missing values: missing_mask = indexer < 0 missing = (missing_mask).sum() if missing: if missing == len(indexer): axis_name = self.obj._get_axis_name(axis) raise KeyError(f"None of [{key}] are in the [{axis_name}]") ax = self.obj._get_axis(axis) not_found = list(set(key) - set(ax)) raise KeyError(f"{not_found} not in index") @doc(IndexingMixin.iloc) class _iLocIndexer(_LocationIndexer): _valid_types = ( "integer, integer slice (START point is INCLUDED, END " "point is EXCLUDED), listlike of integers, boolean array" ) _takeable = True # ------------------------------------------------------------------- # Key Checks def _validate_key(self, key, axis: int): if com.is_bool_indexer(key): if hasattr(key, "index") and isinstance(key.index, Index): if key.index.inferred_type == "integer": raise NotImplementedError( "iLocation based boolean " "indexing on an integer type " "is not available" ) raise ValueError( "iLocation based boolean indexing cannot use " "an indexable as a mask" ) return if isinstance(key, slice): return elif is_integer(key): self._validate_integer(key, axis) elif isinstance(key, tuple): # a tuple should already have been caught by this point # so don't treat a tuple as a valid indexer raise IndexingError("Too many indexers") elif is_list_like_indexer(key): arr = np.array(key) len_axis = len(self.obj._get_axis(axis)) # check that the key has a numeric dtype if not is_numeric_dtype(arr.dtype): raise IndexError(f".iloc requires numeric indexers, got {arr}") # check that the key does not exceed the maximum size of the index if len(arr) and (arr.max() >= len_axis or arr.min() < -len_axis): raise IndexError("positional indexers are out-of-bounds") else: raise ValueError(f"Can only index by location with a [{self._valid_types}]") def _has_valid_setitem_indexer(self, indexer) -> bool: """ Validate that a positional indexer cannot enlarge its target will raise if needed, does not modify the indexer externally. Returns ------- bool """ if isinstance(indexer, dict): raise IndexError("iloc cannot enlarge its target object") if isinstance(indexer, ABCDataFrame): warnings.warn( "DataFrame indexer for .iloc is deprecated and will be removed in" "a future version.\n" "consider using .loc with a DataFrame indexer for automatic alignment.", FutureWarning, stacklevel=3, ) if not isinstance(indexer, tuple): indexer = _tuplify(self.ndim, indexer) for ax, i in zip(self.obj.axes, indexer): if isinstance(i, slice): # should check the stop slice? pass elif is_list_like_indexer(i): # should check the elements? pass elif is_integer(i): if i >= len(ax): raise IndexError("iloc cannot enlarge its target object") elif isinstance(i, dict): raise IndexError("iloc cannot enlarge its target object") return True def _is_scalar_access(self, key: tuple) -> bool: """ Returns ------- bool """ # this is a shortcut accessor to both .loc and .iloc # that provide the equivalent access of .at and .iat # a) avoid getting things via sections and (to minimize dtype changes) # b) provide a performant path if len(key) != self.ndim: return False return all(is_integer(k) for k in key) def _validate_integer(self, key: int, axis: int) -> None: """ Check that 'key' is a valid position in the desired axis. Parameters ---------- key : int Requested position. axis : int Desired axis. Raises ------ IndexError If 'key' is not a valid position in axis 'axis'. 
""" len_axis = len(self.obj._get_axis(axis)) if key >= len_axis or key < -len_axis: raise IndexError("single positional indexer is out-of-bounds") # ------------------------------------------------------------------- def _getitem_tuple(self, tup: tuple): self._has_valid_tuple(tup) with suppress(IndexingError): return self._getitem_lowerdim(tup) return self._getitem_tuple_same_dim(tup) def _get_list_axis(self, key, axis: int): """ Return Series values by list or array of integers. Parameters ---------- key : list-like positional indexer axis : int Returns ------- Series object Notes ----- `axis` can only be zero. """ try: return self.obj._take_with_is_copy(key, axis=axis) except IndexError as err: # re-raise with different error message raise IndexError("positional indexers are out-of-bounds") from err def _getitem_axis(self, key, axis: int): if isinstance(key, ABCDataFrame): raise IndexError( "DataFrame indexer is not allowed for .iloc\n" "Consider using .loc for automatic alignment." ) if isinstance(key, slice): return self._get_slice_axis(key, axis=axis) if is_iterator(key): key = list(key) if isinstance(key, list): key = np.asarray(key) if com.is_bool_indexer(key): self._validate_key(key, axis) return self._getbool_axis(key, axis=axis) # a list of integers elif is_list_like_indexer(key): return self._get_list_axis(key, axis=axis) # a single integer else: key = item_from_zerodim(key) if not is_integer(key): raise TypeError("Cannot index by location index with a non-integer key") # validate the location self._validate_integer(key, axis) return self.obj._ixs(key, axis=axis) def _get_slice_axis(self, slice_obj: slice, axis: int): # caller is responsible for ensuring non-None axis obj = self.obj if not need_slice(slice_obj): return obj.copy(deep=False) labels = obj._get_axis(axis) labels._validate_positional_slice(slice_obj) return self.obj._slice(slice_obj, axis=axis) def _convert_to_indexer(self, key, axis: int, is_setter: bool = False): """ Much simpler as we only have to deal with our valid types. """ return key def _get_setitem_indexer(self, key): # GH#32257 Fall through to let numpy do validation if is_iterator(key): return list(key) return key # ------------------------------------------------------------------- def _setitem_with_indexer(self, indexer, value, name="iloc"): """ _setitem_with_indexer is for setting values on a Series/DataFrame using positional indexers. If the relevant keys are not present, the Series/DataFrame may be expanded. This method is currently broken when dealing with non-unique Indexes, since it goes from positional indexers back to labels when calling BlockManager methods, see GH#12991, GH#22046, GH#15686. 
""" info_axis = self.obj._info_axis_number # maybe partial set take_split_path = not self.obj._mgr.is_single_block # if there is only one block/type, still have to take split path # unless the block is one-dimensional or it can hold the value if ( not take_split_path and getattr(self.obj._mgr, "blocks", False) and self.ndim > 1 ): # in case of dict, keys are indices val = list(value.values()) if isinstance(value, dict) else value blk = self.obj._mgr.blocks[0] take_split_path = not blk._can_hold_element(val) # if we have any multi-indexes that have non-trivial slices # (not null slices) then we must take the split path, xref # GH 10360, GH 27841 if isinstance(indexer, tuple) and len(indexer) == len(self.obj.axes): for i, ax in zip(indexer, self.obj.axes): if isinstance(ax, MultiIndex) and not ( is_integer(i) or com.is_null_slice(i) ): take_split_path = True break if isinstance(indexer, tuple): nindexer = [] for i, idx in enumerate(indexer): if isinstance(idx, dict): # reindex the axis to the new value # and set inplace key, _ = convert_missing_indexer(idx) # if this is the items axes, then take the main missing # path first # this correctly sets the dtype and avoids cache issues # essentially this separates out the block that is needed # to possibly be modified if self.ndim > 1 and i == info_axis: # add the new item, and set the value # must have all defined axes if we have a scalar # or a list-like on the non-info axes if we have a # list-like if not len(self.obj): if not is_list_like_indexer(value): raise ValueError( "cannot set a frame with no " "defined index and a scalar" ) self.obj[key] = value return # add a new item with the dtype setup if com.is_null_slice(indexer[0]): # We are setting an entire column self.obj[key] = value else: self.obj[key] = infer_fill_value(value) new_indexer = convert_from_missing_indexer_tuple( indexer, self.obj.axes ) self._setitem_with_indexer(new_indexer, value, name) return # reindex the axis # make sure to clear the cache because we are # just replacing the block manager here # so the object is the same index = self.obj._get_axis(i) labels = index.insert(len(index), key) # We are expanding the Series/DataFrame values to match # the length of thenew index `labels`. GH#40096 ensure # this is valid even if the index has duplicates. taker = np.arange(len(index) + 1, dtype=np.intp) taker[-1] = -1 reindexers = {i: (labels, taker)} new_obj = self.obj._reindex_with_indexers( reindexers, allow_dups=True ) self.obj._mgr = new_obj._mgr self.obj._maybe_update_cacher(clear=True) self.obj._is_copy = None nindexer.append(labels.get_loc(key)) else: nindexer.append(idx) indexer = tuple(nindexer) else: indexer, missing = convert_missing_indexer(indexer) if missing: self._setitem_with_indexer_missing(indexer, value) return # align and set the values if take_split_path: # We have to operate column-wise self._setitem_with_indexer_split_path(indexer, value, name) else: self._setitem_single_block(indexer, value, name) def _setitem_with_indexer_split_path(self, indexer, value, name: str): """ Setitem column-wise. 
""" # Above we only set take_split_path to True for 2D cases assert self.ndim == 2 if not isinstance(indexer, tuple): indexer = _tuplify(self.ndim, indexer) if len(indexer) > self.ndim: raise IndexError("too many indices for array") if isinstance(indexer[0], np.ndarray) and indexer[0].ndim > 2: raise ValueError(r"Cannot set values with ndim > 2") if (isinstance(value, ABCSeries) and name != "iloc") or isinstance(value, dict): from pandas import Series value = self._align_series(indexer, Series(value)) # Ensure we have something we can iterate over info_axis = indexer[1] ilocs = self._ensure_iterable_column_indexer(info_axis) pi = indexer[0] lplane_indexer = length_of_indexer(pi, self.obj.index) # lplane_indexer gives the expected length of obj[indexer[0]] # we need an iterable, with a ndim of at least 1 # eg. don't pass through np.array(0) if is_list_like_indexer(value) and getattr(value, "ndim", 1) > 0: if isinstance(value, ABCDataFrame): self._setitem_with_indexer_frame_value(indexer, value, name) elif np.ndim(value) == 2: self._setitem_with_indexer_2d_value(indexer, value) elif len(ilocs) == 1 and lplane_indexer == len(value) and not is_scalar(pi): # We are setting multiple rows in a single column. self._setitem_single_column(ilocs[0], value, pi) elif len(ilocs) == 1 and 0 != lplane_indexer != len(value): # We are trying to set N values into M entries of a single # column, which is invalid for N != M # Exclude zero-len for e.g. boolean masking that is all-false if len(value) == 1 and not is_integer(info_axis): # This is a case like df.iloc[:3, [1]] = [0] # where we treat as df.iloc[:3, 1] = 0 return self._setitem_with_indexer((pi, info_axis[0]), value[0]) raise ValueError( "Must have equal len keys and value " "when setting with an iterable" ) elif lplane_indexer == 0 and len(value) == len(self.obj.index): # We get here in one case via .loc with a all-False mask pass elif len(ilocs) == len(value): # We are setting multiple columns in a single row. for loc, v in zip(ilocs, value): self._setitem_single_column(loc, v, pi) elif len(ilocs) == 1 and com.is_null_slice(pi) and len(self.obj) == 0: # This is a setitem-with-expansion, see # test_loc_setitem_empty_append_expands_rows_mixed_dtype # e.g. 
df = DataFrame(columns=["x", "y"]) # df["x"] = df["x"].astype(np.int64) # df.loc[:, "x"] = [1, 2, 3] self._setitem_single_column(ilocs[0], value, pi) else: raise ValueError( "Must have equal len keys and value " "when setting with an iterable" ) else: # scalar value for loc in ilocs: self._setitem_single_column(loc, value, pi) def _setitem_with_indexer_2d_value(self, indexer, value): # We get here with np.ndim(value) == 2, excluding DataFrame, # which goes through _setitem_with_indexer_frame_value pi = indexer[0] ilocs = self._ensure_iterable_column_indexer(indexer[1]) # GH#7551 Note that this coerces the dtype if we are mixed value = np.array(value, dtype=object) if len(ilocs) != value.shape[1]: raise ValueError( "Must have equal len keys and value when setting with an ndarray" ) for i, loc in enumerate(ilocs): # setting with a list, re-coerces self._setitem_single_column(loc, value[:, i].tolist(), pi) def _setitem_with_indexer_frame_value(self, indexer, value: DataFrame, name: str): ilocs = self._ensure_iterable_column_indexer(indexer[1]) sub_indexer = list(indexer) pi = indexer[0] multiindex_indexer = isinstance(self.obj.columns, MultiIndex) unique_cols = value.columns.is_unique # We do not want to align the value in case of iloc GH#37728 if name == "iloc": for i, loc in enumerate(ilocs): val = value.iloc[:, i] self._setitem_single_column(loc, val, pi) elif not unique_cols and value.columns.equals(self.obj.columns): # We assume we are already aligned, see # test_iloc_setitem_frame_duplicate_columns_multiple_blocks for loc in ilocs: item = self.obj.columns[loc] if item in value: sub_indexer[1] = item val = self._align_series( tuple(sub_indexer), value.iloc[:, loc], multiindex_indexer, ) else: val = np.nan self._setitem_single_column(loc, val, pi) elif not unique_cols: raise ValueError("Setting with non-unique columns is not allowed.") else: for loc in ilocs: item = self.obj.columns[loc] if item in value: sub_indexer[1] = item val = self._align_series( tuple(sub_indexer), value[item], multiindex_indexer ) else: val = np.nan self._setitem_single_column(loc, val, pi) def _setitem_single_column(self, loc: int, value, plane_indexer): """ Parameters ---------- loc : int Indexer for column position plane_indexer : int, slice, listlike[int] The indexer we use for setitem along axis=0. """ pi = plane_indexer ser = self.obj._ixs(loc, axis=1) # perform the equivalent of a setitem on the info axis # as we have a null slice or a slice with full bounds # which means essentially reassign to the columns of a # multi-dim object # GH#6149 (null slice), GH#10408 (full bounds) if com.is_null_slice(pi) or com.is_full_slice(pi, len(self.obj)): ser = value elif ( is_array_like(value) and is_exact_shape_match(ser, value) and not is_empty_indexer(pi, value) ): if is_list_like(pi): ser = value[np.argsort(pi)] else: # in case of slice ser = value[pi] else: # set the item, possibly having a dtype change ser = ser.copy() ser._mgr = ser._mgr.setitem(indexer=(pi,), value=value) ser._maybe_update_cacher(clear=True) # reset the sliced object if unique self.obj._iset_item(loc, ser) def _setitem_single_block(self, indexer, value, name: str): """ _setitem_with_indexer for the case when we have a single Block. 
""" from pandas import Series info_axis = self.obj._info_axis_number item_labels = self.obj._get_axis(info_axis) if isinstance(indexer, tuple): # if we are setting on the info axis ONLY # set using those methods to avoid block-splitting # logic here if ( len(indexer) > info_axis and is_integer(indexer[info_axis]) and all( com.is_null_slice(idx) for i, idx in enumerate(indexer) if i != info_axis ) ): selected_item_labels = item_labels[indexer[info_axis]] if len(item_labels.get_indexer_for([selected_item_labels])) == 1: self.obj[selected_item_labels] = value return indexer = maybe_convert_ix(*indexer) if (isinstance(value, ABCSeries) and name != "iloc") or isinstance(value, dict): # TODO(EA): ExtensionBlock.setitem this causes issues with # setting for extensionarrays that store dicts. Need to decide # if it's worth supporting that. value = self._align_series(indexer, Series(value)) elif isinstance(value, ABCDataFrame) and name != "iloc": value = self._align_frame(indexer, value) # check for chained assignment self.obj._check_is_chained_assignment_possible() # actually do the set self.obj._mgr = self.obj._mgr.setitem(indexer=indexer, value=value) self.obj._maybe_update_cacher(clear=True) def _setitem_with_indexer_missing(self, indexer, value): """ Insert new row(s) or column(s) into the Series or DataFrame. """ from pandas import Series # reindex the axis to the new value # and set inplace if self.ndim == 1: index = self.obj.index new_index = index.insert(len(index), indexer) # we have a coerced indexer, e.g. a float # that matches in an Int64Index, so # we will not create a duplicate index, rather # index to that element # e.g. 0.0 -> 0 # GH#12246 if index.is_unique: new_indexer = index.get_indexer([new_index[-1]]) if (new_indexer != -1).any(): # We get only here with loc, so can hard code return self._setitem_with_indexer(new_indexer, value, "loc") # this preserves dtype of the value new_values = Series([value])._values if len(self.obj._values): # GH#22717 handle casting compatibility that np.concatenate # does incorrectly new_values = concat_compat([self.obj._values, new_values]) self.obj._mgr = self.obj._constructor( new_values, index=new_index, name=self.obj.name )._mgr self.obj._maybe_update_cacher(clear=True) elif self.ndim == 2: if not len(self.obj.columns): # no columns and scalar raise ValueError("cannot set a frame with no defined columns") if isinstance(value, ABCSeries): # append a Series value = value.reindex(index=self.obj.columns, copy=True) value.name = indexer elif isinstance(value, dict): value = Series( value, index=self.obj.columns, name=indexer, dtype=object ) else: # a list-list if is_list_like_indexer(value): # must have conforming columns if len(value) != len(self.obj.columns): raise ValueError("cannot set a row with mismatched columns") value = Series(value, index=self.obj.columns, name=indexer) self.obj._mgr = self.obj.append(value)._mgr self.obj._maybe_update_cacher(clear=True) def _ensure_iterable_column_indexer(self, column_indexer): """ Ensure that our column indexer is something that can be iterated over. 
""" if is_integer(column_indexer): ilocs = [column_indexer] elif isinstance(column_indexer, slice): ilocs = np.arange(len(self.obj.columns))[column_indexer] elif isinstance(column_indexer, np.ndarray) and is_bool_dtype( column_indexer.dtype ): ilocs = np.arange(len(column_indexer))[column_indexer] else: ilocs = column_indexer return ilocs def _align_series(self, indexer, ser: Series, multiindex_indexer: bool = False): """ Parameters ---------- indexer : tuple, slice, scalar Indexer used to get the locations that will be set to `ser`. ser : pd.Series Values to assign to the locations specified by `indexer`. multiindex_indexer : bool, optional Defaults to False. Should be set to True if `indexer` was from a `pd.MultiIndex`, to avoid unnecessary broadcasting. Returns ------- `np.array` of `ser` broadcast to the appropriate shape for assignment to the locations selected by `indexer` """ if isinstance(indexer, (slice, np.ndarray, list, Index)): indexer = (indexer,) if isinstance(indexer, tuple): # flatten np.ndarray indexers def ravel(i): return i.ravel() if isinstance(i, np.ndarray) else i indexer = tuple(map(ravel, indexer)) aligners = [not com.is_null_slice(idx) for idx in indexer] sum_aligners = sum(aligners) single_aligner = sum_aligners == 1 is_frame = self.ndim == 2 obj = self.obj # are we a single alignable value on a non-primary # dim (e.g. panel: 1,2, or frame: 0) ? # hence need to align to a single axis dimension # rather that find all valid dims # frame if is_frame: single_aligner = single_aligner and aligners[0] # we have a frame, with multiple indexers on both axes; and a # series, so need to broadcast (see GH5206) if sum_aligners == self.ndim and all(is_sequence(_) for _ in indexer): ser = ser.reindex(obj.axes[0][indexer[0]], copy=True)._values # single indexer if len(indexer) > 1 and not multiindex_indexer: len_indexer = len(indexer[1]) ser = np.tile(ser, len_indexer).reshape(len_indexer, -1).T return ser for i, idx in enumerate(indexer): ax = obj.axes[i] # multiple aligners (or null slices) if is_sequence(idx) or isinstance(idx, slice): if single_aligner and com.is_null_slice(idx): continue new_ix = ax[idx] if not is_list_like_indexer(new_ix): new_ix = Index([new_ix]) else: new_ix = Index(new_ix) if ser.index.equals(new_ix) or not len(new_ix): return ser._values.copy() return ser.reindex(new_ix)._values # 2 dims elif single_aligner: # reindex along index ax = self.obj.axes[1] if ser.index.equals(ax) or not len(ax): return ser._values.copy() return ser.reindex(ax)._values elif is_integer(indexer) and self.ndim == 1: if is_object_dtype(self.obj): return ser ax = self.obj._get_axis(0) if ser.index.equals(ax): return ser._values.copy() return ser.reindex(ax)._values[indexer] elif is_integer(indexer): ax = self.obj._get_axis(1) if ser.index.equals(ax): return ser._values.copy() return ser.reindex(ax)._values raise ValueError("Incompatible indexer with Series") def _align_frame(self, indexer, df: DataFrame): is_frame = self.ndim == 2 if isinstance(indexer, tuple): idx, cols = None, None sindexers = [] for i, ix in enumerate(indexer): ax = self.obj.axes[i] if is_sequence(ix) or isinstance(ix, slice): if isinstance(ix, np.ndarray): ix = ix.ravel() if idx is None: idx = ax[ix] elif cols is None: cols = ax[ix] else: break else: sindexers.append(i) if idx is not None and cols is not None: if df.index.equals(idx) and df.columns.equals(cols): val = df.copy()._values else: val = df.reindex(idx, columns=cols)._values return val elif (isinstance(indexer, slice) or 
is_list_like_indexer(indexer)) and is_frame: ax = self.obj.index[indexer] if df.index.equals(ax): val = df.copy()._values else: # we have a multi-index and are trying to align # with a particular, level GH3738 if ( isinstance(ax, MultiIndex) and isinstance(df.index, MultiIndex) and ax.nlevels != df.index.nlevels ): raise TypeError( "cannot align on a multi-index with out " "specifying the join levels" ) val = df.reindex(index=ax)._values return val raise ValueError("Incompatible indexer with DataFrame") class _ScalarAccessIndexer(NDFrameIndexerBase): """ Access scalars quickly. """ def _convert_key(self, key, is_setter: bool = False): raise AbstractMethodError(self) def __getitem__(self, key): if not isinstance(key, tuple): # we could have a convertible item here (e.g. Timestamp) if not is_list_like_indexer(key): key = (key,) else: raise ValueError("Invalid call for scalar access (getting)!") key = self._convert_key(key) return self.obj._get_value(*key, takeable=self._takeable) def __setitem__(self, key, value): if isinstance(key, tuple): key = tuple(com.apply_if_callable(x, self.obj) for x in key) else: # scalar callable may return tuple key = com.apply_if_callable(key, self.obj) if not isinstance(key, tuple): key = _tuplify(self.ndim, key) key = list(self._convert_key(key, is_setter=True)) if len(key) != self.ndim: raise ValueError("Not enough indexers for scalar access (setting)!") self.obj._set_value(*key, value=value, takeable=self._takeable) @doc(IndexingMixin.at) class _AtIndexer(_ScalarAccessIndexer): _takeable = False def _convert_key(self, key, is_setter: bool = False): """ Require they keys to be the same type as the index. (so we don't fallback) """ # GH 26989 # For series, unpacking key needs to result in the label. # This is already the case for len(key) == 1; e.g. (1,) if self.ndim == 1 and len(key) > 1: key = (key,) # allow arbitrary setting if is_setter: return list(key) return key @property def _axes_are_unique(self) -> bool: # Only relevant for self.ndim == 2 assert self.ndim == 2 return self.obj.index.is_unique and self.obj.columns.is_unique def __getitem__(self, key): if self.ndim == 2 and not self._axes_are_unique: # GH#33041 fall back to .loc if not isinstance(key, tuple) or not all(is_scalar(x) for x in key): raise ValueError("Invalid call for scalar access (getting)!") return self.obj.loc[key] return super().__getitem__(key) def __setitem__(self, key, value): if self.ndim == 2 and not self._axes_are_unique: # GH#33041 fall back to .loc if not isinstance(key, tuple) or not all(is_scalar(x) for x in key): raise ValueError("Invalid call for scalar access (setting)!") self.obj.loc[key] = value return return super().__setitem__(key, value) @doc(IndexingMixin.iat) class _iAtIndexer(_ScalarAccessIndexer): _takeable = True def _convert_key(self, key, is_setter: bool = False): """ Require integer args. (and convert to label arguments) """ for i in key: if not is_integer(i): raise ValueError("iAt based indexing can only have integer indexers") return key def _tuplify(ndim: int, loc: Hashable) -> tuple[Hashable | slice, ...]: """ Given an indexer for the first dimension, create an equivalent tuple for indexing over all dimensions. Parameters ---------- ndim : int loc : object Returns ------- tuple """ _tup: list[Hashable | slice] _tup = [slice(None, None) for _ in range(ndim)] _tup[0] = loc return tuple(_tup) def convert_to_index_sliceable(obj: DataFrame, key): """ If we are index sliceable, then return my slicer, otherwise return None. 
""" idx = obj.index if isinstance(key, slice): return idx._convert_slice_indexer(key, kind="getitem") elif isinstance(key, str): # we are an actual column if key in obj.columns: return None # We might have a datetimelike string that we can translate to a # slice here via partial string indexing if idx._supports_partial_string_indexing: try: res = idx._get_string_slice(str(key)) warnings.warn( "Indexing a DataFrame with a datetimelike index using a single " "string to slice the rows, like `frame[string]`, is deprecated " "and will be removed in a future version. Use `frame.loc[string]` " "instead.", FutureWarning, stacklevel=3, ) return res except (KeyError, ValueError, NotImplementedError): return None return None def check_bool_indexer(index: Index, key) -> np.ndarray: """ Check if key is a valid boolean indexer for an object with such index and perform reindexing or conversion if needed. This function assumes that is_bool_indexer(key) == True. Parameters ---------- index : Index Index of the object on which the indexing is done. key : list-like Boolean indexer to check. Returns ------- np.array Resulting key. Raises ------ IndexError If the key does not have the same length as index. IndexingError If the index of the key is unalignable to index. """ result = key if isinstance(key, ABCSeries) and not key.index.equals(index): result = result.reindex(index) mask = isna(result._values) if mask.any(): raise IndexingError( "Unalignable boolean Series provided as " "indexer (index of the boolean Series and of " "the indexed object do not match)." ) return result.astype(bool)._values if is_object_dtype(key): # key might be object-dtype bool, check_array_indexer needs bool array result = np.asarray(result, dtype=bool) elif not is_array_like(result): # GH 33924 # key may contain nan elements, check_array_indexer needs bool array result = pd_array(result, dtype=bool) return check_array_indexer(index, result) def convert_missing_indexer(indexer): """ Reverse convert a missing indexer, which is a dict return the scalar indexer and a boolean indicating if we converted """ if isinstance(indexer, dict): # a missing key (but not a tuple indexer) indexer = indexer["key"] if isinstance(indexer, bool): raise KeyError("cannot use a single bool to index into setitem") return indexer, True return indexer, False def convert_from_missing_indexer_tuple(indexer, axes): """ Create a filtered indexer that doesn't have any missing indexers. """ def get_indexer(_i, _idx): return axes[_i].get_loc(_idx["key"]) if isinstance(_idx, dict) else _idx return tuple(get_indexer(_i, _idx) for _i, _idx in enumerate(indexer)) def maybe_convert_ix(*args): """ We likely want to take the cross-product. """ for arg in args: if not isinstance(arg, (np.ndarray, list, ABCSeries, Index)): return args return np.ix_(*args) def is_nested_tuple(tup, labels) -> bool: """ Returns ------- bool """ # check for a compatible nested tuple and multiindexes among the axes if not isinstance(tup, tuple): return False for k in tup: if is_list_like(k) or isinstance(k, slice): return isinstance(labels, MultiIndex) return False def is_label_like(key) -> bool: """ Returns ------- bool """ # select a label or row return not isinstance(key, slice) and not is_list_like_indexer(key) def need_slice(obj: slice) -> bool: """ Returns ------- bool """ return ( obj.start is not None or obj.stop is not None or (obj.step is not None and obj.step != 1) )
""" Tests parsers ability to read and parse non-local files and hence require a network connection to be read. """ from io import ( BytesIO, StringIO, ) import logging import numpy as np import pytest import pandas.util._test_decorators as td from pandas import DataFrame import pandas._testing as tm from pandas.io.feather_format import read_feather from pandas.io.parsers import read_csv @pytest.mark.network @pytest.mark.parametrize( "compress_type, extension", [("gzip", ".gz"), ("bz2", ".bz2"), ("zip", ".zip"), ("xz", ".xz")], ) @pytest.mark.parametrize("mode", ["explicit", "infer"]) @pytest.mark.parametrize("engine", ["python", "c"]) def test_compressed_urls(salaries_table, compress_type, extension, mode, engine): check_compressed_urls(salaries_table, compress_type, extension, mode, engine) @tm.network def check_compressed_urls(salaries_table, compression, extension, mode, engine): # test reading compressed urls with various engines and # extension inference base_url = ( "https://github.com/pandas-dev/pandas/raw/master/" "pandas/tests/io/parser/data/salaries.csv" ) url = base_url + extension if mode != "explicit": compression = mode url_table = read_csv(url, sep="\t", compression=compression, engine=engine) tm.assert_frame_equal(url_table, salaries_table) @tm.network("https://raw.githubusercontent.com/", check_before_test=True) def test_url_encoding_csv(): """ read_csv should honor the requested encoding for URLs. GH 10424 """ path = ( "https://raw.githubusercontent.com/pandas-dev/pandas/master/" + "pandas/tests/io/parser/data/unicode_series.csv" ) df = read_csv(path, encoding="latin-1", header=None) assert df.loc[15, 1] == "Á köldum klaka (Cold Fever) (1994)" @pytest.fixture def tips_df(datapath): """DataFrame with the tips dataset.""" return read_csv(datapath("io", "data", "csv", "tips.csv")) @pytest.mark.usefixtures("s3_resource") @td.skip_if_not_us_locale() class TestS3: @td.skip_if_no("s3fs") def test_parse_public_s3_bucket(self, tips_df, s3so): # more of an integration test due to the not-public contents portion # can probably mock this though. 
for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) # Read public file from bucket with not-public contents df = read_csv("s3://cant_get_it/tips.csv", storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3n_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3n" URL df = read_csv("s3n://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3a_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3a" URL df = read_csv("s3a://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_nrows(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_chunked(self, tips_df, s3so): # Read with a chunksize chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them # properly. df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_chunked_python(self, tips_df, s3so): # Read with a chunksize using the Python parser chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, engine="python", storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them properly. 
df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_infer_s3_compression(self, tips_df, s3so): for ext in ["", ".gz", ".bz2"]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression="infer", storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3_bucket_nrows_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_read_s3_fails(self, s3so): msg = "The specified bucket does not exist" with pytest.raises(IOError, match=msg): read_csv("s3://nyqpug/asdf.csv", storage_options=s3so) # Receive a permission error when trying to read a private bucket. # It's irrelevant here that this isn't actually a table. with pytest.raises(IOError, match=msg): read_csv("s3://cant_get_it/file.csv") @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) def test_write_s3_csv_fails(self, tips_df, s3so): # GH 32486 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_csv( "s3://an_s3_bucket_data_doesnt_exit/not_real.csv", storage_options=s3so ) @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) @td.skip_if_no("pyarrow") def test_write_s3_parquet_fails(self, tips_df, s3so): # GH 27679 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_parquet( "s3://an_s3_bucket_data_doesnt_exit/not_real.parquet", storage_options=s3so, ) def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file): # see gh-16135 s3_object = s3_resource.meta.client.get_object( Bucket="pandas-test", Key="tips.csv" ) with BytesIO(s3_object["Body"].read()) as buffer: result = read_csv(buffer, encoding="utf8") assert isinstance(result, DataFrame) assert not result.empty expected = read_csv(tips_file) tm.assert_frame_equal(result, expected) def test_read_csv_chunked_download(self, s3_resource, caplog, s3so): # 8 MB, S3FS uses 5MB chunks import s3fs df = DataFrame(np.random.randn(100000, 4), columns=list("abcd")) str_buf = StringIO() df.to_csv(str_buf) buf = BytesIO(str_buf.getvalue().encode("utf-8")) s3_resource.Bucket("pandas-test").put_object(Key="large-file.csv", Body=buf) # Possibly some state leaking in between tests. 
# If we don't clear this cache, we saw `GetObject operation: Forbidden`. # Presumably the s3fs instance is being cached, with the directory listing # from *before* we add the large-file.csv in the pandas-test bucket. s3fs.S3FileSystem.clear_instance_cache() with caplog.at_level(logging.DEBUG, logger="s3fs"): read_csv("s3://pandas-test/large-file.csv", nrows=5, storage_options=s3so) # log of fetch_range (start, stop) assert (0, 5505024) in (x.args[-2:] for x in caplog.records) def test_read_s3_with_hash_in_key(self, tips_df, s3so): # GH 25945 result = read_csv("s3://pandas-test/tips#1.csv", storage_options=s3so) tm.assert_frame_equal(tips_df, result) @td.skip_if_no("pyarrow") def test_read_feather_s3_file_path(self, feather_file, s3so): # GH 29055 expected = read_feather(feather_file) res = read_feather( "s3://pandas-test/simple_dataset.feather", storage_options=s3so ) tm.assert_frame_equal(expected, res)
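These tests exercise ``read_csv``'s URL and fsspec/S3 handling; a condensed sketch of the same entry points follows, with placeholder URLs and bucket names that are assumptions rather than real resources:

import pandas as pd

# Placeholder URL: a ".gz" suffix lets compression be inferred from the
# extension, which should match passing compression explicitly.
url = "https://example.com/data/salaries.csv.gz"
explicit = pd.read_csv(url, sep="\t", compression="gzip")
inferred = pd.read_csv(url, sep="\t", compression="infer")

# S3 paths are routed through fsspec/s3fs; credentials, anonymous access,
# endpoints, etc. are passed via storage_options.
tips = pd.read_csv(
    "s3://some-public-bucket/tips.csv",
    storage_options={"anon": True},
)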
datapythonista/pandas
pandas/tests/io/parser/test_network.py
pandas/core/indexing.py
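The ``_AtIndexer``/``_iAtIndexer`` classes in the indexing module above back the public ``.at``/``.iat`` accessors: ``.at`` is label based (and falls back to ``.loc`` when an axis is not unique, GH#33041), while ``.iat`` accepts only integer positions. A small sketch on made-up data:

import pandas as pd

df = pd.DataFrame({"x": [1, 2, 3]}, index=["a", "b", "c"])

# Label-based scalar get/set.
assert df.at["b", "x"] == 2
df.at["b", "x"] = 20

# Position-based scalar get/set.
assert df.iat[1, 0] == 20

# Non-integer keys are rejected by _iAtIndexer._convert_key.
try:
    df.iat["b", 0]
except ValueError as err:
    print(err)  # "iAt based indexing can only have integer indexers"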
from pandas import ( TimedeltaIndex, timedelta_range, ) import pandas._testing as tm class TestTimedeltaIndexDelete: def test_delete(self): idx = timedelta_range(start="1 Days", periods=5, freq="D", name="idx") # preserve freq expected_0 = timedelta_range(start="2 Days", periods=4, freq="D", name="idx") expected_4 = timedelta_range(start="1 Days", periods=4, freq="D", name="idx") # reset freq to None expected_1 = TimedeltaIndex( ["1 day", "3 day", "4 day", "5 day"], freq=None, name="idx" ) cases = { 0: expected_0, -5: expected_0, -1: expected_4, 4: expected_4, 1: expected_1, } for n, expected in cases.items(): result = idx.delete(n) tm.assert_index_equal(result, expected) assert result.name == expected.name assert result.freq == expected.freq with tm.external_error_raised((IndexError, ValueError)): # either depending on numpy version idx.delete(5) def test_delete_slice(self): idx = timedelta_range(start="1 days", periods=10, freq="D", name="idx") # preserve freq expected_0_2 = timedelta_range(start="4 days", periods=7, freq="D", name="idx") expected_7_9 = timedelta_range(start="1 days", periods=7, freq="D", name="idx") # reset freq to None expected_3_5 = TimedeltaIndex( ["1 d", "2 d", "3 d", "7 d", "8 d", "9 d", "10d"], freq=None, name="idx" ) cases = { (0, 1, 2): expected_0_2, (7, 8, 9): expected_7_9, (3, 4, 5): expected_3_5, } for n, expected in cases.items(): result = idx.delete(n) tm.assert_index_equal(result, expected) assert result.name == expected.name assert result.freq == expected.freq result = idx.delete(slice(n[0], n[-1] + 1)) tm.assert_index_equal(result, expected) assert result.name == expected.name assert result.freq == expected.freq def test_delete_doesnt_infer_freq(self): # GH#30655 behavior matches DatetimeIndex tdi = TimedeltaIndex(["1 Day", "2 Days", None, "3 Days", "4 Days"]) result = tdi.delete(2) assert result.freq is None
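A condensed, runnable version of the freq behaviour these delete tests assert, using only the public API:

import pandas as pd

tdi = pd.timedelta_range(start="1 Days", periods=5, freq="D", name="idx")

# Deleting an endpoint keeps the index evenly spaced, so freq survives.
assert tdi.delete(0).freq == "D"
assert tdi.delete(-1).freq == "D"

# Deleting from the middle breaks the spacing and resets freq to None.
assert tdi.delete(1).freq is None

# Slice-based deletion behaves like deleting the corresponding positions.
assert tdi.delete(slice(0, 2)).equals(tdi[2:])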
""" Tests parsers ability to read and parse non-local files and hence require a network connection to be read. """ from io import ( BytesIO, StringIO, ) import logging import numpy as np import pytest import pandas.util._test_decorators as td from pandas import DataFrame import pandas._testing as tm from pandas.io.feather_format import read_feather from pandas.io.parsers import read_csv @pytest.mark.network @pytest.mark.parametrize( "compress_type, extension", [("gzip", ".gz"), ("bz2", ".bz2"), ("zip", ".zip"), ("xz", ".xz")], ) @pytest.mark.parametrize("mode", ["explicit", "infer"]) @pytest.mark.parametrize("engine", ["python", "c"]) def test_compressed_urls(salaries_table, compress_type, extension, mode, engine): check_compressed_urls(salaries_table, compress_type, extension, mode, engine) @tm.network def check_compressed_urls(salaries_table, compression, extension, mode, engine): # test reading compressed urls with various engines and # extension inference base_url = ( "https://github.com/pandas-dev/pandas/raw/master/" "pandas/tests/io/parser/data/salaries.csv" ) url = base_url + extension if mode != "explicit": compression = mode url_table = read_csv(url, sep="\t", compression=compression, engine=engine) tm.assert_frame_equal(url_table, salaries_table) @tm.network("https://raw.githubusercontent.com/", check_before_test=True) def test_url_encoding_csv(): """ read_csv should honor the requested encoding for URLs. GH 10424 """ path = ( "https://raw.githubusercontent.com/pandas-dev/pandas/master/" + "pandas/tests/io/parser/data/unicode_series.csv" ) df = read_csv(path, encoding="latin-1", header=None) assert df.loc[15, 1] == "Á köldum klaka (Cold Fever) (1994)" @pytest.fixture def tips_df(datapath): """DataFrame with the tips dataset.""" return read_csv(datapath("io", "data", "csv", "tips.csv")) @pytest.mark.usefixtures("s3_resource") @td.skip_if_not_us_locale() class TestS3: @td.skip_if_no("s3fs") def test_parse_public_s3_bucket(self, tips_df, s3so): # more of an integration test due to the not-public contents portion # can probably mock this though. 
for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) # Read public file from bucket with not-public contents df = read_csv("s3://cant_get_it/tips.csv", storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3n_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3n" URL df = read_csv("s3n://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3a_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3a" URL df = read_csv("s3a://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_nrows(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_chunked(self, tips_df, s3so): # Read with a chunksize chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them # properly. df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_chunked_python(self, tips_df, s3so): # Read with a chunksize using the Python parser chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, engine="python", storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them properly. 
df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_infer_s3_compression(self, tips_df, s3so): for ext in ["", ".gz", ".bz2"]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression="infer", storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3_bucket_nrows_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_read_s3_fails(self, s3so): msg = "The specified bucket does not exist" with pytest.raises(IOError, match=msg): read_csv("s3://nyqpug/asdf.csv", storage_options=s3so) # Receive a permission error when trying to read a private bucket. # It's irrelevant here that this isn't actually a table. with pytest.raises(IOError, match=msg): read_csv("s3://cant_get_it/file.csv") @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) def test_write_s3_csv_fails(self, tips_df, s3so): # GH 32486 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_csv( "s3://an_s3_bucket_data_doesnt_exit/not_real.csv", storage_options=s3so ) @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) @td.skip_if_no("pyarrow") def test_write_s3_parquet_fails(self, tips_df, s3so): # GH 27679 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_parquet( "s3://an_s3_bucket_data_doesnt_exit/not_real.parquet", storage_options=s3so, ) def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file): # see gh-16135 s3_object = s3_resource.meta.client.get_object( Bucket="pandas-test", Key="tips.csv" ) with BytesIO(s3_object["Body"].read()) as buffer: result = read_csv(buffer, encoding="utf8") assert isinstance(result, DataFrame) assert not result.empty expected = read_csv(tips_file) tm.assert_frame_equal(result, expected) def test_read_csv_chunked_download(self, s3_resource, caplog, s3so): # 8 MB, S3FS uses 5MB chunks import s3fs df = DataFrame(np.random.randn(100000, 4), columns=list("abcd")) str_buf = StringIO() df.to_csv(str_buf) buf = BytesIO(str_buf.getvalue().encode("utf-8")) s3_resource.Bucket("pandas-test").put_object(Key="large-file.csv", Body=buf) # Possibly some state leaking in between tests. 
# If we don't clear this cache, we saw `GetObject operation: Forbidden`. # Presumably the s3fs instance is being cached, with the directory listing # from *before* we add the large-file.csv in the pandas-test bucket. s3fs.S3FileSystem.clear_instance_cache() with caplog.at_level(logging.DEBUG, logger="s3fs"): read_csv("s3://pandas-test/large-file.csv", nrows=5, storage_options=s3so) # log of fetch_range (start, stop) assert (0, 5505024) in (x.args[-2:] for x in caplog.records) def test_read_s3_with_hash_in_key(self, tips_df, s3so): # GH 25945 result = read_csv("s3://pandas-test/tips#1.csv", storage_options=s3so) tm.assert_frame_equal(tips_df, result) @td.skip_if_no("pyarrow") def test_read_feather_s3_file_path(self, feather_file, s3so): # GH 29055 expected = read_feather(feather_file) res = read_feather( "s3://pandas-test/simple_dataset.feather", storage_options=s3so ) tm.assert_frame_equal(expected, res)
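The chunked-reading tests above only rely on the public reader interface; a local sketch of the same pattern (the file name is arbitrary and any CSV would do):

import pandas as pd

# Build a small throwaway CSV so the example is self-contained.
pd.DataFrame({"a": range(12)}).to_csv("throwaway.csv", index=False)

chunksize = 5
with pd.read_csv("throwaway.csv", chunksize=chunksize) as reader:
    assert reader.chunksize == chunksize
    for chunk in reader:
        # Every chunk is a DataFrame of at most `chunksize` rows.
        assert isinstance(chunk, pd.DataFrame)
        assert len(chunk) <= chunksize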
datapythonista/pandas
pandas/tests/io/parser/test_network.py
pandas/tests/indexes/timedeltas/test_delete.py
from __future__ import annotations

from contextlib import contextmanager
import re
from typing import (
    Sequence,
    Type,
    cast,
)
import warnings


@contextmanager
def assert_produces_warning(
    expected_warning: type[Warning] | bool | None = Warning,
    filter_level="always",
    check_stacklevel: bool = True,
    raise_on_extra_warnings: bool = True,
    match: str | None = None,
):
    """
    Context manager for running code expected to either raise a specific
    warning, or not raise any warnings. Verifies that the code raises the
    expected warning, and that it does not raise any other unexpected
    warnings. It is basically a wrapper around ``warnings.catch_warnings``.

    Parameters
    ----------
    expected_warning : {Warning, False, None}, default Warning
        The type of warning to check for. ``Warning`` is the base class for
        all warnings. To check that no warning is returned, specify
        ``False`` or ``None``.
    filter_level : str or None, default "always"
        Specifies whether warnings are ignored, displayed, or turned
        into errors.
        Valid values are:

        * "error" - turns matching warnings into exceptions
        * "ignore" - discard the warning
        * "always" - always emit a warning
        * "default" - print the warning the first time it is generated
          from each location
        * "module" - print the warning the first time it is generated
          from each module
        * "once" - print the warning the first time it is generated

    check_stacklevel : bool, default True
        If True, displays the line that called the function containing
        the warning to show where the function is called. Otherwise, the
        line that implements the function is displayed.
    raise_on_extra_warnings : bool, default True
        Whether extra warnings not of the type `expected_warning` should
        cause the test to fail.
    match : str, optional
        Match warning message.

    Examples
    --------
    >>> import warnings
    >>> with assert_produces_warning():
    ...     warnings.warn(UserWarning())
    ...
    >>> with assert_produces_warning(False):
    ...     warnings.warn(RuntimeWarning())
    ...
    Traceback (most recent call last):
        ...
    AssertionError: Caused unexpected warning(s): ['RuntimeWarning'].
    >>> with assert_produces_warning(UserWarning):
    ...     warnings.warn(RuntimeWarning())
    Traceback (most recent call last):
        ...
    AssertionError: Did not see expected warning of class 'UserWarning'.

    .. warning:: This is *not* thread-safe.
""" __tracebackhide__ = True with warnings.catch_warnings(record=True) as w: warnings.simplefilter(filter_level) yield w if expected_warning: expected_warning = cast(Type[Warning], expected_warning) _assert_caught_expected_warning( caught_warnings=w, expected_warning=expected_warning, match=match, check_stacklevel=check_stacklevel, ) if raise_on_extra_warnings: _assert_caught_no_extra_warnings( caught_warnings=w, expected_warning=expected_warning, ) def _assert_caught_expected_warning( *, caught_warnings: Sequence[warnings.WarningMessage], expected_warning: type[Warning], match: str | None, check_stacklevel: bool, ) -> None: """Assert that there was the expected warning among the caught warnings.""" saw_warning = False matched_message = False for actual_warning in caught_warnings: if issubclass(actual_warning.category, expected_warning): saw_warning = True if check_stacklevel and issubclass( actual_warning.category, (FutureWarning, DeprecationWarning) ): _assert_raised_with_correct_stacklevel(actual_warning) if match is not None and re.search(match, str(actual_warning.message)): matched_message = True if not saw_warning: raise AssertionError( f"Did not see expected warning of class " f"{repr(expected_warning.__name__)}" ) if match and not matched_message: raise AssertionError( f"Did not see warning {repr(expected_warning.__name__)} " f"matching {match}" ) def _assert_caught_no_extra_warnings( *, caught_warnings: Sequence[warnings.WarningMessage], expected_warning: type[Warning] | bool | None, ) -> None: """Assert that no extra warnings apart from the expected ones are caught.""" extra_warnings = [] for actual_warning in caught_warnings: if _is_unexpected_warning(actual_warning, expected_warning): unclosed = "unclosed transport <asyncio.sslproto._SSLProtocolTransport" if actual_warning.category == ResourceWarning and unclosed in str( actual_warning.message ): # FIXME: kludge because pytest.filterwarnings does not # suppress these, xref GH#38630 continue extra_warnings.append( ( actual_warning.category.__name__, actual_warning.message, actual_warning.filename, actual_warning.lineno, ) ) if extra_warnings: raise AssertionError(f"Caused unexpected warning(s): {repr(extra_warnings)}") def _is_unexpected_warning( actual_warning: warnings.WarningMessage, expected_warning: type[Warning] | bool | None, ) -> bool: """Check if the actual warning issued is unexpected.""" if actual_warning and not expected_warning: return True expected_warning = cast(Type[Warning], expected_warning) return bool(not issubclass(actual_warning.category, expected_warning)) def _assert_raised_with_correct_stacklevel( actual_warning: warnings.WarningMessage, ) -> None: from inspect import ( getframeinfo, stack, ) caller = getframeinfo(stack()[4][0]) msg = ( "Warning not set with correct stacklevel. " f"File where warning is raised: {actual_warning.filename} != " f"{caller.filename}. Warning message: {actual_warning.message}" ) assert actual_warning.filename == caller.filename, msg
""" Tests parsers ability to read and parse non-local files and hence require a network connection to be read. """ from io import ( BytesIO, StringIO, ) import logging import numpy as np import pytest import pandas.util._test_decorators as td from pandas import DataFrame import pandas._testing as tm from pandas.io.feather_format import read_feather from pandas.io.parsers import read_csv @pytest.mark.network @pytest.mark.parametrize( "compress_type, extension", [("gzip", ".gz"), ("bz2", ".bz2"), ("zip", ".zip"), ("xz", ".xz")], ) @pytest.mark.parametrize("mode", ["explicit", "infer"]) @pytest.mark.parametrize("engine", ["python", "c"]) def test_compressed_urls(salaries_table, compress_type, extension, mode, engine): check_compressed_urls(salaries_table, compress_type, extension, mode, engine) @tm.network def check_compressed_urls(salaries_table, compression, extension, mode, engine): # test reading compressed urls with various engines and # extension inference base_url = ( "https://github.com/pandas-dev/pandas/raw/master/" "pandas/tests/io/parser/data/salaries.csv" ) url = base_url + extension if mode != "explicit": compression = mode url_table = read_csv(url, sep="\t", compression=compression, engine=engine) tm.assert_frame_equal(url_table, salaries_table) @tm.network("https://raw.githubusercontent.com/", check_before_test=True) def test_url_encoding_csv(): """ read_csv should honor the requested encoding for URLs. GH 10424 """ path = ( "https://raw.githubusercontent.com/pandas-dev/pandas/master/" + "pandas/tests/io/parser/data/unicode_series.csv" ) df = read_csv(path, encoding="latin-1", header=None) assert df.loc[15, 1] == "Á köldum klaka (Cold Fever) (1994)" @pytest.fixture def tips_df(datapath): """DataFrame with the tips dataset.""" return read_csv(datapath("io", "data", "csv", "tips.csv")) @pytest.mark.usefixtures("s3_resource") @td.skip_if_not_us_locale() class TestS3: @td.skip_if_no("s3fs") def test_parse_public_s3_bucket(self, tips_df, s3so): # more of an integration test due to the not-public contents portion # can probably mock this though. 
for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) # Read public file from bucket with not-public contents df = read_csv("s3://cant_get_it/tips.csv", storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3n_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3n" URL df = read_csv("s3n://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3a_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3a" URL df = read_csv("s3a://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_nrows(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_chunked(self, tips_df, s3so): # Read with a chunksize chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them # properly. df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_chunked_python(self, tips_df, s3so): # Read with a chunksize using the Python parser chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, engine="python", storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them properly. 
df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_infer_s3_compression(self, tips_df, s3so): for ext in ["", ".gz", ".bz2"]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression="infer", storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3_bucket_nrows_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_read_s3_fails(self, s3so): msg = "The specified bucket does not exist" with pytest.raises(IOError, match=msg): read_csv("s3://nyqpug/asdf.csv", storage_options=s3so) # Receive a permission error when trying to read a private bucket. # It's irrelevant here that this isn't actually a table. with pytest.raises(IOError, match=msg): read_csv("s3://cant_get_it/file.csv") @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) def test_write_s3_csv_fails(self, tips_df, s3so): # GH 32486 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_csv( "s3://an_s3_bucket_data_doesnt_exit/not_real.csv", storage_options=s3so ) @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) @td.skip_if_no("pyarrow") def test_write_s3_parquet_fails(self, tips_df, s3so): # GH 27679 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_parquet( "s3://an_s3_bucket_data_doesnt_exit/not_real.parquet", storage_options=s3so, ) def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file): # see gh-16135 s3_object = s3_resource.meta.client.get_object( Bucket="pandas-test", Key="tips.csv" ) with BytesIO(s3_object["Body"].read()) as buffer: result = read_csv(buffer, encoding="utf8") assert isinstance(result, DataFrame) assert not result.empty expected = read_csv(tips_file) tm.assert_frame_equal(result, expected) def test_read_csv_chunked_download(self, s3_resource, caplog, s3so): # 8 MB, S3FS uses 5MB chunks import s3fs df = DataFrame(np.random.randn(100000, 4), columns=list("abcd")) str_buf = StringIO() df.to_csv(str_buf) buf = BytesIO(str_buf.getvalue().encode("utf-8")) s3_resource.Bucket("pandas-test").put_object(Key="large-file.csv", Body=buf) # Possibly some state leaking in between tests. 
# If we don't clear this cache, we saw `GetObject operation: Forbidden`. # Presumably the s3fs instance is being cached, with the directory listing # from *before* we add the large-file.csv in the pandas-test bucket. s3fs.S3FileSystem.clear_instance_cache() with caplog.at_level(logging.DEBUG, logger="s3fs"): read_csv("s3://pandas-test/large-file.csv", nrows=5, storage_options=s3so) # log of fetch_range (start, stop) assert (0, 5505024) in (x.args[-2:] for x in caplog.records) def test_read_s3_with_hash_in_key(self, tips_df, s3so): # GH 25945 result = read_csv("s3://pandas-test/tips#1.csv", storage_options=s3so) tm.assert_frame_equal(tips_df, result) @td.skip_if_no("pyarrow") def test_read_feather_s3_file_path(self, feather_file, s3so): # GH 29055 expected = read_feather(feather_file) res = read_feather( "s3://pandas-test/simple_dataset.feather", storage_options=s3so ) tm.assert_frame_equal(expected, res)
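The remaining tests cover the feather and error paths; a hedged sketch of the feather entry point with a placeholder bucket and key (not real resources), mirroring what the S3 test does against its fixture bucket:

from pandas.io.feather_format import read_feather

# Placeholder bucket/key; real use needs s3fs installed and credentials
# (or anonymous access) supplied through storage_options.
df = read_feather(
    "s3://some-bucket/simple_dataset.feather",
    storage_options={"anon": True},
)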
datapythonista/pandas
pandas/tests/io/parser/test_network.py
pandas/_testing/_warnings.py
from typing import Optional import numpy as np from pandas._libs import lib from pandas.core.dtypes.cast import maybe_downcast_numeric from pandas.core.dtypes.common import ( ensure_object, is_datetime_or_timedelta_dtype, is_decimal, is_integer_dtype, is_number, is_numeric_dtype, is_scalar, needs_i8_conversion, ) from pandas.core.dtypes.generic import ( ABCIndex, ABCSeries, ) import pandas as pd from pandas.core.arrays.numeric import NumericArray def to_numeric(arg, errors="raise", downcast=None): """ Convert argument to a numeric type. The default return dtype is `float64` or `int64` depending on the data supplied. Use the `downcast` parameter to obtain other dtypes. Please note that precision loss may occur if really large numbers are passed in. Due to the internal limitations of `ndarray`, if numbers smaller than `-9223372036854775808` (np.iinfo(np.int64).min) or larger than `18446744073709551615` (np.iinfo(np.uint64).max) are passed in, it is very likely they will be converted to float so that they can stored in an `ndarray`. These warnings apply similarly to `Series` since it internally leverages `ndarray`. Parameters ---------- arg : scalar, list, tuple, 1-d array, or Series Argument to be converted. errors : {'ignore', 'raise', 'coerce'}, default 'raise' - If 'raise', then invalid parsing will raise an exception. - If 'coerce', then invalid parsing will be set as NaN. - If 'ignore', then invalid parsing will return the input. downcast : {'integer', 'signed', 'unsigned', 'float'}, default None If not None, and if the data has been successfully cast to a numerical dtype (or if the data was numeric to begin with), downcast that resulting data to the smallest numerical dtype possible according to the following rules: - 'integer' or 'signed': smallest signed int dtype (min.: np.int8) - 'unsigned': smallest unsigned int dtype (min.: np.uint8) - 'float': smallest float dtype (min.: np.float32) As this behaviour is separate from the core conversion to numeric values, any errors raised during the downcasting will be surfaced regardless of the value of the 'errors' input. In addition, downcasting will only occur if the size of the resulting data's dtype is strictly larger than the dtype it is to be cast to, so if none of the dtypes checked satisfy that specification, no downcasting will be performed on the data. Returns ------- ret Numeric if parsing succeeded. Return type depends on input. Series if Series, otherwise ndarray. See Also -------- DataFrame.astype : Cast argument to a specified dtype. to_datetime : Convert argument to datetime. to_timedelta : Convert argument to timedelta. numpy.ndarray.astype : Cast a numpy array to a specified type. DataFrame.convert_dtypes : Convert dtypes. 
Examples -------- Take separate series and convert to numeric, coercing when told to >>> s = pd.Series(['1.0', '2', -3]) >>> pd.to_numeric(s) 0 1.0 1 2.0 2 -3.0 dtype: float64 >>> pd.to_numeric(s, downcast='float') 0 1.0 1 2.0 2 -3.0 dtype: float32 >>> pd.to_numeric(s, downcast='signed') 0 1 1 2 2 -3 dtype: int8 >>> s = pd.Series(['apple', '1.0', '2', -3]) >>> pd.to_numeric(s, errors='ignore') 0 apple 1 1.0 2 2 3 -3 dtype: object >>> pd.to_numeric(s, errors='coerce') 0 NaN 1 1.0 2 2.0 3 -3.0 dtype: float64 Downcasting of nullable integer and floating dtypes is supported: >>> s = pd.Series([1, 2, 3], dtype="Int64") >>> pd.to_numeric(s, downcast="integer") 0 1 1 2 2 3 dtype: Int8 >>> s = pd.Series([1.0, 2.1, 3.0], dtype="Float64") >>> pd.to_numeric(s, downcast="float") 0 1.0 1 2.1 2 3.0 dtype: Float32 """ if downcast not in (None, "integer", "signed", "unsigned", "float"): raise ValueError("invalid downcasting method provided") if errors not in ("ignore", "raise", "coerce"): raise ValueError("invalid error value specified") is_series = False is_index = False is_scalars = False if isinstance(arg, ABCSeries): is_series = True values = arg.values elif isinstance(arg, ABCIndex): is_index = True if needs_i8_conversion(arg.dtype): values = arg.asi8 else: values = arg.values elif isinstance(arg, (list, tuple)): values = np.array(arg, dtype="O") elif is_scalar(arg): if is_decimal(arg): return float(arg) if is_number(arg): return arg is_scalars = True values = np.array([arg], dtype="O") elif getattr(arg, "ndim", 1) > 1: raise TypeError("arg must be a list, tuple, 1-d array, or Series") else: values = arg # GH33013: for IntegerArray & FloatingArray extract non-null values for casting # save mask to reconstruct the full array after casting mask: Optional[np.ndarray] = None if isinstance(values, NumericArray): mask = values._mask values = values._data[~mask] values_dtype = getattr(values, "dtype", None) if is_numeric_dtype(values_dtype): pass elif is_datetime_or_timedelta_dtype(values_dtype): values = values.view(np.int64) else: values = ensure_object(values) coerce_numeric = errors not in ("ignore", "raise") try: values, _ = lib.maybe_convert_numeric( values, set(), coerce_numeric=coerce_numeric ) except (ValueError, TypeError): if errors == "raise": raise # attempt downcast only if the data has been successfully converted # to a numerical dtype and if a downcast method has been specified if downcast is not None and is_numeric_dtype(values.dtype): typecodes = None if downcast in ("integer", "signed"): typecodes = np.typecodes["Integer"] elif downcast == "unsigned" and (not len(values) or np.min(values) >= 0): typecodes = np.typecodes["UnsignedInteger"] elif downcast == "float": typecodes = np.typecodes["Float"] # pandas support goes only to np.float32, # as float dtypes smaller than that are # extremely rare and not well supported float_32_char = np.dtype(np.float32).char float_32_ind = typecodes.index(float_32_char) typecodes = typecodes[float_32_ind:] if typecodes is not None: # from smallest to largest for dtype in typecodes: dtype = np.dtype(dtype) if dtype.itemsize <= values.dtype.itemsize: values = maybe_downcast_numeric(values, dtype) # successful conversion if values.dtype == dtype: break # GH33013: for IntegerArray & FloatingArray need to reconstruct masked array if mask is not None: data = np.zeros(mask.shape, dtype=values.dtype) data[~mask] = values from pandas.core.arrays import ( FloatingArray, IntegerArray, ) klass = IntegerArray if is_integer_dtype(data.dtype) else FloatingArray values = 
klass(data, mask.copy()) if is_series: return arg._constructor(values, index=arg.index, name=arg.name) elif is_index: # because we want to coerce to numeric if possible, # do not use _shallow_copy return pd.Index(values, name=arg.name) elif is_scalars: return values[0] else: return values
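As a quick illustration of the downcast rules documented above (a smaller dtype is only chosen when every parsed value fits, and coercion falls back to NaN and therefore float), here is a short sketch using only the public pandas API:

import numpy as np
import pandas as pd

# Strings parse to int64 by default; downcast="integer" then picks the
# smallest signed integer dtype that holds every value (int16 here,
# because 300 does not fit in int8).
parsed = pd.to_numeric(["1", "2", "300"])
small = pd.to_numeric(["1", "2", "300"], downcast="integer")
assert parsed.dtype == np.int64
assert small.dtype == np.int16

# errors="coerce" replaces unparseable entries with NaN, so the result
# becomes float64 rather than raising.
coerced = pd.to_numeric(["1", "oops"], errors="coerce")
assert coerced.dtype == np.float64 and np.isnan(coerced[1])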
""" Tests parsers ability to read and parse non-local files and hence require a network connection to be read. """ from io import ( BytesIO, StringIO, ) import logging import numpy as np import pytest import pandas.util._test_decorators as td from pandas import DataFrame import pandas._testing as tm from pandas.io.feather_format import read_feather from pandas.io.parsers import read_csv @pytest.mark.network @pytest.mark.parametrize( "compress_type, extension", [("gzip", ".gz"), ("bz2", ".bz2"), ("zip", ".zip"), ("xz", ".xz")], ) @pytest.mark.parametrize("mode", ["explicit", "infer"]) @pytest.mark.parametrize("engine", ["python", "c"]) def test_compressed_urls(salaries_table, compress_type, extension, mode, engine): check_compressed_urls(salaries_table, compress_type, extension, mode, engine) @tm.network def check_compressed_urls(salaries_table, compression, extension, mode, engine): # test reading compressed urls with various engines and # extension inference base_url = ( "https://github.com/pandas-dev/pandas/raw/master/" "pandas/tests/io/parser/data/salaries.csv" ) url = base_url + extension if mode != "explicit": compression = mode url_table = read_csv(url, sep="\t", compression=compression, engine=engine) tm.assert_frame_equal(url_table, salaries_table) @tm.network("https://raw.githubusercontent.com/", check_before_test=True) def test_url_encoding_csv(): """ read_csv should honor the requested encoding for URLs. GH 10424 """ path = ( "https://raw.githubusercontent.com/pandas-dev/pandas/master/" + "pandas/tests/io/parser/data/unicode_series.csv" ) df = read_csv(path, encoding="latin-1", header=None) assert df.loc[15, 1] == "Á köldum klaka (Cold Fever) (1994)" @pytest.fixture def tips_df(datapath): """DataFrame with the tips dataset.""" return read_csv(datapath("io", "data", "csv", "tips.csv")) @pytest.mark.usefixtures("s3_resource") @td.skip_if_not_us_locale() class TestS3: @td.skip_if_no("s3fs") def test_parse_public_s3_bucket(self, tips_df, s3so): # more of an integration test due to the not-public contents portion # can probably mock this though. 
for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) # Read public file from bucket with not-public contents df = read_csv("s3://cant_get_it/tips.csv", storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3n_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3n" URL df = read_csv("s3n://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3a_bucket(self, tips_df, s3so): # Read from AWS s3 as "s3a" URL df = read_csv("s3a://pandas-test/tips.csv", nrows=10, storage_options=s3so) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_nrows(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_parse_public_s3_bucket_chunked(self, tips_df, s3so): # Read with a chunksize chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them # properly. df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_chunked_python(self, tips_df, s3so): # Read with a chunksize using the Python parser chunksize = 5 for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: with read_csv( "s3://pandas-test/tips.csv" + ext, chunksize=chunksize, compression=comp, engine="python", storage_options=s3so, ) as df_reader: assert df_reader.chunksize == chunksize for i_chunk in [0, 1, 2]: # Read a couple of chunks and make sure we see them properly. 
df = df_reader.get_chunk() assert isinstance(df, DataFrame) assert not df.empty true_df = tips_df.iloc[ chunksize * i_chunk : chunksize * (i_chunk + 1) ] tm.assert_frame_equal(true_df, df) def test_parse_public_s3_bucket_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_infer_s3_compression(self, tips_df, s3so): for ext in ["", ".gz", ".bz2"]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", compression="infer", storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(df, tips_df) def test_parse_public_s3_bucket_nrows_python(self, tips_df, s3so): for ext, comp in [("", None), (".gz", "gzip"), (".bz2", "bz2")]: df = read_csv( "s3://pandas-test/tips.csv" + ext, engine="python", nrows=10, compression=comp, storage_options=s3so, ) assert isinstance(df, DataFrame) assert not df.empty tm.assert_frame_equal(tips_df.iloc[:10], df) def test_read_s3_fails(self, s3so): msg = "The specified bucket does not exist" with pytest.raises(IOError, match=msg): read_csv("s3://nyqpug/asdf.csv", storage_options=s3so) # Receive a permission error when trying to read a private bucket. # It's irrelevant here that this isn't actually a table. with pytest.raises(IOError, match=msg): read_csv("s3://cant_get_it/file.csv") @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) def test_write_s3_csv_fails(self, tips_df, s3so): # GH 32486 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_csv( "s3://an_s3_bucket_data_doesnt_exit/not_real.csv", storage_options=s3so ) @pytest.mark.xfail(reason="GH#39155 s3fs upgrade", strict=False) @td.skip_if_no("pyarrow") def test_write_s3_parquet_fails(self, tips_df, s3so): # GH 27679 # Attempting to write to an invalid S3 path should raise import botocore # GH 34087 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html # Catch a ClientError since AWS Service Errors are defined dynamically error = (FileNotFoundError, botocore.exceptions.ClientError) with pytest.raises(error, match="The specified bucket does not exist"): tips_df.to_parquet( "s3://an_s3_bucket_data_doesnt_exit/not_real.parquet", storage_options=s3so, ) def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file): # see gh-16135 s3_object = s3_resource.meta.client.get_object( Bucket="pandas-test", Key="tips.csv" ) with BytesIO(s3_object["Body"].read()) as buffer: result = read_csv(buffer, encoding="utf8") assert isinstance(result, DataFrame) assert not result.empty expected = read_csv(tips_file) tm.assert_frame_equal(result, expected) def test_read_csv_chunked_download(self, s3_resource, caplog, s3so): # 8 MB, S3FS uses 5MB chunks import s3fs df = DataFrame(np.random.randn(100000, 4), columns=list("abcd")) str_buf = StringIO() df.to_csv(str_buf) buf = BytesIO(str_buf.getvalue().encode("utf-8")) s3_resource.Bucket("pandas-test").put_object(Key="large-file.csv", Body=buf) # Possibly some state leaking in between tests. 
# If we don't clear this cache, we saw `GetObject operation: Forbidden`. # Presumably the s3fs instance is being cached, with the directory listing # from *before* we add the large-file.csv in the pandas-test bucket. s3fs.S3FileSystem.clear_instance_cache() with caplog.at_level(logging.DEBUG, logger="s3fs"): read_csv("s3://pandas-test/large-file.csv", nrows=5, storage_options=s3so) # log of fetch_range (start, stop) assert (0, 5505024) in (x.args[-2:] for x in caplog.records) def test_read_s3_with_hash_in_key(self, tips_df, s3so): # GH 25945 result = read_csv("s3://pandas-test/tips#1.csv", storage_options=s3so) tm.assert_frame_equal(tips_df, result) @td.skip_if_no("pyarrow") def test_read_feather_s3_file_path(self, feather_file, s3so): # GH 29055 expected = read_feather(feather_file) res = read_feather( "s3://pandas-test/simple_dataset.feather", storage_options=s3so ) tm.assert_frame_equal(expected, res)
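For context on how these S3 tests reach the bucket at all: the storage_options dict passed to the pandas readers is forwarded to the underlying fsspec filesystem (s3fs here). A minimal sketch, where the endpoint URL is purely hypothetical and stands in for whatever the s3so fixture supplies in the real test suite:

import pandas as pd

# Hypothetical endpoint; in the test suite the `s3so` fixture points at a
# local mock S3 service rather than real AWS.
s3so = {"client_kwargs": {"endpoint_url": "http://127.0.0.1:5555/"}}

# The dict is handed through to s3fs.S3FileSystem(**s3so) under the hood.
df = pd.read_csv("s3://pandas-test/tips.csv", nrows=10, storage_options=s3so)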
datapythonista/pandas
pandas/tests/io/parser/test_network.py
pandas/core/tools/numeric.py
# flake8: noqa __docformat__ = "restructuredtext" # Let users know if they're missing any of our hard dependencies hard_dependencies = ("numpy", "pytz", "dateutil") missing_dependencies = [] for dependency in hard_dependencies: try: __import__(dependency) except ImportError as e: missing_dependencies.append(f"{dependency}: {e}") if missing_dependencies: raise ImportError( "Unable to import required dependencies:\n" + "\n".join(missing_dependencies) ) del hard_dependencies, dependency, missing_dependencies # numpy compat from pandas.compat import ( np_version_under1p18 as _np_version_under1p18, is_numpy_dev as _is_numpy_dev, ) try: from pandas._libs import hashtable as _hashtable, lib as _lib, tslib as _tslib except ImportError as e: # pragma: no cover # hack but overkill to use re module = str(e).replace("cannot import name ", "") raise ImportError( f"C extension: {module} not built. If you want to import " "pandas from the source directory, you may need to run " "'python setup.py build_ext --force' to build the C extensions first." ) from e from pandas._config import ( get_option, set_option, reset_option, describe_option, option_context, options, ) # let init-time option registration happen import pandas.core.config_init from pandas.core.api import ( # dtype Int8Dtype, Int16Dtype, Int32Dtype, Int64Dtype, UInt8Dtype, UInt16Dtype, UInt32Dtype, UInt64Dtype, Float32Dtype, Float64Dtype, CategoricalDtype, PeriodDtype, IntervalDtype, DatetimeTZDtype, StringDtype, BooleanDtype, # missing NA, isna, isnull, notna, notnull, # indexes Index, CategoricalIndex, Int64Index, UInt64Index, RangeIndex, Float64Index, MultiIndex, IntervalIndex, TimedeltaIndex, DatetimeIndex, PeriodIndex, IndexSlice, # tseries NaT, Period, period_range, Timedelta, timedelta_range, Timestamp, date_range, bdate_range, Interval, interval_range, DateOffset, # conversion to_numeric, to_datetime, to_timedelta, # misc Flags, Grouper, factorize, unique, value_counts, NamedAgg, array, Categorical, set_eng_float_format, Series, DataFrame, ) from pandas.core.arrays.sparse import SparseDtype from pandas.tseries.api import infer_freq from pandas.tseries import offsets from pandas.core.computation.api import eval from pandas.core.reshape.api import ( concat, lreshape, melt, wide_to_long, merge, merge_asof, merge_ordered, crosstab, pivot, pivot_table, get_dummies, cut, qcut, ) import pandas.api from pandas.util._print_versions import show_versions from pandas.io.api import ( # excel ExcelFile, ExcelWriter, read_excel, # parsers read_csv, read_fwf, read_table, # pickle read_pickle, to_pickle, # pytables HDFStore, read_hdf, # sql read_sql, read_sql_query, read_sql_table, # misc read_clipboard, read_parquet, read_orc, read_feather, read_gbq, read_html, read_xml, read_json, read_stata, read_sas, read_spss, ) from pandas.io.json import _json_normalize as json_normalize from pandas.util._tester import test import pandas.testing import pandas.arrays # use the closest tagged version if possible from pandas._version import get_versions v = get_versions() __version__ = v.get("closest-tag", v["version"]) __git_version__ = v.get("full-revisionid") del get_versions, v # GH 27101 def __getattr__(name): import warnings if name == "datetime": warnings.warn( "The pandas.datetime class is deprecated " "and will be removed from pandas in a future version. 
" "Import from datetime module instead.", FutureWarning, stacklevel=2, ) from datetime import datetime as dt return dt elif name == "np": warnings.warn( "The pandas.np module is deprecated " "and will be removed from pandas in a future version. " "Import numpy directly instead", FutureWarning, stacklevel=2, ) import numpy as np return np elif name in {"SparseSeries", "SparseDataFrame"}: warnings.warn( f"The {name} class is removed from pandas. Accessing it from " "the top-level namespace will also be removed in the next version", FutureWarning, stacklevel=2, ) return type(name, (), {}) elif name == "SparseArray": warnings.warn( "The pandas.SparseArray class is deprecated " "and will be removed from pandas in a future version. " "Use pandas.arrays.SparseArray instead.", FutureWarning, stacklevel=2, ) from pandas.core.arrays.sparse import SparseArray as _SparseArray return _SparseArray raise AttributeError(f"module 'pandas' has no attribute '{name}'") # module level doc-string __doc__ = """ pandas - a powerful data analysis and manipulation library for Python ===================================================================== **pandas** is a Python package providing fast, flexible, and expressive data structures designed to make working with "relational" or "labeled" data both easy and intuitive. It aims to be the fundamental high-level building block for doing practical, **real world** data analysis in Python. Additionally, it has the broader goal of becoming **the most powerful and flexible open source data analysis / manipulation tool available in any language**. It is already well on its way toward this goal. Main Features ------------- Here are just a few of the things that pandas does well: - Easy handling of missing data in floating point as well as non-floating point data. - Size mutability: columns can be inserted and deleted from DataFrame and higher dimensional objects - Automatic and explicit data alignment: objects can be explicitly aligned to a set of labels, or the user can simply ignore the labels and let `Series`, `DataFrame`, etc. automatically align the data for you in computations. - Powerful, flexible group by functionality to perform split-apply-combine operations on data sets, for both aggregating and transforming data. - Make it easy to convert ragged, differently-indexed data in other Python and NumPy data structures into DataFrame objects. - Intelligent label-based slicing, fancy indexing, and subsetting of large data sets. - Intuitive merging and joining data sets. - Flexible reshaping and pivoting of data sets. - Hierarchical labeling of axes (possible to have multiple labels per tick). - Robust IO tools for loading data from flat files (CSV and delimited), Excel files, databases, and saving/loading data from the ultrafast HDF5 format. - Time series-specific functionality: date range generation and frequency conversion, moving window statistics, date shifting and lagging. """
""" test parquet compat """ import datetime from io import BytesIO import os import pathlib from warnings import ( catch_warnings, filterwarnings, ) import numpy as np import pytest from pandas._config import get_option from pandas.compat import ( PY38, is_platform_windows, ) from pandas.compat.pyarrow import ( pa_version_under1p0, pa_version_under2p0, ) import pandas.util._test_decorators as td import pandas as pd import pandas._testing as tm from pandas.util.version import Version from pandas.io.parquet import ( FastParquetImpl, PyArrowImpl, get_engine, read_parquet, to_parquet, ) try: import pyarrow _HAVE_PYARROW = True except ImportError: _HAVE_PYARROW = False try: with catch_warnings(): # `np.bool` is a deprecated alias... filterwarnings("ignore", "`np.bool`", category=DeprecationWarning) import fastparquet _HAVE_FASTPARQUET = True except ImportError: _HAVE_FASTPARQUET = False pytestmark = pytest.mark.filterwarnings( "ignore:RangeIndex.* is deprecated:DeprecationWarning" ) # TODO(ArrayManager) fastparquet relies on BlockManager internals # setup engines & skips @pytest.fixture( params=[ pytest.param( "fastparquet", marks=pytest.mark.skipif( not _HAVE_FASTPARQUET or get_option("mode.data_manager") == "array", reason="fastparquet is not installed or ArrayManager is used", ), ), pytest.param( "pyarrow", marks=pytest.mark.skipif( not _HAVE_PYARROW, reason="pyarrow is not installed" ), ), ] ) def engine(request): return request.param @pytest.fixture def pa(): if not _HAVE_PYARROW: pytest.skip("pyarrow is not installed") return "pyarrow" @pytest.fixture def fp(): if not _HAVE_FASTPARQUET: pytest.skip("fastparquet is not installed") elif get_option("mode.data_manager") == "array": pytest.skip("ArrayManager is not supported with fastparquet") return "fastparquet" @pytest.fixture def df_compat(): return pd.DataFrame({"A": [1, 2, 3], "B": "foo"}) @pytest.fixture def df_cross_compat(): df = pd.DataFrame( { "a": list("abc"), "b": list(range(1, 4)), # 'c': np.arange(3, 6).astype('u1'), "d": np.arange(4.0, 7.0, dtype="float64"), "e": [True, False, True], "f": pd.date_range("20130101", periods=3), # 'g': pd.date_range('20130101', periods=3, # tz='US/Eastern'), # 'h': pd.date_range('20130101', periods=3, freq='ns') } ) return df @pytest.fixture def df_full(): return pd.DataFrame( { "string": list("abc"), "string_with_nan": ["a", np.nan, "c"], "string_with_none": ["a", None, "c"], "bytes": [b"foo", b"bar", b"baz"], "unicode": ["foo", "bar", "baz"], "int": list(range(1, 4)), "uint": np.arange(3, 6).astype("u1"), "float": np.arange(4.0, 7.0, dtype="float64"), "float_with_nan": [2.0, np.nan, 3.0], "bool": [True, False, True], "datetime": pd.date_range("20130101", periods=3), "datetime_with_nat": [ pd.Timestamp("20130101"), pd.NaT, pd.Timestamp("20130103"), ], } ) @pytest.fixture( params=[ datetime.datetime.now(datetime.timezone.utc), datetime.datetime.now(datetime.timezone.min), datetime.datetime.now(datetime.timezone.max), datetime.datetime.strptime("2019-01-04T16:41:24+0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24+0215", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0215", "%Y-%m-%dT%H:%M:%S%z"), ] ) def timezone_aware_date_list(request): return request.param def check_round_trip( df, engine=None, path=None, write_kwargs=None, read_kwargs=None, expected=None, check_names=True, check_like=False, check_dtype=True, repeat=2, ): """Verify parquet serializer and 
deserializer produce the same results. Performs a pandas to disk and disk to pandas round trip, then compares the 2 resulting DataFrames to verify equality. Parameters ---------- df: Dataframe engine: str, optional 'pyarrow' or 'fastparquet' path: str, optional write_kwargs: dict of str:str, optional read_kwargs: dict of str:str, optional expected: DataFrame, optional Expected deserialization result, otherwise will be equal to `df` check_names: list of str, optional Closed set of column names to be compared check_like: bool, optional If True, ignore the order of index & columns. repeat: int, optional How many times to repeat the test """ write_kwargs = write_kwargs or {"compression": None} read_kwargs = read_kwargs or {} if expected is None: expected = df if engine: write_kwargs["engine"] = engine read_kwargs["engine"] = engine def compare(repeat): for _ in range(repeat): df.to_parquet(path, **write_kwargs) with catch_warnings(record=True): actual = read_parquet(path, **read_kwargs) tm.assert_frame_equal( expected, actual, check_names=check_names, check_like=check_like, check_dtype=check_dtype, ) if path is None: with tm.ensure_clean() as path: compare(repeat) else: compare(repeat) def test_invalid_engine(df_compat): msg = "engine must be one of 'pyarrow', 'fastparquet'" with pytest.raises(ValueError, match=msg): check_round_trip(df_compat, "foo", "bar") def test_options_py(df_compat, pa): # use the set option with pd.option_context("io.parquet.engine", "pyarrow"): check_round_trip(df_compat) def test_options_fp(df_compat, fp): # use the set option with pd.option_context("io.parquet.engine", "fastparquet"): check_round_trip(df_compat) def test_options_auto(df_compat, fp, pa): # use the set option with pd.option_context("io.parquet.engine", "auto"): check_round_trip(df_compat) def test_options_get_engine(fp, pa): assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "pyarrow"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "fastparquet"): assert isinstance(get_engine("auto"), FastParquetImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "auto"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) def test_get_engine_auto_error_message(): # Expect different error messages from get_engine(engine="auto") # if engines aren't installed vs. are installed but bad version from pandas.compat._optional import VERSIONS # Do we have engines installed, but a bad version of them? pa_min_ver = VERSIONS.get("pyarrow") fp_min_ver = VERSIONS.get("fastparquet") have_pa_bad_version = ( False if not _HAVE_PYARROW else Version(pyarrow.__version__) < Version(pa_min_ver) ) have_fp_bad_version = ( False if not _HAVE_FASTPARQUET else Version(fastparquet.__version__) < Version(fp_min_ver) ) # Do we have usable engines installed? have_usable_pa = _HAVE_PYARROW and not have_pa_bad_version have_usable_fp = _HAVE_FASTPARQUET and not have_fp_bad_version if not have_usable_pa and not have_usable_fp: # No usable engines found. if have_pa_bad_version: match = f"Pandas requires version .{pa_min_ver}. or newer of .pyarrow." 
with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .pyarrow." with pytest.raises(ImportError, match=match): get_engine("auto") if have_fp_bad_version: match = f"Pandas requires version .{fp_min_ver}. or newer of .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") def test_cross_engine_pa_fp(df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=pa, compression=None) result = read_parquet(path, engine=fp) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=fp, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) def test_cross_engine_fp_pa(request, df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=fp, compression=None) with catch_warnings(record=True): result = read_parquet(path, engine=pa) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=pa, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) class Base: def check_error_on_write(self, df, engine, exc, err_msg): # check that we are raising the exception on writing with tm.ensure_clean() as path: with pytest.raises(exc, match=err_msg): to_parquet(df, path, engine, compression=None) def check_external_error_on_write(self, df, engine, exc): # check that an external library is raising the exception on writing with tm.ensure_clean() as path: with tm.external_error_raised(exc): to_parquet(df, path, engine, compression=None) @tm.network def test_parquet_read_from_url(self, df_compat, engine): if engine != "auto": pytest.importorskip(engine) url = ( "https://raw.githubusercontent.com/pandas-dev/pandas/" "master/pandas/tests/io/data/parquet/simple.parquet" ) df = read_parquet(url) tm.assert_frame_equal(df, df_compat) class TestBasic(Base): def test_error(self, engine): for obj in [ pd.Series([1, 2, 3]), 1, "foo", pd.Timestamp("20130101"), np.array([1, 2, 3]), ]: msg = "to_parquet only supports IO with DataFrames" self.check_error_on_write(obj, engine, ValueError, msg) def test_columns_dtypes(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) # unicode df.columns = ["foo", "bar"] check_round_trip(df, engine) def test_columns_dtypes_invalid(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) msg = "parquet must have string column names" # numeric df.columns = [0, 1] self.check_error_on_write(df, engine, ValueError, msg) # bytes df.columns = [b"foo", b"bar"] self.check_error_on_write(df, engine, ValueError, msg) # python object df.columns = [ datetime.datetime(2011, 1, 1, 0, 0), datetime.datetime(2011, 1, 1, 1, 1), ] self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.parametrize("compression", [None, "gzip", "snappy", "brotli"]) def test_compression(self, engine, compression): if compression == "snappy": pytest.importorskip("snappy") elif compression == "brotli": pytest.importorskip("brotli") df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine, write_kwargs={"compression": compression}) def test_read_columns(self, engine): # GH18154 df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) expected = pd.DataFrame({"string": list("abc")}) check_round_trip( df, engine, 
expected=expected, read_kwargs={"columns": ["string"]} ) def test_write_index(self, engine): check_names = engine != "fastparquet" df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine) indexes = [ [2, 3, 4], pd.date_range("20130101", periods=3), list("abc"), [1, 3, 4], ] # non-default index for index in indexes: df.index = index if isinstance(index, pd.DatetimeIndex): df.index = df.index._with_freq(None) # freq doesn't round-trip check_round_trip(df, engine, check_names=check_names) # index with meta-data df.index = [0, 1, 2] df.index.name = "foo" check_round_trip(df, engine) def test_write_multiindex(self, pa): # Not supported in fastparquet as of 0.1.3 or older pyarrow version engine = pa df = pd.DataFrame({"A": [1, 2, 3]}) index = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df.index = index check_round_trip(df, engine) def test_multiindex_with_columns(self, pa): engine = pa dates = pd.date_range("01-Jan-2018", "01-Dec-2018", freq="MS") df = pd.DataFrame(np.random.randn(2 * len(dates), 3), columns=list("ABC")) index1 = pd.MultiIndex.from_product( [["Level1", "Level2"], dates], names=["level", "date"] ) index2 = index1.copy(names=None) for index in [index1, index2]: df.index = index check_round_trip(df, engine) check_round_trip( df, engine, read_kwargs={"columns": ["A", "B"]}, expected=df[["A", "B"]] ) def test_write_ignoring_index(self, engine): # ENH 20768 # Ensure index=False omits the index from the written Parquet file. df = pd.DataFrame({"a": [1, 2, 3], "b": ["q", "r", "s"]}) write_kwargs = {"compression": None, "index": False} # Because we're dropping the index, we expect the loaded dataframe to # have the default integer index. expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore custom index df = pd.DataFrame( {"a": [1, 2, 3], "b": ["q", "r", "s"]}, index=["zyx", "wvu", "tsr"] ) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore multi-indexes as well. arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame( {"one": list(range(8)), "two": [-i for i in range(8)]}, index=arrays ) expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) def test_write_column_multiindex(self, engine): # Not able to write column multi-indexes with non-string column names. 
mi_columns = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df = pd.DataFrame(np.random.randn(4, 3), columns=mi_columns) msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Not able to write column multi-indexes with non-string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], [1, 2, 1, 2, 1, 2, 1, 2], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["Level1", "Level2"] msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column multi-indexes with string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["ColLevel1", "ColLevel2"] check_round_trip(df, engine) def test_write_column_index_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = ["bar", "baz", "foo", "qux"] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "StringCol" check_round_trip(df, engine) def test_write_column_index_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = [1, 2, 3, 4] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "NonStringCol" msg = r"parquet must have string column names" self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.filterwarnings("ignore:CategoricalBlock is deprecated:DeprecationWarning") class TestParquetPyArrow(Base): def test_basic(self, pa, df_full): df = df_full # additional supported types for pyarrow dti = pd.date_range("20130101", periods=3, tz="Europe/Brussels") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["bool_with_none"] = [True, None, True] check_round_trip(df, pa) def test_basic_subset_columns(self, pa, df_full): # GH18628 df = df_full # additional supported types for pyarrow df["datetime_tz"] = pd.date_range("20130101", periods=3, tz="Europe/Brussels") check_round_trip( df, pa, expected=df[["string", "int"]], read_kwargs={"columns": ["string", "int"]}, ) def test_to_bytes_without_path_or_buf_provided(self, pa, df_full): # GH 37105 buf_bytes = df_full.to_parquet(engine=pa) assert isinstance(buf_bytes, bytes) buf_stream = BytesIO(buf_bytes) res = read_parquet(buf_stream) tm.assert_frame_equal(df_full, res) def test_duplicate_columns(self, pa): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() self.check_error_on_write(df, pa, ValueError, "Duplicate column names found") def test_unsupported(self, pa): # timedelta df = pd.DataFrame({"a": pd.timedelta_range("1 day", periods=3)}) self.check_external_error_on_write(df, pa, NotImplementedError) # mixed python objects df = pd.DataFrame({"a": ["a", 1, 2.0]}) # pyarrow 0.11 raises ArrowTypeError # older pyarrows raise ArrowInvalid self.check_external_error_on_write(df, pa, pyarrow.ArrowException) def 
test_categorical(self, pa): # supported in >= 0.7.0 df = pd.DataFrame() df["a"] = pd.Categorical(list("abcdef")) # test for null, out-of-order values, and unobserved category df["b"] = pd.Categorical( ["bar", "foo", "foo", "bar", None, "bar"], dtype=pd.CategoricalDtype(["foo", "bar", "baz"]), ) # test for ordered flag df["c"] = pd.Categorical( ["a", "b", "c", "a", "c", "b"], categories=["b", "c", "d"], ordered=True ) check_round_trip(df, pa) @pytest.mark.xfail( is_platform_windows() and PY38, reason="localhost connection rejected", strict=False, ) def test_s3_roundtrip_explicit_fs(self, df_compat, s3_resource, pa, s3so): s3fs = pytest.importorskip("s3fs") s3 = s3fs.S3FileSystem(**s3so) kw = {"filesystem": s3} check_round_trip( df_compat, pa, path="pandas-test/pyarrow.parquet", read_kwargs=kw, write_kwargs=kw, ) def test_s3_roundtrip(self, df_compat, s3_resource, pa, s3so): # GH #19134 s3so = {"storage_options": s3so} check_round_trip( df_compat, pa, path="s3://pandas-test/pyarrow.parquet", read_kwargs=s3so, write_kwargs=s3so, ) @td.skip_if_no("s3fs") # also requires flask @pytest.mark.parametrize( "partition_col", [ ["A"], [], ], ) def test_s3_roundtrip_for_dir( self, df_compat, s3_resource, pa, partition_col, s3so ): # GH #26388 expected_df = df_compat.copy() # GH #35791 # read_table uses the new Arrow Datasets API since pyarrow 1.0.0 # Previous behaviour was pyarrow partitioned columns become 'category' dtypes # These are added to back of dataframe on read. In new API category dtype is # only used if partition field is string, but this changed again to use # category dtype for all types (not only strings) in pyarrow 2.0.0 if partition_col: partition_col_type = ( "int32" if (not pa_version_under1p0) and pa_version_under2p0 else "category" ) expected_df[partition_col] = expected_df[partition_col].astype( partition_col_type ) check_round_trip( df_compat, pa, expected=expected_df, path="s3://pandas-test/parquet_dir", read_kwargs={"storage_options": s3so}, write_kwargs={ "partition_cols": partition_col, "compression": None, "storage_options": s3so, }, check_like=True, repeat=1, ) @td.skip_if_no("pyarrow") def test_read_file_like_obj_support(self, df_compat): buffer = BytesIO() df_compat.to_parquet(buffer) df_from_buf = read_parquet(buffer) tm.assert_frame_equal(df_compat, df_from_buf) @td.skip_if_no("pyarrow") def test_expand_user(self, df_compat, monkeypatch): monkeypatch.setenv("HOME", "TestingUser") monkeypatch.setenv("USERPROFILE", "TestingUser") with pytest.raises(OSError, match=r".*TestingUser.*"): read_parquet("~/file.parquet") with pytest.raises(OSError, match=r".*TestingUser.*"): df_compat.to_parquet("~/file.parquet") def test_partition_cols_supported(self, pa, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 2 assert dataset.partitions.partition_names == set(partition_cols) assert read_parquet(path).shape == df.shape def test_partition_cols_string(self, pa, df_full): # GH #27117 partition_cols = "bool" partition_cols_list = [partition_cols] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 1 assert 
dataset.partitions.partition_names == set(partition_cols_list) assert read_parquet(path).shape == df.shape @pytest.mark.parametrize("path_type", [str, pathlib.Path]) def test_partition_cols_pathlib(self, pa, df_compat, path_type): # GH 35902 partition_cols = "B" partition_cols_list = [partition_cols] df = df_compat with tm.ensure_clean_dir() as path_str: path = path_type(path_str) df.to_parquet(path, partition_cols=partition_cols_list) assert read_parquet(path).shape == df.shape def test_empty_dataframe(self, pa): # GH #27339 df = pd.DataFrame() check_round_trip(df, pa) def test_write_with_schema(self, pa): import pyarrow df = pd.DataFrame({"x": [0, 1]}) schema = pyarrow.schema([pyarrow.field("x", type=pyarrow.bool_())]) out_df = df.astype(bool) check_round_trip(df, pa, write_kwargs={"schema": schema}, expected=out_df) @td.skip_if_no("pyarrow") def test_additional_extension_arrays(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol df = pd.DataFrame( { "a": pd.Series([1, 2, 3], dtype="Int64"), "b": pd.Series([1, 2, 3], dtype="UInt32"), "c": pd.Series(["a", None, "c"], dtype="string"), } ) check_round_trip(df, pa) df = pd.DataFrame({"a": pd.Series([1, 2, 3, None], dtype="Int64")}) check_round_trip(df, pa) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_pyarrow_backed_string_array(self, pa): # test ArrowStringArray supported through the __arrow_array__ protocol from pandas.core.arrays.string_arrow import ArrowStringDtype # noqa: F401 df = pd.DataFrame({"a": pd.Series(["a", None, "c"], dtype="arrow_string")}) check_round_trip(df, pa, expected=df) @td.skip_if_no("pyarrow") def test_additional_extension_types(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol + by defining a custom ExtensionType df = pd.DataFrame( { # Arrow does not yet support struct in writing to Parquet (ARROW-1644) # "c": pd.arrays.IntervalArray.from_tuples([(0, 1), (1, 2), (3, 4)]), "d": pd.period_range("2012-01-01", periods=3, freq="D"), } ) check_round_trip(df, pa) @td.skip_if_no("pyarrow") def test_use_nullable_dtypes(self, pa): import pyarrow.parquet as pq table = pyarrow.table( { "a": pyarrow.array([1, 2, 3, None], "int64"), "b": pyarrow.array([1, 2, 3, None], "uint8"), "c": pyarrow.array(["a", "b", "c", None]), "d": pyarrow.array([True, False, True, None]), } ) with tm.ensure_clean() as path: # write manually with pyarrow to write integers pq.write_table(table, path) result1 = read_parquet(path) result2 = read_parquet(path, use_nullable_dtypes=True) assert result1["a"].dtype == np.dtype("float64") expected = pd.DataFrame( { "a": pd.array([1, 2, 3, None], dtype="Int64"), "b": pd.array([1, 2, 3, None], dtype="UInt8"), "c": pd.array(["a", "b", "c", None], dtype="string"), "d": pd.array([True, False, True, None], dtype="boolean"), } ) tm.assert_frame_equal(result2, expected) def test_timestamp_nanoseconds(self, pa): # with version 2.0, pyarrow defaults to writing the nanoseconds, so # this should work without error df = pd.DataFrame({"a": pd.date_range("2017-01-01", freq="1n", periods=10)}) check_round_trip(df, pa, write_kwargs={"version": "2.0"}) def test_timezone_aware_index(self, pa, timezone_aware_date_list): if not pa_version_under2p0: # temporary skip this test until it is properly resolved # https://github.com/pandas-dev/pandas/issues/37286 pytest.skip() idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) # see gh-36004 # compare time(zone) values only, skip their 
class: # pyarrow always creates fixed offset timezones using pytz.FixedOffset() # even if it was datetime.timezone() originally # # technically they are the same: # they both implement datetime.tzinfo # they both wrap datetime.timedelta() # this use-case sets the resolution to 1 minute check_round_trip(df, pa, check_dtype=False) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_filter_row_groups(self, pa): # https://github.com/pandas-dev/pandas/issues/26551 df = pd.DataFrame({"a": list(range(0, 3))}) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet( path, pa, filters=[("a", "==", 0)], use_legacy_dataset=False ) assert len(result) == 1 def test_read_parquet_manager(self, pa, using_array_manager): # ensure that read_parquet honors the pandas.options.mode.data_manager option df = pd.DataFrame(np.random.randn(10, 3), columns=["A", "B", "C"]) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet(path, pa) if using_array_manager: assert isinstance(result._mgr, pd.core.internals.ArrayManager) else: assert isinstance(result._mgr, pd.core.internals.BlockManager) class TestParquetFastParquet(Base): def test_basic(self, fp, df_full): df = df_full dti = pd.date_range("20130101", periods=3, tz="US/Eastern") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["timedelta"] = pd.timedelta_range("1 day", periods=3) check_round_trip(df, fp) @pytest.mark.skip(reason="not supported") def test_duplicate_columns(self, fp): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() msg = "Cannot create parquet dataset with duplicate column names" self.check_error_on_write(df, fp, ValueError, msg) def test_bool_with_none(self, fp): df = pd.DataFrame({"a": [True, None, False]}) expected = pd.DataFrame({"a": [1.0, np.nan, 0.0]}, dtype="float16") check_round_trip(df, fp, expected=expected) def test_unsupported(self, fp): # period df = pd.DataFrame({"a": pd.period_range("2013", freq="M", periods=3)}) # error from fastparquet -> don't check exact error message self.check_error_on_write(df, fp, ValueError, None) # mixed df = pd.DataFrame({"a": ["a", 1, 2.0]}) msg = "Can't infer object conversion type" self.check_error_on_write(df, fp, ValueError, msg) def test_categorical(self, fp): df = pd.DataFrame({"a": pd.Categorical(list("abc"))}) check_round_trip(df, fp) def test_filter_row_groups(self, fp): d = {"a": list(range(0, 3))} df = pd.DataFrame(d) with tm.ensure_clean() as path: df.to_parquet(path, fp, compression=None, row_group_offsets=1) result = read_parquet(path, fp, filters=[("a", "==", 0)]) assert len(result) == 1 def test_s3_roundtrip(self, df_compat, s3_resource, fp, s3so): # GH #19134 check_round_trip( df_compat, fp, path="s3://pandas-test/fastparquet.parquet", read_kwargs={"storage_options": s3so}, write_kwargs={"compression": None, "storage_options": s3so}, ) def test_partition_cols_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_partition_cols_string(self, fp, df_full): # GH #27117 partition_cols = "bool" df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, 
compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 1 def test_partition_on_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_error_on_using_partition_cols_and_partition_on(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full msg = ( "Cannot use both partition_on and partition_cols. Use partition_cols for " "partitioning data" ) with pytest.raises(ValueError, match=msg): with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, partition_cols=partition_cols, ) def test_empty_dataframe(self, fp): # GH #27339 df = pd.DataFrame() expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_timezone_aware_index(self, fp, timezone_aware_date_list): idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_use_nullable_dtypes_not_supported(self, fp): df = pd.DataFrame({"a": [1, 2]}) with tm.ensure_clean() as path: df.to_parquet(path) with pytest.raises(ValueError, match="not supported for the fastparquet"): read_parquet(path, engine="fastparquet", use_nullable_dtypes=True)
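Outside the check_round_trip helper, the round trip these tests exercise boils down to to_parquet followed by read_parquet. A minimal sketch (the temporary filename and the choice of the pyarrow engine are just for illustration, and pyarrow is assumed to be installed):

import pandas as pd
import pandas._testing as tm

df = pd.DataFrame({"string": list("abc"), "int": [1, 2, 3]})

with tm.ensure_clean("roundtrip.parquet") as path:
    df.to_parquet(path, engine="pyarrow", compression=None)
    result = pd.read_parquet(path, engine="pyarrow")

# The default RangeIndex and the column dtypes survive the round trip.
tm.assert_frame_equal(result, df)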
datapythonista/pandas
pandas/tests/io/test_parquet.py
pandas/__init__.py
import numpy as np import pandas as pd from pandas import ( Categorical, DataFrame, Index, Series, Timestamp, ) import pandas._testing as tm from pandas.core.arrays import IntervalArray class TestGetNumericData: def test_get_numeric_data_preserve_dtype(self): # get the numeric data obj = DataFrame({"A": [1, "2", 3.0]}) result = obj._get_numeric_data() expected = DataFrame(index=[0, 1, 2], dtype=object) tm.assert_frame_equal(result, expected) def test_get_numeric_data(self): datetime64name = np.dtype("M8[ns]").name objectname = np.dtype(np.object_).name df = DataFrame( {"a": 1.0, "b": 2, "c": "foo", "f": Timestamp("20010102")}, index=np.arange(10), ) result = df.dtypes expected = Series( [ np.dtype("float64"), np.dtype("int64"), np.dtype(objectname), np.dtype(datetime64name), ], index=["a", "b", "c", "f"], ) tm.assert_series_equal(result, expected) df = DataFrame( { "a": 1.0, "b": 2, "c": "foo", "d": np.array([1.0] * 10, dtype="float32"), "e": np.array([1] * 10, dtype="int32"), "f": np.array([1] * 10, dtype="int16"), "g": Timestamp("20010102"), }, index=np.arange(10), ) result = df._get_numeric_data() expected = df.loc[:, ["a", "b", "d", "e", "f"]] tm.assert_frame_equal(result, expected) only_obj = df.loc[:, ["c", "g"]] result = only_obj._get_numeric_data() expected = df.loc[:, []] tm.assert_frame_equal(result, expected) df = DataFrame.from_dict({"a": [1, 2], "b": ["foo", "bar"], "c": [np.pi, np.e]}) result = df._get_numeric_data() expected = DataFrame.from_dict({"a": [1, 2], "c": [np.pi, np.e]}) tm.assert_frame_equal(result, expected) df = result.copy() result = df._get_numeric_data() expected = df tm.assert_frame_equal(result, expected) def test_get_numeric_data_mixed_dtype(self): # numeric and object columns df = DataFrame( { "a": [1, 2, 3], "b": [True, False, True], "c": ["foo", "bar", "baz"], "d": [None, None, None], "e": [3.14, 0.577, 2.773], } ) result = df._get_numeric_data() tm.assert_index_equal(result.columns, Index(["a", "b", "e"])) def test_get_numeric_data_extension_dtype(self): # GH#22290 df = DataFrame( { "A": pd.array([-10, np.nan, 0, 10, 20, 30], dtype="Int64"), "B": Categorical(list("abcabc")), "C": pd.array([0, 1, 2, 3, np.nan, 5], dtype="UInt8"), "D": IntervalArray.from_breaks(range(7)), } ) result = df._get_numeric_data() expected = df.loc[:, ["A", "C"]] tm.assert_frame_equal(result, expected)
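A compact illustration of what these tests assert: _get_numeric_data keeps numeric and boolean columns and drops object ones. It is a private DataFrame method, so this sketch is illustrative only:

import pandas as pd
import pandas._testing as tm

df = pd.DataFrame(
    {
        "a": [1.0, 2.0, 3.0],      # float: kept
        "b": [True, False, True],  # bool: kept (treated as numeric here)
        "c": ["x", "y", "z"],      # object: dropped
    }
)

result = df._get_numeric_data()
tm.assert_frame_equal(result, df[["a", "b"]])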
""" test parquet compat """ import datetime from io import BytesIO import os import pathlib from warnings import ( catch_warnings, filterwarnings, ) import numpy as np import pytest from pandas._config import get_option from pandas.compat import ( PY38, is_platform_windows, ) from pandas.compat.pyarrow import ( pa_version_under1p0, pa_version_under2p0, ) import pandas.util._test_decorators as td import pandas as pd import pandas._testing as tm from pandas.util.version import Version from pandas.io.parquet import ( FastParquetImpl, PyArrowImpl, get_engine, read_parquet, to_parquet, ) try: import pyarrow _HAVE_PYARROW = True except ImportError: _HAVE_PYARROW = False try: with catch_warnings(): # `np.bool` is a deprecated alias... filterwarnings("ignore", "`np.bool`", category=DeprecationWarning) import fastparquet _HAVE_FASTPARQUET = True except ImportError: _HAVE_FASTPARQUET = False pytestmark = pytest.mark.filterwarnings( "ignore:RangeIndex.* is deprecated:DeprecationWarning" ) # TODO(ArrayManager) fastparquet relies on BlockManager internals # setup engines & skips @pytest.fixture( params=[ pytest.param( "fastparquet", marks=pytest.mark.skipif( not _HAVE_FASTPARQUET or get_option("mode.data_manager") == "array", reason="fastparquet is not installed or ArrayManager is used", ), ), pytest.param( "pyarrow", marks=pytest.mark.skipif( not _HAVE_PYARROW, reason="pyarrow is not installed" ), ), ] ) def engine(request): return request.param @pytest.fixture def pa(): if not _HAVE_PYARROW: pytest.skip("pyarrow is not installed") return "pyarrow" @pytest.fixture def fp(): if not _HAVE_FASTPARQUET: pytest.skip("fastparquet is not installed") elif get_option("mode.data_manager") == "array": pytest.skip("ArrayManager is not supported with fastparquet") return "fastparquet" @pytest.fixture def df_compat(): return pd.DataFrame({"A": [1, 2, 3], "B": "foo"}) @pytest.fixture def df_cross_compat(): df = pd.DataFrame( { "a": list("abc"), "b": list(range(1, 4)), # 'c': np.arange(3, 6).astype('u1'), "d": np.arange(4.0, 7.0, dtype="float64"), "e": [True, False, True], "f": pd.date_range("20130101", periods=3), # 'g': pd.date_range('20130101', periods=3, # tz='US/Eastern'), # 'h': pd.date_range('20130101', periods=3, freq='ns') } ) return df @pytest.fixture def df_full(): return pd.DataFrame( { "string": list("abc"), "string_with_nan": ["a", np.nan, "c"], "string_with_none": ["a", None, "c"], "bytes": [b"foo", b"bar", b"baz"], "unicode": ["foo", "bar", "baz"], "int": list(range(1, 4)), "uint": np.arange(3, 6).astype("u1"), "float": np.arange(4.0, 7.0, dtype="float64"), "float_with_nan": [2.0, np.nan, 3.0], "bool": [True, False, True], "datetime": pd.date_range("20130101", periods=3), "datetime_with_nat": [ pd.Timestamp("20130101"), pd.NaT, pd.Timestamp("20130103"), ], } ) @pytest.fixture( params=[ datetime.datetime.now(datetime.timezone.utc), datetime.datetime.now(datetime.timezone.min), datetime.datetime.now(datetime.timezone.max), datetime.datetime.strptime("2019-01-04T16:41:24+0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24+0215", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0215", "%Y-%m-%dT%H:%M:%S%z"), ] ) def timezone_aware_date_list(request): return request.param def check_round_trip( df, engine=None, path=None, write_kwargs=None, read_kwargs=None, expected=None, check_names=True, check_like=False, check_dtype=True, repeat=2, ): """Verify parquet serializer and 
deserializer produce the same results. Performs a pandas to disk and disk to pandas round trip, then compares the 2 resulting DataFrames to verify equality. Parameters ---------- df: Dataframe engine: str, optional 'pyarrow' or 'fastparquet' path: str, optional write_kwargs: dict of str:str, optional read_kwargs: dict of str:str, optional expected: DataFrame, optional Expected deserialization result, otherwise will be equal to `df` check_names: list of str, optional Closed set of column names to be compared check_like: bool, optional If True, ignore the order of index & columns. repeat: int, optional How many times to repeat the test """ write_kwargs = write_kwargs or {"compression": None} read_kwargs = read_kwargs or {} if expected is None: expected = df if engine: write_kwargs["engine"] = engine read_kwargs["engine"] = engine def compare(repeat): for _ in range(repeat): df.to_parquet(path, **write_kwargs) with catch_warnings(record=True): actual = read_parquet(path, **read_kwargs) tm.assert_frame_equal( expected, actual, check_names=check_names, check_like=check_like, check_dtype=check_dtype, ) if path is None: with tm.ensure_clean() as path: compare(repeat) else: compare(repeat) def test_invalid_engine(df_compat): msg = "engine must be one of 'pyarrow', 'fastparquet'" with pytest.raises(ValueError, match=msg): check_round_trip(df_compat, "foo", "bar") def test_options_py(df_compat, pa): # use the set option with pd.option_context("io.parquet.engine", "pyarrow"): check_round_trip(df_compat) def test_options_fp(df_compat, fp): # use the set option with pd.option_context("io.parquet.engine", "fastparquet"): check_round_trip(df_compat) def test_options_auto(df_compat, fp, pa): # use the set option with pd.option_context("io.parquet.engine", "auto"): check_round_trip(df_compat) def test_options_get_engine(fp, pa): assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "pyarrow"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "fastparquet"): assert isinstance(get_engine("auto"), FastParquetImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "auto"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) def test_get_engine_auto_error_message(): # Expect different error messages from get_engine(engine="auto") # if engines aren't installed vs. are installed but bad version from pandas.compat._optional import VERSIONS # Do we have engines installed, but a bad version of them? pa_min_ver = VERSIONS.get("pyarrow") fp_min_ver = VERSIONS.get("fastparquet") have_pa_bad_version = ( False if not _HAVE_PYARROW else Version(pyarrow.__version__) < Version(pa_min_ver) ) have_fp_bad_version = ( False if not _HAVE_FASTPARQUET else Version(fastparquet.__version__) < Version(fp_min_ver) ) # Do we have usable engines installed? have_usable_pa = _HAVE_PYARROW and not have_pa_bad_version have_usable_fp = _HAVE_FASTPARQUET and not have_fp_bad_version if not have_usable_pa and not have_usable_fp: # No usable engines found. if have_pa_bad_version: match = f"Pandas requires version .{pa_min_ver}. or newer of .pyarrow." 
with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .pyarrow." with pytest.raises(ImportError, match=match): get_engine("auto") if have_fp_bad_version: match = f"Pandas requires version .{fp_min_ver}. or newer of .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") def test_cross_engine_pa_fp(df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=pa, compression=None) result = read_parquet(path, engine=fp) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=fp, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) def test_cross_engine_fp_pa(request, df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=fp, compression=None) with catch_warnings(record=True): result = read_parquet(path, engine=pa) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=pa, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) class Base: def check_error_on_write(self, df, engine, exc, err_msg): # check that we are raising the exception on writing with tm.ensure_clean() as path: with pytest.raises(exc, match=err_msg): to_parquet(df, path, engine, compression=None) def check_external_error_on_write(self, df, engine, exc): # check that an external library is raising the exception on writing with tm.ensure_clean() as path: with tm.external_error_raised(exc): to_parquet(df, path, engine, compression=None) @tm.network def test_parquet_read_from_url(self, df_compat, engine): if engine != "auto": pytest.importorskip(engine) url = ( "https://raw.githubusercontent.com/pandas-dev/pandas/" "master/pandas/tests/io/data/parquet/simple.parquet" ) df = read_parquet(url) tm.assert_frame_equal(df, df_compat) class TestBasic(Base): def test_error(self, engine): for obj in [ pd.Series([1, 2, 3]), 1, "foo", pd.Timestamp("20130101"), np.array([1, 2, 3]), ]: msg = "to_parquet only supports IO with DataFrames" self.check_error_on_write(obj, engine, ValueError, msg) def test_columns_dtypes(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) # unicode df.columns = ["foo", "bar"] check_round_trip(df, engine) def test_columns_dtypes_invalid(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) msg = "parquet must have string column names" # numeric df.columns = [0, 1] self.check_error_on_write(df, engine, ValueError, msg) # bytes df.columns = [b"foo", b"bar"] self.check_error_on_write(df, engine, ValueError, msg) # python object df.columns = [ datetime.datetime(2011, 1, 1, 0, 0), datetime.datetime(2011, 1, 1, 1, 1), ] self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.parametrize("compression", [None, "gzip", "snappy", "brotli"]) def test_compression(self, engine, compression): if compression == "snappy": pytest.importorskip("snappy") elif compression == "brotli": pytest.importorskip("brotli") df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine, write_kwargs={"compression": compression}) def test_read_columns(self, engine): # GH18154 df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) expected = pd.DataFrame({"string": list("abc")}) check_round_trip( df, engine, 
expected=expected, read_kwargs={"columns": ["string"]} ) def test_write_index(self, engine): check_names = engine != "fastparquet" df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine) indexes = [ [2, 3, 4], pd.date_range("20130101", periods=3), list("abc"), [1, 3, 4], ] # non-default index for index in indexes: df.index = index if isinstance(index, pd.DatetimeIndex): df.index = df.index._with_freq(None) # freq doesn't round-trip check_round_trip(df, engine, check_names=check_names) # index with meta-data df.index = [0, 1, 2] df.index.name = "foo" check_round_trip(df, engine) def test_write_multiindex(self, pa): # Not supported in fastparquet as of 0.1.3 or older pyarrow version engine = pa df = pd.DataFrame({"A": [1, 2, 3]}) index = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df.index = index check_round_trip(df, engine) def test_multiindex_with_columns(self, pa): engine = pa dates = pd.date_range("01-Jan-2018", "01-Dec-2018", freq="MS") df = pd.DataFrame(np.random.randn(2 * len(dates), 3), columns=list("ABC")) index1 = pd.MultiIndex.from_product( [["Level1", "Level2"], dates], names=["level", "date"] ) index2 = index1.copy(names=None) for index in [index1, index2]: df.index = index check_round_trip(df, engine) check_round_trip( df, engine, read_kwargs={"columns": ["A", "B"]}, expected=df[["A", "B"]] ) def test_write_ignoring_index(self, engine): # ENH 20768 # Ensure index=False omits the index from the written Parquet file. df = pd.DataFrame({"a": [1, 2, 3], "b": ["q", "r", "s"]}) write_kwargs = {"compression": None, "index": False} # Because we're dropping the index, we expect the loaded dataframe to # have the default integer index. expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore custom index df = pd.DataFrame( {"a": [1, 2, 3], "b": ["q", "r", "s"]}, index=["zyx", "wvu", "tsr"] ) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore multi-indexes as well. arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame( {"one": list(range(8)), "two": [-i for i in range(8)]}, index=arrays ) expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) def test_write_column_multiindex(self, engine): # Not able to write column multi-indexes with non-string column names. 
mi_columns = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df = pd.DataFrame(np.random.randn(4, 3), columns=mi_columns) msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Not able to write column multi-indexes with non-string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], [1, 2, 1, 2, 1, 2, 1, 2], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["Level1", "Level2"] msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column multi-indexes with string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["ColLevel1", "ColLevel2"] check_round_trip(df, engine) def test_write_column_index_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = ["bar", "baz", "foo", "qux"] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "StringCol" check_round_trip(df, engine) def test_write_column_index_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = [1, 2, 3, 4] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "NonStringCol" msg = r"parquet must have string column names" self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.filterwarnings("ignore:CategoricalBlock is deprecated:DeprecationWarning") class TestParquetPyArrow(Base): def test_basic(self, pa, df_full): df = df_full # additional supported types for pyarrow dti = pd.date_range("20130101", periods=3, tz="Europe/Brussels") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["bool_with_none"] = [True, None, True] check_round_trip(df, pa) def test_basic_subset_columns(self, pa, df_full): # GH18628 df = df_full # additional supported types for pyarrow df["datetime_tz"] = pd.date_range("20130101", periods=3, tz="Europe/Brussels") check_round_trip( df, pa, expected=df[["string", "int"]], read_kwargs={"columns": ["string", "int"]}, ) def test_to_bytes_without_path_or_buf_provided(self, pa, df_full): # GH 37105 buf_bytes = df_full.to_parquet(engine=pa) assert isinstance(buf_bytes, bytes) buf_stream = BytesIO(buf_bytes) res = read_parquet(buf_stream) tm.assert_frame_equal(df_full, res) def test_duplicate_columns(self, pa): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() self.check_error_on_write(df, pa, ValueError, "Duplicate column names found") def test_unsupported(self, pa): # timedelta df = pd.DataFrame({"a": pd.timedelta_range("1 day", periods=3)}) self.check_external_error_on_write(df, pa, NotImplementedError) # mixed python objects df = pd.DataFrame({"a": ["a", 1, 2.0]}) # pyarrow 0.11 raises ArrowTypeError # older pyarrows raise ArrowInvalid self.check_external_error_on_write(df, pa, pyarrow.ArrowException) def 
test_categorical(self, pa): # supported in >= 0.7.0 df = pd.DataFrame() df["a"] = pd.Categorical(list("abcdef")) # test for null, out-of-order values, and unobserved category df["b"] = pd.Categorical( ["bar", "foo", "foo", "bar", None, "bar"], dtype=pd.CategoricalDtype(["foo", "bar", "baz"]), ) # test for ordered flag df["c"] = pd.Categorical( ["a", "b", "c", "a", "c", "b"], categories=["b", "c", "d"], ordered=True ) check_round_trip(df, pa) @pytest.mark.xfail( is_platform_windows() and PY38, reason="localhost connection rejected", strict=False, ) def test_s3_roundtrip_explicit_fs(self, df_compat, s3_resource, pa, s3so): s3fs = pytest.importorskip("s3fs") s3 = s3fs.S3FileSystem(**s3so) kw = {"filesystem": s3} check_round_trip( df_compat, pa, path="pandas-test/pyarrow.parquet", read_kwargs=kw, write_kwargs=kw, ) def test_s3_roundtrip(self, df_compat, s3_resource, pa, s3so): # GH #19134 s3so = {"storage_options": s3so} check_round_trip( df_compat, pa, path="s3://pandas-test/pyarrow.parquet", read_kwargs=s3so, write_kwargs=s3so, ) @td.skip_if_no("s3fs") # also requires flask @pytest.mark.parametrize( "partition_col", [ ["A"], [], ], ) def test_s3_roundtrip_for_dir( self, df_compat, s3_resource, pa, partition_col, s3so ): # GH #26388 expected_df = df_compat.copy() # GH #35791 # read_table uses the new Arrow Datasets API since pyarrow 1.0.0 # Previous behaviour was pyarrow partitioned columns become 'category' dtypes # These are added to back of dataframe on read. In new API category dtype is # only used if partition field is string, but this changed again to use # category dtype for all types (not only strings) in pyarrow 2.0.0 if partition_col: partition_col_type = ( "int32" if (not pa_version_under1p0) and pa_version_under2p0 else "category" ) expected_df[partition_col] = expected_df[partition_col].astype( partition_col_type ) check_round_trip( df_compat, pa, expected=expected_df, path="s3://pandas-test/parquet_dir", read_kwargs={"storage_options": s3so}, write_kwargs={ "partition_cols": partition_col, "compression": None, "storage_options": s3so, }, check_like=True, repeat=1, ) @td.skip_if_no("pyarrow") def test_read_file_like_obj_support(self, df_compat): buffer = BytesIO() df_compat.to_parquet(buffer) df_from_buf = read_parquet(buffer) tm.assert_frame_equal(df_compat, df_from_buf) @td.skip_if_no("pyarrow") def test_expand_user(self, df_compat, monkeypatch): monkeypatch.setenv("HOME", "TestingUser") monkeypatch.setenv("USERPROFILE", "TestingUser") with pytest.raises(OSError, match=r".*TestingUser.*"): read_parquet("~/file.parquet") with pytest.raises(OSError, match=r".*TestingUser.*"): df_compat.to_parquet("~/file.parquet") def test_partition_cols_supported(self, pa, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 2 assert dataset.partitions.partition_names == set(partition_cols) assert read_parquet(path).shape == df.shape def test_partition_cols_string(self, pa, df_full): # GH #27117 partition_cols = "bool" partition_cols_list = [partition_cols] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 1 assert 
dataset.partitions.partition_names == set(partition_cols_list) assert read_parquet(path).shape == df.shape @pytest.mark.parametrize("path_type", [str, pathlib.Path]) def test_partition_cols_pathlib(self, pa, df_compat, path_type): # GH 35902 partition_cols = "B" partition_cols_list = [partition_cols] df = df_compat with tm.ensure_clean_dir() as path_str: path = path_type(path_str) df.to_parquet(path, partition_cols=partition_cols_list) assert read_parquet(path).shape == df.shape def test_empty_dataframe(self, pa): # GH #27339 df = pd.DataFrame() check_round_trip(df, pa) def test_write_with_schema(self, pa): import pyarrow df = pd.DataFrame({"x": [0, 1]}) schema = pyarrow.schema([pyarrow.field("x", type=pyarrow.bool_())]) out_df = df.astype(bool) check_round_trip(df, pa, write_kwargs={"schema": schema}, expected=out_df) @td.skip_if_no("pyarrow") def test_additional_extension_arrays(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol df = pd.DataFrame( { "a": pd.Series([1, 2, 3], dtype="Int64"), "b": pd.Series([1, 2, 3], dtype="UInt32"), "c": pd.Series(["a", None, "c"], dtype="string"), } ) check_round_trip(df, pa) df = pd.DataFrame({"a": pd.Series([1, 2, 3, None], dtype="Int64")}) check_round_trip(df, pa) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_pyarrow_backed_string_array(self, pa): # test ArrowStringArray supported through the __arrow_array__ protocol from pandas.core.arrays.string_arrow import ArrowStringDtype # noqa: F401 df = pd.DataFrame({"a": pd.Series(["a", None, "c"], dtype="arrow_string")}) check_round_trip(df, pa, expected=df) @td.skip_if_no("pyarrow") def test_additional_extension_types(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol + by defining a custom ExtensionType df = pd.DataFrame( { # Arrow does not yet support struct in writing to Parquet (ARROW-1644) # "c": pd.arrays.IntervalArray.from_tuples([(0, 1), (1, 2), (3, 4)]), "d": pd.period_range("2012-01-01", periods=3, freq="D"), } ) check_round_trip(df, pa) @td.skip_if_no("pyarrow") def test_use_nullable_dtypes(self, pa): import pyarrow.parquet as pq table = pyarrow.table( { "a": pyarrow.array([1, 2, 3, None], "int64"), "b": pyarrow.array([1, 2, 3, None], "uint8"), "c": pyarrow.array(["a", "b", "c", None]), "d": pyarrow.array([True, False, True, None]), } ) with tm.ensure_clean() as path: # write manually with pyarrow to write integers pq.write_table(table, path) result1 = read_parquet(path) result2 = read_parquet(path, use_nullable_dtypes=True) assert result1["a"].dtype == np.dtype("float64") expected = pd.DataFrame( { "a": pd.array([1, 2, 3, None], dtype="Int64"), "b": pd.array([1, 2, 3, None], dtype="UInt8"), "c": pd.array(["a", "b", "c", None], dtype="string"), "d": pd.array([True, False, True, None], dtype="boolean"), } ) tm.assert_frame_equal(result2, expected) def test_timestamp_nanoseconds(self, pa): # with version 2.0, pyarrow defaults to writing the nanoseconds, so # this should work without error df = pd.DataFrame({"a": pd.date_range("2017-01-01", freq="1n", periods=10)}) check_round_trip(df, pa, write_kwargs={"version": "2.0"}) def test_timezone_aware_index(self, pa, timezone_aware_date_list): if not pa_version_under2p0: # temporary skip this test until it is properly resolved # https://github.com/pandas-dev/pandas/issues/37286 pytest.skip() idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) # see gh-36004 # compare time(zone) values only, skip their 
class: # pyarrow always creates fixed offset timezones using pytz.FixedOffset() # even if it was datetime.timezone() originally # # technically they are the same: # they both implement datetime.tzinfo # they both wrap datetime.timedelta() # this use-case sets the resolution to 1 minute check_round_trip(df, pa, check_dtype=False) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_filter_row_groups(self, pa): # https://github.com/pandas-dev/pandas/issues/26551 df = pd.DataFrame({"a": list(range(0, 3))}) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet( path, pa, filters=[("a", "==", 0)], use_legacy_dataset=False ) assert len(result) == 1 def test_read_parquet_manager(self, pa, using_array_manager): # ensure that read_parquet honors the pandas.options.mode.data_manager option df = pd.DataFrame(np.random.randn(10, 3), columns=["A", "B", "C"]) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet(path, pa) if using_array_manager: assert isinstance(result._mgr, pd.core.internals.ArrayManager) else: assert isinstance(result._mgr, pd.core.internals.BlockManager) class TestParquetFastParquet(Base): def test_basic(self, fp, df_full): df = df_full dti = pd.date_range("20130101", periods=3, tz="US/Eastern") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["timedelta"] = pd.timedelta_range("1 day", periods=3) check_round_trip(df, fp) @pytest.mark.skip(reason="not supported") def test_duplicate_columns(self, fp): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() msg = "Cannot create parquet dataset with duplicate column names" self.check_error_on_write(df, fp, ValueError, msg) def test_bool_with_none(self, fp): df = pd.DataFrame({"a": [True, None, False]}) expected = pd.DataFrame({"a": [1.0, np.nan, 0.0]}, dtype="float16") check_round_trip(df, fp, expected=expected) def test_unsupported(self, fp): # period df = pd.DataFrame({"a": pd.period_range("2013", freq="M", periods=3)}) # error from fastparquet -> don't check exact error message self.check_error_on_write(df, fp, ValueError, None) # mixed df = pd.DataFrame({"a": ["a", 1, 2.0]}) msg = "Can't infer object conversion type" self.check_error_on_write(df, fp, ValueError, msg) def test_categorical(self, fp): df = pd.DataFrame({"a": pd.Categorical(list("abc"))}) check_round_trip(df, fp) def test_filter_row_groups(self, fp): d = {"a": list(range(0, 3))} df = pd.DataFrame(d) with tm.ensure_clean() as path: df.to_parquet(path, fp, compression=None, row_group_offsets=1) result = read_parquet(path, fp, filters=[("a", "==", 0)]) assert len(result) == 1 def test_s3_roundtrip(self, df_compat, s3_resource, fp, s3so): # GH #19134 check_round_trip( df_compat, fp, path="s3://pandas-test/fastparquet.parquet", read_kwargs={"storage_options": s3so}, write_kwargs={"compression": None, "storage_options": s3so}, ) def test_partition_cols_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_partition_cols_string(self, fp, df_full): # GH #27117 partition_cols = "bool" df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, 
compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 1 def test_partition_on_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_error_on_using_partition_cols_and_partition_on(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full msg = ( "Cannot use both partition_on and partition_cols. Use partition_cols for " "partitioning data" ) with pytest.raises(ValueError, match=msg): with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, partition_cols=partition_cols, ) def test_empty_dataframe(self, fp): # GH #27339 df = pd.DataFrame() expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_timezone_aware_index(self, fp, timezone_aware_date_list): idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_use_nullable_dtypes_not_supported(self, fp): df = pd.DataFrame({"a": [1, 2]}) with tm.ensure_clean() as path: df.to_parquet(path) with pytest.raises(ValueError, match="not supported for the fastparquet"): read_parquet(path, engine="fastparquet", use_nullable_dtypes=True)
datapythonista/pandas
pandas/tests/io/test_parquet.py
pandas/tests/frame/methods/test_get_numeric_data.py
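For context on the test module above: its check_round_trip helper writes a DataFrame to Parquet, reads it back, and asserts the two frames are equal. A minimal standalone sketch of that same round-trip pattern (assuming pyarrow is installed; the file name is purely illustrative, not part of the test suite):

import pandas as pd

# Round-trip a small frame through Parquet and compare, mirroring check_round_trip.
df = pd.DataFrame({"string": list("abc"), "int": [1, 2, 3]})
df.to_parquet("example.parquet", engine="pyarrow", compression=None)
result = pd.read_parquet("example.parquet", engine="pyarrow")
pd.testing.assert_frame_equal(result, df)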
import warnings import pytest import pandas as pd import pandas._testing as tm from pandas.tests.extension.base.base import BaseExtensionTests class BaseReduceTests(BaseExtensionTests): """ Reduction specific tests. Generally these only make sense for numeric/boolean operations. """ def check_reduce(self, s, op_name, skipna): result = getattr(s, op_name)(skipna=skipna) expected = getattr(s.astype("float64"), op_name)(skipna=skipna) tm.assert_almost_equal(result, expected) class BaseNoReduceTests(BaseReduceTests): """ we don't define any reductions """ @pytest.mark.parametrize("skipna", [True, False]) def test_reduce_series_numeric(self, data, all_numeric_reductions, skipna): op_name = all_numeric_reductions s = pd.Series(data) msg = ( "[Cc]annot perform|Categorical is not ordered for operation|" "'Categorical' does not implement reduction|" ) with pytest.raises(TypeError, match=msg): getattr(s, op_name)(skipna=skipna) @pytest.mark.parametrize("skipna", [True, False]) def test_reduce_series_boolean(self, data, all_boolean_reductions, skipna): op_name = all_boolean_reductions s = pd.Series(data) msg = ( "[Cc]annot perform|Categorical is not ordered for operation|" "'Categorical' does not implement reduction|" ) with pytest.raises(TypeError, match=msg): getattr(s, op_name)(skipna=skipna) class BaseNumericReduceTests(BaseReduceTests): @pytest.mark.parametrize("skipna", [True, False]) def test_reduce_series(self, data, all_numeric_reductions, skipna): op_name = all_numeric_reductions s = pd.Series(data) # min/max with empty produce numpy warnings with warnings.catch_warnings(): warnings.simplefilter("ignore", RuntimeWarning) self.check_reduce(s, op_name, skipna) class BaseBooleanReduceTests(BaseReduceTests): @pytest.mark.parametrize("skipna", [True, False]) def test_reduce_series(self, data, all_boolean_reductions, skipna): op_name = all_boolean_reductions s = pd.Series(data) self.check_reduce(s, op_name, skipna)
""" test parquet compat """ import datetime from io import BytesIO import os import pathlib from warnings import ( catch_warnings, filterwarnings, ) import numpy as np import pytest from pandas._config import get_option from pandas.compat import ( PY38, is_platform_windows, ) from pandas.compat.pyarrow import ( pa_version_under1p0, pa_version_under2p0, ) import pandas.util._test_decorators as td import pandas as pd import pandas._testing as tm from pandas.util.version import Version from pandas.io.parquet import ( FastParquetImpl, PyArrowImpl, get_engine, read_parquet, to_parquet, ) try: import pyarrow _HAVE_PYARROW = True except ImportError: _HAVE_PYARROW = False try: with catch_warnings(): # `np.bool` is a deprecated alias... filterwarnings("ignore", "`np.bool`", category=DeprecationWarning) import fastparquet _HAVE_FASTPARQUET = True except ImportError: _HAVE_FASTPARQUET = False pytestmark = pytest.mark.filterwarnings( "ignore:RangeIndex.* is deprecated:DeprecationWarning" ) # TODO(ArrayManager) fastparquet relies on BlockManager internals # setup engines & skips @pytest.fixture( params=[ pytest.param( "fastparquet", marks=pytest.mark.skipif( not _HAVE_FASTPARQUET or get_option("mode.data_manager") == "array", reason="fastparquet is not installed or ArrayManager is used", ), ), pytest.param( "pyarrow", marks=pytest.mark.skipif( not _HAVE_PYARROW, reason="pyarrow is not installed" ), ), ] ) def engine(request): return request.param @pytest.fixture def pa(): if not _HAVE_PYARROW: pytest.skip("pyarrow is not installed") return "pyarrow" @pytest.fixture def fp(): if not _HAVE_FASTPARQUET: pytest.skip("fastparquet is not installed") elif get_option("mode.data_manager") == "array": pytest.skip("ArrayManager is not supported with fastparquet") return "fastparquet" @pytest.fixture def df_compat(): return pd.DataFrame({"A": [1, 2, 3], "B": "foo"}) @pytest.fixture def df_cross_compat(): df = pd.DataFrame( { "a": list("abc"), "b": list(range(1, 4)), # 'c': np.arange(3, 6).astype('u1'), "d": np.arange(4.0, 7.0, dtype="float64"), "e": [True, False, True], "f": pd.date_range("20130101", periods=3), # 'g': pd.date_range('20130101', periods=3, # tz='US/Eastern'), # 'h': pd.date_range('20130101', periods=3, freq='ns') } ) return df @pytest.fixture def df_full(): return pd.DataFrame( { "string": list("abc"), "string_with_nan": ["a", np.nan, "c"], "string_with_none": ["a", None, "c"], "bytes": [b"foo", b"bar", b"baz"], "unicode": ["foo", "bar", "baz"], "int": list(range(1, 4)), "uint": np.arange(3, 6).astype("u1"), "float": np.arange(4.0, 7.0, dtype="float64"), "float_with_nan": [2.0, np.nan, 3.0], "bool": [True, False, True], "datetime": pd.date_range("20130101", periods=3), "datetime_with_nat": [ pd.Timestamp("20130101"), pd.NaT, pd.Timestamp("20130103"), ], } ) @pytest.fixture( params=[ datetime.datetime.now(datetime.timezone.utc), datetime.datetime.now(datetime.timezone.min), datetime.datetime.now(datetime.timezone.max), datetime.datetime.strptime("2019-01-04T16:41:24+0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24+0215", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0215", "%Y-%m-%dT%H:%M:%S%z"), ] ) def timezone_aware_date_list(request): return request.param def check_round_trip( df, engine=None, path=None, write_kwargs=None, read_kwargs=None, expected=None, check_names=True, check_like=False, check_dtype=True, repeat=2, ): """Verify parquet serializer and 
deserializer produce the same results. Performs a pandas to disk and disk to pandas round trip, then compares the 2 resulting DataFrames to verify equality. Parameters ---------- df: Dataframe engine: str, optional 'pyarrow' or 'fastparquet' path: str, optional write_kwargs: dict of str:str, optional read_kwargs: dict of str:str, optional expected: DataFrame, optional Expected deserialization result, otherwise will be equal to `df` check_names: list of str, optional Closed set of column names to be compared check_like: bool, optional If True, ignore the order of index & columns. repeat: int, optional How many times to repeat the test """ write_kwargs = write_kwargs or {"compression": None} read_kwargs = read_kwargs or {} if expected is None: expected = df if engine: write_kwargs["engine"] = engine read_kwargs["engine"] = engine def compare(repeat): for _ in range(repeat): df.to_parquet(path, **write_kwargs) with catch_warnings(record=True): actual = read_parquet(path, **read_kwargs) tm.assert_frame_equal( expected, actual, check_names=check_names, check_like=check_like, check_dtype=check_dtype, ) if path is None: with tm.ensure_clean() as path: compare(repeat) else: compare(repeat) def test_invalid_engine(df_compat): msg = "engine must be one of 'pyarrow', 'fastparquet'" with pytest.raises(ValueError, match=msg): check_round_trip(df_compat, "foo", "bar") def test_options_py(df_compat, pa): # use the set option with pd.option_context("io.parquet.engine", "pyarrow"): check_round_trip(df_compat) def test_options_fp(df_compat, fp): # use the set option with pd.option_context("io.parquet.engine", "fastparquet"): check_round_trip(df_compat) def test_options_auto(df_compat, fp, pa): # use the set option with pd.option_context("io.parquet.engine", "auto"): check_round_trip(df_compat) def test_options_get_engine(fp, pa): assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "pyarrow"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "fastparquet"): assert isinstance(get_engine("auto"), FastParquetImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "auto"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) def test_get_engine_auto_error_message(): # Expect different error messages from get_engine(engine="auto") # if engines aren't installed vs. are installed but bad version from pandas.compat._optional import VERSIONS # Do we have engines installed, but a bad version of them? pa_min_ver = VERSIONS.get("pyarrow") fp_min_ver = VERSIONS.get("fastparquet") have_pa_bad_version = ( False if not _HAVE_PYARROW else Version(pyarrow.__version__) < Version(pa_min_ver) ) have_fp_bad_version = ( False if not _HAVE_FASTPARQUET else Version(fastparquet.__version__) < Version(fp_min_ver) ) # Do we have usable engines installed? have_usable_pa = _HAVE_PYARROW and not have_pa_bad_version have_usable_fp = _HAVE_FASTPARQUET and not have_fp_bad_version if not have_usable_pa and not have_usable_fp: # No usable engines found. if have_pa_bad_version: match = f"Pandas requires version .{pa_min_ver}. or newer of .pyarrow." 
with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .pyarrow." with pytest.raises(ImportError, match=match): get_engine("auto") if have_fp_bad_version: match = f"Pandas requires version .{fp_min_ver}. or newer of .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") def test_cross_engine_pa_fp(df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=pa, compression=None) result = read_parquet(path, engine=fp) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=fp, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) def test_cross_engine_fp_pa(request, df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=fp, compression=None) with catch_warnings(record=True): result = read_parquet(path, engine=pa) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=pa, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) class Base: def check_error_on_write(self, df, engine, exc, err_msg): # check that we are raising the exception on writing with tm.ensure_clean() as path: with pytest.raises(exc, match=err_msg): to_parquet(df, path, engine, compression=None) def check_external_error_on_write(self, df, engine, exc): # check that an external library is raising the exception on writing with tm.ensure_clean() as path: with tm.external_error_raised(exc): to_parquet(df, path, engine, compression=None) @tm.network def test_parquet_read_from_url(self, df_compat, engine): if engine != "auto": pytest.importorskip(engine) url = ( "https://raw.githubusercontent.com/pandas-dev/pandas/" "master/pandas/tests/io/data/parquet/simple.parquet" ) df = read_parquet(url) tm.assert_frame_equal(df, df_compat) class TestBasic(Base): def test_error(self, engine): for obj in [ pd.Series([1, 2, 3]), 1, "foo", pd.Timestamp("20130101"), np.array([1, 2, 3]), ]: msg = "to_parquet only supports IO with DataFrames" self.check_error_on_write(obj, engine, ValueError, msg) def test_columns_dtypes(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) # unicode df.columns = ["foo", "bar"] check_round_trip(df, engine) def test_columns_dtypes_invalid(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) msg = "parquet must have string column names" # numeric df.columns = [0, 1] self.check_error_on_write(df, engine, ValueError, msg) # bytes df.columns = [b"foo", b"bar"] self.check_error_on_write(df, engine, ValueError, msg) # python object df.columns = [ datetime.datetime(2011, 1, 1, 0, 0), datetime.datetime(2011, 1, 1, 1, 1), ] self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.parametrize("compression", [None, "gzip", "snappy", "brotli"]) def test_compression(self, engine, compression): if compression == "snappy": pytest.importorskip("snappy") elif compression == "brotli": pytest.importorskip("brotli") df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine, write_kwargs={"compression": compression}) def test_read_columns(self, engine): # GH18154 df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) expected = pd.DataFrame({"string": list("abc")}) check_round_trip( df, engine, 
expected=expected, read_kwargs={"columns": ["string"]} ) def test_write_index(self, engine): check_names = engine != "fastparquet" df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine) indexes = [ [2, 3, 4], pd.date_range("20130101", periods=3), list("abc"), [1, 3, 4], ] # non-default index for index in indexes: df.index = index if isinstance(index, pd.DatetimeIndex): df.index = df.index._with_freq(None) # freq doesn't round-trip check_round_trip(df, engine, check_names=check_names) # index with meta-data df.index = [0, 1, 2] df.index.name = "foo" check_round_trip(df, engine) def test_write_multiindex(self, pa): # Not supported in fastparquet as of 0.1.3 or older pyarrow version engine = pa df = pd.DataFrame({"A": [1, 2, 3]}) index = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df.index = index check_round_trip(df, engine) def test_multiindex_with_columns(self, pa): engine = pa dates = pd.date_range("01-Jan-2018", "01-Dec-2018", freq="MS") df = pd.DataFrame(np.random.randn(2 * len(dates), 3), columns=list("ABC")) index1 = pd.MultiIndex.from_product( [["Level1", "Level2"], dates], names=["level", "date"] ) index2 = index1.copy(names=None) for index in [index1, index2]: df.index = index check_round_trip(df, engine) check_round_trip( df, engine, read_kwargs={"columns": ["A", "B"]}, expected=df[["A", "B"]] ) def test_write_ignoring_index(self, engine): # ENH 20768 # Ensure index=False omits the index from the written Parquet file. df = pd.DataFrame({"a": [1, 2, 3], "b": ["q", "r", "s"]}) write_kwargs = {"compression": None, "index": False} # Because we're dropping the index, we expect the loaded dataframe to # have the default integer index. expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore custom index df = pd.DataFrame( {"a": [1, 2, 3], "b": ["q", "r", "s"]}, index=["zyx", "wvu", "tsr"] ) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore multi-indexes as well. arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame( {"one": list(range(8)), "two": [-i for i in range(8)]}, index=arrays ) expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) def test_write_column_multiindex(self, engine): # Not able to write column multi-indexes with non-string column names. 
mi_columns = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df = pd.DataFrame(np.random.randn(4, 3), columns=mi_columns) msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Not able to write column multi-indexes with non-string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], [1, 2, 1, 2, 1, 2, 1, 2], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["Level1", "Level2"] msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column multi-indexes with string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["ColLevel1", "ColLevel2"] check_round_trip(df, engine) def test_write_column_index_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = ["bar", "baz", "foo", "qux"] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "StringCol" check_round_trip(df, engine) def test_write_column_index_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = [1, 2, 3, 4] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "NonStringCol" msg = r"parquet must have string column names" self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.filterwarnings("ignore:CategoricalBlock is deprecated:DeprecationWarning") class TestParquetPyArrow(Base): def test_basic(self, pa, df_full): df = df_full # additional supported types for pyarrow dti = pd.date_range("20130101", periods=3, tz="Europe/Brussels") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["bool_with_none"] = [True, None, True] check_round_trip(df, pa) def test_basic_subset_columns(self, pa, df_full): # GH18628 df = df_full # additional supported types for pyarrow df["datetime_tz"] = pd.date_range("20130101", periods=3, tz="Europe/Brussels") check_round_trip( df, pa, expected=df[["string", "int"]], read_kwargs={"columns": ["string", "int"]}, ) def test_to_bytes_without_path_or_buf_provided(self, pa, df_full): # GH 37105 buf_bytes = df_full.to_parquet(engine=pa) assert isinstance(buf_bytes, bytes) buf_stream = BytesIO(buf_bytes) res = read_parquet(buf_stream) tm.assert_frame_equal(df_full, res) def test_duplicate_columns(self, pa): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() self.check_error_on_write(df, pa, ValueError, "Duplicate column names found") def test_unsupported(self, pa): # timedelta df = pd.DataFrame({"a": pd.timedelta_range("1 day", periods=3)}) self.check_external_error_on_write(df, pa, NotImplementedError) # mixed python objects df = pd.DataFrame({"a": ["a", 1, 2.0]}) # pyarrow 0.11 raises ArrowTypeError # older pyarrows raise ArrowInvalid self.check_external_error_on_write(df, pa, pyarrow.ArrowException) def 
test_categorical(self, pa): # supported in >= 0.7.0 df = pd.DataFrame() df["a"] = pd.Categorical(list("abcdef")) # test for null, out-of-order values, and unobserved category df["b"] = pd.Categorical( ["bar", "foo", "foo", "bar", None, "bar"], dtype=pd.CategoricalDtype(["foo", "bar", "baz"]), ) # test for ordered flag df["c"] = pd.Categorical( ["a", "b", "c", "a", "c", "b"], categories=["b", "c", "d"], ordered=True ) check_round_trip(df, pa) @pytest.mark.xfail( is_platform_windows() and PY38, reason="localhost connection rejected", strict=False, ) def test_s3_roundtrip_explicit_fs(self, df_compat, s3_resource, pa, s3so): s3fs = pytest.importorskip("s3fs") s3 = s3fs.S3FileSystem(**s3so) kw = {"filesystem": s3} check_round_trip( df_compat, pa, path="pandas-test/pyarrow.parquet", read_kwargs=kw, write_kwargs=kw, ) def test_s3_roundtrip(self, df_compat, s3_resource, pa, s3so): # GH #19134 s3so = {"storage_options": s3so} check_round_trip( df_compat, pa, path="s3://pandas-test/pyarrow.parquet", read_kwargs=s3so, write_kwargs=s3so, ) @td.skip_if_no("s3fs") # also requires flask @pytest.mark.parametrize( "partition_col", [ ["A"], [], ], ) def test_s3_roundtrip_for_dir( self, df_compat, s3_resource, pa, partition_col, s3so ): # GH #26388 expected_df = df_compat.copy() # GH #35791 # read_table uses the new Arrow Datasets API since pyarrow 1.0.0 # Previous behaviour was pyarrow partitioned columns become 'category' dtypes # These are added to back of dataframe on read. In new API category dtype is # only used if partition field is string, but this changed again to use # category dtype for all types (not only strings) in pyarrow 2.0.0 if partition_col: partition_col_type = ( "int32" if (not pa_version_under1p0) and pa_version_under2p0 else "category" ) expected_df[partition_col] = expected_df[partition_col].astype( partition_col_type ) check_round_trip( df_compat, pa, expected=expected_df, path="s3://pandas-test/parquet_dir", read_kwargs={"storage_options": s3so}, write_kwargs={ "partition_cols": partition_col, "compression": None, "storage_options": s3so, }, check_like=True, repeat=1, ) @td.skip_if_no("pyarrow") def test_read_file_like_obj_support(self, df_compat): buffer = BytesIO() df_compat.to_parquet(buffer) df_from_buf = read_parquet(buffer) tm.assert_frame_equal(df_compat, df_from_buf) @td.skip_if_no("pyarrow") def test_expand_user(self, df_compat, monkeypatch): monkeypatch.setenv("HOME", "TestingUser") monkeypatch.setenv("USERPROFILE", "TestingUser") with pytest.raises(OSError, match=r".*TestingUser.*"): read_parquet("~/file.parquet") with pytest.raises(OSError, match=r".*TestingUser.*"): df_compat.to_parquet("~/file.parquet") def test_partition_cols_supported(self, pa, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 2 assert dataset.partitions.partition_names == set(partition_cols) assert read_parquet(path).shape == df.shape def test_partition_cols_string(self, pa, df_full): # GH #27117 partition_cols = "bool" partition_cols_list = [partition_cols] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 1 assert 
dataset.partitions.partition_names == set(partition_cols_list) assert read_parquet(path).shape == df.shape @pytest.mark.parametrize("path_type", [str, pathlib.Path]) def test_partition_cols_pathlib(self, pa, df_compat, path_type): # GH 35902 partition_cols = "B" partition_cols_list = [partition_cols] df = df_compat with tm.ensure_clean_dir() as path_str: path = path_type(path_str) df.to_parquet(path, partition_cols=partition_cols_list) assert read_parquet(path).shape == df.shape def test_empty_dataframe(self, pa): # GH #27339 df = pd.DataFrame() check_round_trip(df, pa) def test_write_with_schema(self, pa): import pyarrow df = pd.DataFrame({"x": [0, 1]}) schema = pyarrow.schema([pyarrow.field("x", type=pyarrow.bool_())]) out_df = df.astype(bool) check_round_trip(df, pa, write_kwargs={"schema": schema}, expected=out_df) @td.skip_if_no("pyarrow") def test_additional_extension_arrays(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol df = pd.DataFrame( { "a": pd.Series([1, 2, 3], dtype="Int64"), "b": pd.Series([1, 2, 3], dtype="UInt32"), "c": pd.Series(["a", None, "c"], dtype="string"), } ) check_round_trip(df, pa) df = pd.DataFrame({"a": pd.Series([1, 2, 3, None], dtype="Int64")}) check_round_trip(df, pa) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_pyarrow_backed_string_array(self, pa): # test ArrowStringArray supported through the __arrow_array__ protocol from pandas.core.arrays.string_arrow import ArrowStringDtype # noqa: F401 df = pd.DataFrame({"a": pd.Series(["a", None, "c"], dtype="arrow_string")}) check_round_trip(df, pa, expected=df) @td.skip_if_no("pyarrow") def test_additional_extension_types(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol + by defining a custom ExtensionType df = pd.DataFrame( { # Arrow does not yet support struct in writing to Parquet (ARROW-1644) # "c": pd.arrays.IntervalArray.from_tuples([(0, 1), (1, 2), (3, 4)]), "d": pd.period_range("2012-01-01", periods=3, freq="D"), } ) check_round_trip(df, pa) @td.skip_if_no("pyarrow") def test_use_nullable_dtypes(self, pa): import pyarrow.parquet as pq table = pyarrow.table( { "a": pyarrow.array([1, 2, 3, None], "int64"), "b": pyarrow.array([1, 2, 3, None], "uint8"), "c": pyarrow.array(["a", "b", "c", None]), "d": pyarrow.array([True, False, True, None]), } ) with tm.ensure_clean() as path: # write manually with pyarrow to write integers pq.write_table(table, path) result1 = read_parquet(path) result2 = read_parquet(path, use_nullable_dtypes=True) assert result1["a"].dtype == np.dtype("float64") expected = pd.DataFrame( { "a": pd.array([1, 2, 3, None], dtype="Int64"), "b": pd.array([1, 2, 3, None], dtype="UInt8"), "c": pd.array(["a", "b", "c", None], dtype="string"), "d": pd.array([True, False, True, None], dtype="boolean"), } ) tm.assert_frame_equal(result2, expected) def test_timestamp_nanoseconds(self, pa): # with version 2.0, pyarrow defaults to writing the nanoseconds, so # this should work without error df = pd.DataFrame({"a": pd.date_range("2017-01-01", freq="1n", periods=10)}) check_round_trip(df, pa, write_kwargs={"version": "2.0"}) def test_timezone_aware_index(self, pa, timezone_aware_date_list): if not pa_version_under2p0: # temporary skip this test until it is properly resolved # https://github.com/pandas-dev/pandas/issues/37286 pytest.skip() idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) # see gh-36004 # compare time(zone) values only, skip their 
class: # pyarrow always creates fixed offset timezones using pytz.FixedOffset() # even if it was datetime.timezone() originally # # technically they are the same: # they both implement datetime.tzinfo # they both wrap datetime.timedelta() # this use-case sets the resolution to 1 minute check_round_trip(df, pa, check_dtype=False) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_filter_row_groups(self, pa): # https://github.com/pandas-dev/pandas/issues/26551 df = pd.DataFrame({"a": list(range(0, 3))}) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet( path, pa, filters=[("a", "==", 0)], use_legacy_dataset=False ) assert len(result) == 1 def test_read_parquet_manager(self, pa, using_array_manager): # ensure that read_parquet honors the pandas.options.mode.data_manager option df = pd.DataFrame(np.random.randn(10, 3), columns=["A", "B", "C"]) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet(path, pa) if using_array_manager: assert isinstance(result._mgr, pd.core.internals.ArrayManager) else: assert isinstance(result._mgr, pd.core.internals.BlockManager) class TestParquetFastParquet(Base): def test_basic(self, fp, df_full): df = df_full dti = pd.date_range("20130101", periods=3, tz="US/Eastern") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["timedelta"] = pd.timedelta_range("1 day", periods=3) check_round_trip(df, fp) @pytest.mark.skip(reason="not supported") def test_duplicate_columns(self, fp): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() msg = "Cannot create parquet dataset with duplicate column names" self.check_error_on_write(df, fp, ValueError, msg) def test_bool_with_none(self, fp): df = pd.DataFrame({"a": [True, None, False]}) expected = pd.DataFrame({"a": [1.0, np.nan, 0.0]}, dtype="float16") check_round_trip(df, fp, expected=expected) def test_unsupported(self, fp): # period df = pd.DataFrame({"a": pd.period_range("2013", freq="M", periods=3)}) # error from fastparquet -> don't check exact error message self.check_error_on_write(df, fp, ValueError, None) # mixed df = pd.DataFrame({"a": ["a", 1, 2.0]}) msg = "Can't infer object conversion type" self.check_error_on_write(df, fp, ValueError, msg) def test_categorical(self, fp): df = pd.DataFrame({"a": pd.Categorical(list("abc"))}) check_round_trip(df, fp) def test_filter_row_groups(self, fp): d = {"a": list(range(0, 3))} df = pd.DataFrame(d) with tm.ensure_clean() as path: df.to_parquet(path, fp, compression=None, row_group_offsets=1) result = read_parquet(path, fp, filters=[("a", "==", 0)]) assert len(result) == 1 def test_s3_roundtrip(self, df_compat, s3_resource, fp, s3so): # GH #19134 check_round_trip( df_compat, fp, path="s3://pandas-test/fastparquet.parquet", read_kwargs={"storage_options": s3so}, write_kwargs={"compression": None, "storage_options": s3so}, ) def test_partition_cols_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_partition_cols_string(self, fp, df_full): # GH #27117 partition_cols = "bool" df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, 
compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 1 def test_partition_on_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_error_on_using_partition_cols_and_partition_on(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full msg = ( "Cannot use both partition_on and partition_cols. Use partition_cols for " "partitioning data" ) with pytest.raises(ValueError, match=msg): with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, partition_cols=partition_cols, ) def test_empty_dataframe(self, fp): # GH #27339 df = pd.DataFrame() expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_timezone_aware_index(self, fp, timezone_aware_date_list): idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_use_nullable_dtypes_not_supported(self, fp): df = pd.DataFrame({"a": [1, 2]}) with tm.ensure_clean() as path: df.to_parquet(path) with pytest.raises(ValueError, match="not supported for the fastparquet"): read_parquet(path, engine="fastparquet", use_nullable_dtypes=True)
datapythonista/pandas
pandas/tests/io/test_parquet.py
pandas/tests/extension/base/reduce.py
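The reduction-test base classes listed for this row compare an ExtensionArray reduction against the same reduction performed on a float64 cast of the data. A minimal sketch of that check outside the fixture machinery (the nullable Int64 input and the sum reduction are illustrative choices, not the full parametrized suite):

import pandas as pd
import pandas._testing as tm

# Reduce a nullable-integer Series and compare with the float64 equivalent,
# mirroring BaseReduceTests.check_reduce.
s = pd.Series([1, 2, None], dtype="Int64")
result = s.sum(skipna=True)
expected = s.astype("float64").sum(skipna=True)
tm.assert_almost_equal(result, expected)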
from __future__ import annotations from contextlib import suppress from typing import ( TYPE_CHECKING, Any, Hashable, Sequence, ) import warnings import numpy as np from pandas._libs.indexing import NDFrameIndexerBase from pandas._libs.lib import item_from_zerodim from pandas.errors import ( AbstractMethodError, InvalidIndexError, ) from pandas.util._decorators import doc from pandas.core.dtypes.common import ( is_array_like, is_bool_dtype, is_hashable, is_integer, is_iterator, is_list_like, is_numeric_dtype, is_object_dtype, is_scalar, is_sequence, ) from pandas.core.dtypes.concat import concat_compat from pandas.core.dtypes.generic import ( ABCDataFrame, ABCSeries, ) from pandas.core.dtypes.missing import ( infer_fill_value, isna, ) import pandas.core.common as com from pandas.core.construction import array as pd_array from pandas.core.indexers import ( check_array_indexer, is_empty_indexer, is_exact_shape_match, is_list_like_indexer, length_of_indexer, ) from pandas.core.indexes.api import ( Index, MultiIndex, ) if TYPE_CHECKING: from pandas import ( DataFrame, Series, ) # "null slice" _NS = slice(None, None) # the public IndexSlicerMaker class _IndexSlice: """ Create an object to more easily perform multi-index slicing. See Also -------- MultiIndex.remove_unused_levels : New MultiIndex with no unused levels. Notes ----- See :ref:`Defined Levels <advanced.shown_levels>` for further info on slicing a MultiIndex. Examples -------- >>> midx = pd.MultiIndex.from_product([['A0','A1'], ['B0','B1','B2','B3']]) >>> columns = ['foo', 'bar'] >>> dfmi = pd.DataFrame(np.arange(16).reshape((len(midx), len(columns))), ... index=midx, columns=columns) Using the default slice command: >>> dfmi.loc[(slice(None), slice('B0', 'B1')), :] foo bar A0 B0 0 1 B1 2 3 A1 B0 8 9 B1 10 11 Using the IndexSlice class for a more intuitive command: >>> idx = pd.IndexSlice >>> dfmi.loc[idx[:, 'B0':'B1'], :] foo bar A0 B0 0 1 B1 2 3 A1 B0 8 9 B1 10 11 """ def __getitem__(self, arg): return arg IndexSlice = _IndexSlice() class IndexingError(Exception): pass class IndexingMixin: """ Mixin for adding .loc/.iloc/.at/.iat to Dataframes and Series. """ @property def iloc(self) -> _iLocIndexer: """ Purely integer-location based indexing for selection by position. ``.iloc[]`` is primarily integer position based (from ``0`` to ``length-1`` of the axis), but may also be used with a boolean array. Allowed inputs are: - An integer, e.g. ``5``. - A list or array of integers, e.g. ``[4, 3, 0]``. - A slice object with ints, e.g. ``1:7``. - A boolean array. - A ``callable`` function with one argument (the calling Series or DataFrame) and that returns valid output for indexing (one of the above). This is useful in method chains, when you don't have a reference to the calling object, but would like to base your selection on some value. ``.iloc`` will raise ``IndexError`` if a requested indexer is out-of-bounds, except *slice* indexers which allow out-of-bounds indexing (this conforms with python/numpy *slice* semantics). See more at :ref:`Selection by Position <indexing.integer>`. See Also -------- DataFrame.iat : Fast integer location scalar accessor. DataFrame.loc : Purely label-location based indexer for selection by label. Series.iloc : Purely integer-location based indexing for selection by position. Examples -------- >>> mydict = [{'a': 1, 'b': 2, 'c': 3, 'd': 4}, ... {'a': 100, 'b': 200, 'c': 300, 'd': 400}, ... 
{'a': 1000, 'b': 2000, 'c': 3000, 'd': 4000 }] >>> df = pd.DataFrame(mydict) >>> df a b c d 0 1 2 3 4 1 100 200 300 400 2 1000 2000 3000 4000 **Indexing just the rows** With a scalar integer. >>> type(df.iloc[0]) <class 'pandas.core.series.Series'> >>> df.iloc[0] a 1 b 2 c 3 d 4 Name: 0, dtype: int64 With a list of integers. >>> df.iloc[[0]] a b c d 0 1 2 3 4 >>> type(df.iloc[[0]]) <class 'pandas.core.frame.DataFrame'> >>> df.iloc[[0, 1]] a b c d 0 1 2 3 4 1 100 200 300 400 With a `slice` object. >>> df.iloc[:3] a b c d 0 1 2 3 4 1 100 200 300 400 2 1000 2000 3000 4000 With a boolean mask the same length as the index. >>> df.iloc[[True, False, True]] a b c d 0 1 2 3 4 2 1000 2000 3000 4000 With a callable, useful in method chains. The `x` passed to the ``lambda`` is the DataFrame being sliced. This selects the rows whose index label even. >>> df.iloc[lambda x: x.index % 2 == 0] a b c d 0 1 2 3 4 2 1000 2000 3000 4000 **Indexing both axes** You can mix the indexer types for the index and columns. Use ``:`` to select the entire axis. With scalar integers. >>> df.iloc[0, 1] 2 With lists of integers. >>> df.iloc[[0, 2], [1, 3]] b d 0 2 4 2 2000 4000 With `slice` objects. >>> df.iloc[1:3, 0:3] a b c 1 100 200 300 2 1000 2000 3000 With a boolean array whose length matches the columns. >>> df.iloc[:, [True, False, True, False]] a c 0 1 3 1 100 300 2 1000 3000 With a callable function that expects the Series or DataFrame. >>> df.iloc[:, lambda df: [0, 2]] a c 0 1 3 1 100 300 2 1000 3000 """ return _iLocIndexer("iloc", self) @property def loc(self) -> _LocIndexer: """ Access a group of rows and columns by label(s) or a boolean array. ``.loc[]`` is primarily label based, but may also be used with a boolean array. Allowed inputs are: - A single label, e.g. ``5`` or ``'a'``, (note that ``5`` is interpreted as a *label* of the index, and **never** as an integer position along the index). - A list or array of labels, e.g. ``['a', 'b', 'c']``. - A slice object with labels, e.g. ``'a':'f'``. .. warning:: Note that contrary to usual python slices, **both** the start and the stop are included - A boolean array of the same length as the axis being sliced, e.g. ``[True, False, True]``. - An alignable boolean Series. The index of the key will be aligned before masking. - An alignable Index. The Index of the returned selection will be the input. - A ``callable`` function with one argument (the calling Series or DataFrame) and that returns valid output for indexing (one of the above) See more at :ref:`Selection by Label <indexing.label>`. Raises ------ KeyError If any items are not found. IndexingError If an indexed key is passed and its index is unalignable to the frame index. See Also -------- DataFrame.at : Access a single value for a row/column label pair. DataFrame.iloc : Access group of rows and columns by integer position(s). DataFrame.xs : Returns a cross-section (row(s) or column(s)) from the Series/DataFrame. Series.loc : Access group of values using labels. Examples -------- **Getting values** >>> df = pd.DataFrame([[1, 2], [4, 5], [7, 8]], ... index=['cobra', 'viper', 'sidewinder'], ... columns=['max_speed', 'shield']) >>> df max_speed shield cobra 1 2 viper 4 5 sidewinder 7 8 Single label. Note this returns the row as a Series. >>> df.loc['viper'] max_speed 4 shield 5 Name: viper, dtype: int64 List of labels. Note using ``[[]]`` returns a DataFrame. 
>>> df.loc[['viper', 'sidewinder']] max_speed shield viper 4 5 sidewinder 7 8 Single label for row and column >>> df.loc['cobra', 'shield'] 2 Slice with labels for row and single label for column. As mentioned above, note that both the start and stop of the slice are included. >>> df.loc['cobra':'viper', 'max_speed'] cobra 1 viper 4 Name: max_speed, dtype: int64 Boolean list with the same length as the row axis >>> df.loc[[False, False, True]] max_speed shield sidewinder 7 8 Alignable boolean Series: >>> df.loc[pd.Series([False, True, False], ... index=['viper', 'sidewinder', 'cobra'])] max_speed shield sidewinder 7 8 Index (same behavior as ``df.reindex``) >>> df.loc[pd.Index(["cobra", "viper"], name="foo")] max_speed shield foo cobra 1 2 viper 4 5 Conditional that returns a boolean Series >>> df.loc[df['shield'] > 6] max_speed shield sidewinder 7 8 Conditional that returns a boolean Series with column labels specified >>> df.loc[df['shield'] > 6, ['max_speed']] max_speed sidewinder 7 Callable that returns a boolean Series >>> df.loc[lambda df: df['shield'] == 8] max_speed shield sidewinder 7 8 **Setting values** Set value for all items matching the list of labels >>> df.loc[['viper', 'sidewinder'], ['shield']] = 50 >>> df max_speed shield cobra 1 2 viper 4 50 sidewinder 7 50 Set value for an entire row >>> df.loc['cobra'] = 10 >>> df max_speed shield cobra 10 10 viper 4 50 sidewinder 7 50 Set value for an entire column >>> df.loc[:, 'max_speed'] = 30 >>> df max_speed shield cobra 30 10 viper 30 50 sidewinder 30 50 Set value for rows matching callable condition >>> df.loc[df['shield'] > 35] = 0 >>> df max_speed shield cobra 30 10 viper 0 0 sidewinder 0 0 **Getting values on a DataFrame with an index that has integer labels** Another example using integers for the index >>> df = pd.DataFrame([[1, 2], [4, 5], [7, 8]], ... index=[7, 8, 9], columns=['max_speed', 'shield']) >>> df max_speed shield 7 1 2 8 4 5 9 7 8 Slice with integer labels for rows. As mentioned above, note that both the start and stop of the slice are included. >>> df.loc[7:9] max_speed shield 7 1 2 8 4 5 9 7 8 **Getting values with a MultiIndex** A number of examples using a DataFrame with a MultiIndex >>> tuples = [ ... ('cobra', 'mark i'), ('cobra', 'mark ii'), ... ('sidewinder', 'mark i'), ('sidewinder', 'mark ii'), ... ('viper', 'mark ii'), ('viper', 'mark iii') ... ] >>> index = pd.MultiIndex.from_tuples(tuples) >>> values = [[12, 2], [0, 4], [10, 20], ... [1, 4], [7, 1], [16, 36]] >>> df = pd.DataFrame(values, columns=['max_speed', 'shield'], index=index) >>> df max_speed shield cobra mark i 12 2 mark ii 0 4 sidewinder mark i 10 20 mark ii 1 4 viper mark ii 7 1 mark iii 16 36 Single label. Note this returns a DataFrame with a single index. >>> df.loc['cobra'] max_speed shield mark i 12 2 mark ii 0 4 Single index tuple. Note this returns a Series. >>> df.loc[('cobra', 'mark ii')] max_speed 0 shield 4 Name: (cobra, mark ii), dtype: int64 Single label for row and column. Similar to passing in a tuple, this returns a Series. >>> df.loc['cobra', 'mark i'] max_speed 12 shield 2 Name: (cobra, mark i), dtype: int64 Single tuple. Note using ``[[]]`` returns a DataFrame. 
>>> df.loc[[('cobra', 'mark ii')]] max_speed shield cobra mark ii 0 4 Single tuple for the index with a single label for the column >>> df.loc[('cobra', 'mark i'), 'shield'] 2 Slice from index tuple to single label >>> df.loc[('cobra', 'mark i'):'viper'] max_speed shield cobra mark i 12 2 mark ii 0 4 sidewinder mark i 10 20 mark ii 1 4 viper mark ii 7 1 mark iii 16 36 Slice from index tuple to index tuple >>> df.loc[('cobra', 'mark i'):('viper', 'mark ii')] max_speed shield cobra mark i 12 2 mark ii 0 4 sidewinder mark i 10 20 mark ii 1 4 viper mark ii 7 1 """ return _LocIndexer("loc", self) @property def at(self) -> _AtIndexer: """ Access a single value for a row/column label pair. Similar to ``loc``, in that both provide label-based lookups. Use ``at`` if you only need to get or set a single value in a DataFrame or Series. Raises ------ KeyError If 'label' does not exist in DataFrame. See Also -------- DataFrame.iat : Access a single value for a row/column pair by integer position. DataFrame.loc : Access a group of rows and columns by label(s). Series.at : Access a single value using a label. Examples -------- >>> df = pd.DataFrame([[0, 2, 3], [0, 4, 1], [10, 20, 30]], ... index=[4, 5, 6], columns=['A', 'B', 'C']) >>> df A B C 4 0 2 3 5 0 4 1 6 10 20 30 Get value at specified row/column pair >>> df.at[4, 'B'] 2 Set value at specified row/column pair >>> df.at[4, 'B'] = 10 >>> df.at[4, 'B'] 10 Get value within a Series >>> df.loc[5].at['B'] 4 """ return _AtIndexer("at", self) @property def iat(self) -> _iAtIndexer: """ Access a single value for a row/column pair by integer position. Similar to ``iloc``, in that both provide integer-based lookups. Use ``iat`` if you only need to get or set a single value in a DataFrame or Series. Raises ------ IndexError When integer position is out of bounds. See Also -------- DataFrame.at : Access a single value for a row/column label pair. DataFrame.loc : Access a group of rows and columns by label(s). DataFrame.iloc : Access a group of rows and columns by integer position(s). Examples -------- >>> df = pd.DataFrame([[0, 2, 3], [0, 4, 1], [10, 20, 30]], ... columns=['A', 'B', 'C']) >>> df A B C 0 0 2 3 1 0 4 1 2 10 20 30 Get value at specified row/column pair >>> df.iat[1, 2] 1 Set value at specified row/column pair >>> df.iat[1, 2] = 10 >>> df.iat[1, 2] 10 Get value within a series >>> df.loc[0].iat[1] 2 """ return _iAtIndexer("iat", self) class _LocationIndexer(NDFrameIndexerBase): _valid_types: str axis = None def __call__(self, axis=None): # we need to return a copy of ourselves new_self = type(self)(self.name, self.obj) if axis is not None: axis = self.obj._get_axis_number(axis) new_self.axis = axis return new_self def _get_setitem_indexer(self, key): """ Convert a potentially-label-based key into a positional indexer. """ if self.name == "loc": self._ensure_listlike_indexer(key) if self.axis is not None: return self._convert_tuple(key, is_setter=True) ax = self.obj._get_axis(0) if isinstance(ax, MultiIndex) and self.name != "iloc": with suppress(TypeError, KeyError, InvalidIndexError): # TypeError e.g. 
passed a bool return ax.get_loc(key) if isinstance(key, tuple): with suppress(IndexingError): return self._convert_tuple(key, is_setter=True) if isinstance(key, range): return list(key) try: return self._convert_to_indexer(key, axis=0, is_setter=True) except TypeError as e: # invalid indexer type vs 'other' indexing errors if "cannot do" in str(e): raise elif "unhashable type" in str(e): raise raise IndexingError(key) from e def _ensure_listlike_indexer(self, key, axis=None, value=None): """ Ensure that a list-like of column labels are all present by adding them if they do not already exist. Parameters ---------- key : list-like of column labels Target labels. axis : key axis if known """ column_axis = 1 # column only exists in 2-dimensional DataFrame if self.ndim != 2: return if isinstance(key, tuple) and len(key) > 1: # key may be a tuple if we are .loc # if length of key is > 1 set key to column part key = key[column_axis] axis = column_axis if ( axis == column_axis and not isinstance(self.obj.columns, MultiIndex) and is_list_like_indexer(key) and not com.is_bool_indexer(key) and all(is_hashable(k) for k in key) ): # GH#38148 keys = self.obj.columns.union(key, sort=False) self.obj._mgr = self.obj._mgr.reindex_axis( keys, axis=0, consolidate=False, only_slice=True ) def __setitem__(self, key, value): if isinstance(key, tuple): key = tuple(list(x) if is_iterator(x) else x for x in key) key = tuple(com.apply_if_callable(x, self.obj) for x in key) else: key = com.apply_if_callable(key, self.obj) indexer = self._get_setitem_indexer(key) self._has_valid_setitem_indexer(key) iloc = self if self.name == "iloc" else self.obj.iloc iloc._setitem_with_indexer(indexer, value, self.name) def _validate_key(self, key, axis: int): """ Ensure that key is valid for current indexer. Parameters ---------- key : scalar, slice or list-like Key requested. axis : int Dimension on which the indexing is being made. Raises ------ TypeError If the key (or some element of it) has wrong type. IndexError If the key (or some element of it) is out of bounds. KeyError If the key was not found. """ raise AbstractMethodError(self) def _has_valid_tuple(self, key: tuple): """ Check the key for valid keys across my indexer. """ self._validate_key_length(key) for i, k in enumerate(key): try: self._validate_key(k, i) except ValueError as err: raise ValueError( "Location based indexing can only have " f"[{self._valid_types}] types" ) from err def _is_nested_tuple_indexer(self, tup: tuple) -> bool: """ Returns ------- bool """ if any(isinstance(ax, MultiIndex) for ax in self.obj.axes): return any(is_nested_tuple(tup, ax) for ax in self.obj.axes) return False def _convert_tuple(self, key, is_setter: bool = False): keyidx = [] if self.axis is not None: axis = self.obj._get_axis_number(self.axis) for i in range(self.ndim): if i == axis: keyidx.append( self._convert_to_indexer(key, axis=axis, is_setter=is_setter) ) else: keyidx.append(slice(None)) else: self._validate_key_length(key) for i, k in enumerate(key): idx = self._convert_to_indexer(k, axis=i, is_setter=is_setter) keyidx.append(idx) return tuple(keyidx) def _validate_key_length(self, key: Sequence[Any]) -> None: if len(key) > self.ndim: raise IndexingError("Too many indexers") def _getitem_tuple_same_dim(self, tup: tuple): """ Index with indexers that should return an object of the same dimension as self.obj. This is only called after a failed call to _getitem_lowerdim. 
""" retval = self.obj for i, key in enumerate(tup): if com.is_null_slice(key): continue retval = getattr(retval, self.name)._getitem_axis(key, axis=i) # We should never have retval.ndim < self.ndim, as that should # be handled by the _getitem_lowerdim call above. assert retval.ndim == self.ndim return retval def _getitem_lowerdim(self, tup: tuple): # we can directly get the axis result since the axis is specified if self.axis is not None: axis = self.obj._get_axis_number(self.axis) return self._getitem_axis(tup, axis=axis) # we may have a nested tuples indexer here if self._is_nested_tuple_indexer(tup): return self._getitem_nested_tuple(tup) # we maybe be using a tuple to represent multiple dimensions here ax0 = self.obj._get_axis(0) # ...but iloc should handle the tuple as simple integer-location # instead of checking it as multiindex representation (GH 13797) if isinstance(ax0, MultiIndex) and self.name != "iloc": with suppress(IndexingError): return self._handle_lowerdim_multi_index_axis0(tup) self._validate_key_length(tup) for i, key in enumerate(tup): if is_label_like(key): # We don't need to check for tuples here because those are # caught by the _is_nested_tuple_indexer check above. section = self._getitem_axis(key, axis=i) # We should never have a scalar section here, because # _getitem_lowerdim is only called after a check for # is_scalar_access, which that would be. if section.ndim == self.ndim: # we're in the middle of slicing through a MultiIndex # revise the key wrt to `section` by inserting an _NS new_key = tup[:i] + (_NS,) + tup[i + 1 :] else: # Note: the section.ndim == self.ndim check above # rules out having DataFrame here, so we dont need to worry # about transposing. new_key = tup[:i] + tup[i + 1 :] if len(new_key) == 1: new_key = new_key[0] # Slices should return views, but calling iloc/loc with a null # slice returns a new object. if com.is_null_slice(new_key): return section # This is an elided recursive call to iloc/loc return getattr(section, self.name)[new_key] raise IndexingError("not applicable") def _getitem_nested_tuple(self, tup: tuple): # we have a nested tuple so have at least 1 multi-index level # we should be able to match up the dimensionality here # we have too many indexers for our dim, but have at least 1 # multi-index dimension, try to see if we have something like # a tuple passed to a series with a multi-index if len(tup) > self.ndim: if self.name != "loc": # This should never be reached, but lets be explicit about it raise ValueError("Too many indices") if isinstance(self.obj, ABCSeries) and any( isinstance(k, tuple) for k in tup ): # GH#35349 Raise if tuple in tuple for series raise ValueError("Too many indices") if self.ndim == 1 or not any(isinstance(x, slice) for x in tup): # GH#10521 Series should reduce MultiIndex dimensions instead of # DataFrame, IndexingError is not raised when slice(None,None,None) # with one row. 
with suppress(IndexingError): return self._handle_lowerdim_multi_index_axis0(tup) # this is a series with a multi-index specified a tuple of # selectors axis = self.axis or 0 return self._getitem_axis(tup, axis=axis) # handle the multi-axis by taking sections and reducing # this is iterative obj = self.obj # GH#41369 Loop in reverse order ensures indexing along columns before rows # which selects only necessary blocks which avoids dtype conversion if possible axis = len(tup) - 1 for key in tup[::-1]: if com.is_null_slice(key): axis -= 1 continue obj = getattr(obj, self.name)._getitem_axis(key, axis=axis) axis -= 1 # if we have a scalar, we are done if is_scalar(obj) or not hasattr(obj, "ndim"): break return obj def _convert_to_indexer(self, key, axis: int, is_setter: bool = False): raise AbstractMethodError(self) def __getitem__(self, key): if type(key) is tuple: key = tuple(list(x) if is_iterator(x) else x for x in key) key = tuple(com.apply_if_callable(x, self.obj) for x in key) if self._is_scalar_access(key): with suppress(KeyError, IndexError, AttributeError): # AttributeError for IntervalTree get_value return self.obj._get_value(*key, takeable=self._takeable) return self._getitem_tuple(key) else: # we by definition only have the 0th axis axis = self.axis or 0 maybe_callable = com.apply_if_callable(key, self.obj) return self._getitem_axis(maybe_callable, axis=axis) def _is_scalar_access(self, key: tuple): raise NotImplementedError() def _getitem_tuple(self, tup: tuple): raise AbstractMethodError(self) def _getitem_axis(self, key, axis: int): raise NotImplementedError() def _has_valid_setitem_indexer(self, indexer) -> bool: raise AbstractMethodError(self) def _getbool_axis(self, key, axis: int): # caller is responsible for ensuring non-None axis labels = self.obj._get_axis(axis) key = check_bool_indexer(labels, key) inds = key.nonzero()[0] return self.obj._take_with_is_copy(inds, axis=axis) @doc(IndexingMixin.loc) class _LocIndexer(_LocationIndexer): _takeable: bool = False _valid_types = ( "labels (MUST BE IN THE INDEX), slices of labels (BOTH " "endpoints included! 
Can be slices of integers if the " "index is integers), listlike of labels, boolean" ) # ------------------------------------------------------------------- # Key Checks @doc(_LocationIndexer._validate_key) def _validate_key(self, key, axis: int): # valid for a collection of labels (we check their presence later) # slice of labels (where start-end in labels) # slice of integers (only if in the labels) # boolean not in slice and with boolean index if isinstance(key, bool) and not is_bool_dtype(self.obj.index): raise KeyError( f"{key}: boolean label can not be used without a boolean index" ) if isinstance(key, slice) and ( isinstance(key.start, bool) or isinstance(key.stop, bool) ): raise TypeError(f"{key}: boolean values can not be used in a slice") def _has_valid_setitem_indexer(self, indexer) -> bool: return True def _is_scalar_access(self, key: tuple) -> bool: """ Returns ------- bool """ # this is a shortcut accessor to both .loc and .iloc # that provide the equivalent access of .at and .iat # a) avoid getting things via sections and (to minimize dtype changes) # b) provide a performant path if len(key) != self.ndim: return False for i, k in enumerate(key): if not is_scalar(k): return False ax = self.obj.axes[i] if isinstance(ax, MultiIndex): return False if isinstance(k, str) and ax._supports_partial_string_indexing: # partial string indexing, df.loc['2000', 'A'] # should not be considered scalar return False if not ax.is_unique: return False return True # ------------------------------------------------------------------- # MultiIndex Handling def _multi_take_opportunity(self, tup: tuple) -> bool: """ Check whether there is the possibility to use ``_multi_take``. Currently the limit is that all axes being indexed, must be indexed with list-likes. Parameters ---------- tup : tuple Tuple of indexers, one per axis. Returns ------- bool Whether the current indexing, can be passed through `_multi_take`. """ if not all(is_list_like_indexer(x) for x in tup): return False # just too complicated return not any(com.is_bool_indexer(x) for x in tup) def _multi_take(self, tup: tuple): """ Create the indexers for the passed tuple of keys, and executes the take operation. This allows the take operation to be executed all at once, rather than once for each dimension. Improving efficiency. Parameters ---------- tup : tuple Tuple of indexers, one per axis. Returns ------- values: same type as the object being indexed """ # GH 836 d = { axis: self._get_listlike_indexer(key, axis) for (key, axis) in zip(tup, self.obj._AXIS_ORDERS) } return self.obj._reindex_with_indexers(d, copy=True, allow_dups=True) # ------------------------------------------------------------------- def _getitem_iterable(self, key, axis: int): """ Index current object with an iterable collection of keys. Parameters ---------- key : iterable Targeted labels. axis : int Dimension on which the indexing is being made. Raises ------ KeyError If no key was found. Will change in the future to raise if not all keys were found. Returns ------- scalar, DataFrame, or Series: indexed value(s). """ # we assume that not com.is_bool_indexer(key), as that is # handled before we get here. 
self._validate_key(key, axis) # A collection of keys keyarr, indexer = self._get_listlike_indexer(key, axis) return self.obj._reindex_with_indexers( {axis: [keyarr, indexer]}, copy=True, allow_dups=True ) def _getitem_tuple(self, tup: tuple): with suppress(IndexingError): return self._getitem_lowerdim(tup) # no multi-index, so validate all of the indexers self._has_valid_tuple(tup) # ugly hack for GH #836 if self._multi_take_opportunity(tup): return self._multi_take(tup) return self._getitem_tuple_same_dim(tup) def _get_label(self, label, axis: int): # GH#5667 this will fail if the label is not present in the axis. return self.obj.xs(label, axis=axis) def _handle_lowerdim_multi_index_axis0(self, tup: tuple): # we have an axis0 multi-index, handle or raise axis = self.axis or 0 try: # fast path for series or for tup devoid of slices return self._get_label(tup, axis=axis) except (TypeError, InvalidIndexError): # slices are unhashable pass except KeyError as ek: # raise KeyError if number of indexers match # else IndexingError will be raised if self.ndim < len(tup) <= self.obj.index.nlevels: raise ek raise IndexingError("No label returned") def _getitem_axis(self, key, axis: int): key = item_from_zerodim(key) if is_iterator(key): key = list(key) labels = self.obj._get_axis(axis) key = labels._get_partial_string_timestamp_match_key(key) if isinstance(key, slice): self._validate_key(key, axis) return self._get_slice_axis(key, axis=axis) elif com.is_bool_indexer(key): return self._getbool_axis(key, axis=axis) elif is_list_like_indexer(key): # an iterable multi-selection if not (isinstance(key, tuple) and isinstance(labels, MultiIndex)): if hasattr(key, "ndim") and key.ndim > 1: raise ValueError("Cannot index with multidimensional key") return self._getitem_iterable(key, axis=axis) # nested tuple slicing if is_nested_tuple(key, labels): locs = labels.get_locs(key) indexer = [slice(None)] * self.ndim indexer[axis] = locs return self.obj.iloc[tuple(indexer)] # fall thru to straight lookup self._validate_key(key, axis) return self._get_label(key, axis=axis) def _get_slice_axis(self, slice_obj: slice, axis: int): """ This is pretty simple as we just have to deal with labels. """ # caller is responsible for ensuring non-None axis obj = self.obj if not need_slice(slice_obj): return obj.copy(deep=False) labels = obj._get_axis(axis) indexer = labels.slice_indexer(slice_obj.start, slice_obj.stop, slice_obj.step) if isinstance(indexer, slice): return self.obj._slice(indexer, axis=axis) else: # DatetimeIndex overrides Index.slice_indexer and may # return a DatetimeIndex instead of a slice object. return self.obj.take(indexer, axis=axis) def _convert_to_indexer(self, key, axis: int, is_setter: bool = False): """ Convert indexing key into something we can use to do actual fancy indexing on a ndarray. Examples ix[:5] -> slice(0, 5) ix[[1,2,3]] -> [1,2,3] ix[['foo', 'bar', 'baz']] -> [i, j, k] (indices of foo, bar, baz) Going by Zen of Python? 'In the face of ambiguity, refuse the temptation to guess.' raise AmbiguousIndexError with integer labels? 
- No, prefer label-based indexing """ labels = self.obj._get_axis(axis) if isinstance(key, slice): return labels._convert_slice_indexer(key, kind="loc") # see if we are positional in nature is_int_index = labels.is_integer() is_int_positional = is_integer(key) and not is_int_index if is_scalar(key) or isinstance(labels, MultiIndex): # Otherwise get_loc will raise InvalidIndexError # if we are a label return me try: return labels.get_loc(key) except LookupError: if isinstance(key, tuple) and isinstance(labels, MultiIndex): if len(key) == labels.nlevels: return {"key": key} raise except InvalidIndexError: # GH35015, using datetime as column indices raises exception if not isinstance(labels, MultiIndex): raise except TypeError: pass except ValueError: if not is_int_positional: raise # a positional if is_int_positional: # if we are setting and its not a valid location # its an insert which fails by definition # always valid return {"key": key} if is_nested_tuple(key, labels): if isinstance(self.obj, ABCSeries) and any( isinstance(k, tuple) for k in key ): # GH#35349 Raise if tuple in tuple for series raise ValueError("Too many indices") return labels.get_locs(key) elif is_list_like_indexer(key): if is_iterator(key): key = list(key) if com.is_bool_indexer(key): key = check_bool_indexer(labels, key) (inds,) = key.nonzero() return inds else: return self._get_listlike_indexer(key, axis)[1] else: try: return labels.get_loc(key) except LookupError: # allow a not found key only if we are a setter if not is_list_like_indexer(key): return {"key": key} raise def _get_listlike_indexer(self, key, axis: int): """ Transform a list-like of keys into a new index and an indexer. Parameters ---------- key : list-like Targeted labels. axis: int Dimension on which the indexing is being made. Raises ------ KeyError If at least one key was requested but none was found. Returns ------- keyarr: Index New index (coinciding with 'key' if the axis is unique). values : array-like Indexer for the return object, -1 denotes keys not found. """ ax = self.obj._get_axis(axis) # Have the index compute an indexer or return None # if it cannot handle: indexer, keyarr = ax._convert_listlike_indexer(key) # We only act on all found values: if indexer is not None and (indexer != -1).all(): # _validate_read_indexer is a no-op if no -1s, so skip return ax[indexer], indexer if ax._index_as_unique: indexer = ax.get_indexer_for(keyarr) keyarr = ax.reindex(keyarr)[0] else: keyarr, indexer, new_indexer = ax._reindex_non_unique(keyarr) self._validate_read_indexer(keyarr, indexer, axis) return keyarr, indexer def _validate_read_indexer(self, key, indexer, axis: int): """ Check that indexer can be used to return a result. e.g. at least one element was found, unless the list of keys was actually empty. Parameters ---------- key : list-like Targeted labels (only used to show correct error message). indexer: array-like of booleans Indices corresponding to the key, (with -1 indicating not found). axis : int Dimension on which the indexing is being made. Raises ------ KeyError If at least one key was requested but none was found. 
""" if len(key) == 0: return # Count missing values: missing_mask = indexer < 0 missing = (missing_mask).sum() if missing: if missing == len(indexer): axis_name = self.obj._get_axis_name(axis) raise KeyError(f"None of [{key}] are in the [{axis_name}]") ax = self.obj._get_axis(axis) not_found = list(set(key) - set(ax)) raise KeyError(f"{not_found} not in index") @doc(IndexingMixin.iloc) class _iLocIndexer(_LocationIndexer): _valid_types = ( "integer, integer slice (START point is INCLUDED, END " "point is EXCLUDED), listlike of integers, boolean array" ) _takeable = True # ------------------------------------------------------------------- # Key Checks def _validate_key(self, key, axis: int): if com.is_bool_indexer(key): if hasattr(key, "index") and isinstance(key.index, Index): if key.index.inferred_type == "integer": raise NotImplementedError( "iLocation based boolean " "indexing on an integer type " "is not available" ) raise ValueError( "iLocation based boolean indexing cannot use " "an indexable as a mask" ) return if isinstance(key, slice): return elif is_integer(key): self._validate_integer(key, axis) elif isinstance(key, tuple): # a tuple should already have been caught by this point # so don't treat a tuple as a valid indexer raise IndexingError("Too many indexers") elif is_list_like_indexer(key): arr = np.array(key) len_axis = len(self.obj._get_axis(axis)) # check that the key has a numeric dtype if not is_numeric_dtype(arr.dtype): raise IndexError(f".iloc requires numeric indexers, got {arr}") # check that the key does not exceed the maximum size of the index if len(arr) and (arr.max() >= len_axis or arr.min() < -len_axis): raise IndexError("positional indexers are out-of-bounds") else: raise ValueError(f"Can only index by location with a [{self._valid_types}]") def _has_valid_setitem_indexer(self, indexer) -> bool: """ Validate that a positional indexer cannot enlarge its target will raise if needed, does not modify the indexer externally. Returns ------- bool """ if isinstance(indexer, dict): raise IndexError("iloc cannot enlarge its target object") if isinstance(indexer, ABCDataFrame): warnings.warn( "DataFrame indexer for .iloc is deprecated and will be removed in" "a future version.\n" "consider using .loc with a DataFrame indexer for automatic alignment.", FutureWarning, stacklevel=3, ) if not isinstance(indexer, tuple): indexer = _tuplify(self.ndim, indexer) for ax, i in zip(self.obj.axes, indexer): if isinstance(i, slice): # should check the stop slice? pass elif is_list_like_indexer(i): # should check the elements? pass elif is_integer(i): if i >= len(ax): raise IndexError("iloc cannot enlarge its target object") elif isinstance(i, dict): raise IndexError("iloc cannot enlarge its target object") return True def _is_scalar_access(self, key: tuple) -> bool: """ Returns ------- bool """ # this is a shortcut accessor to both .loc and .iloc # that provide the equivalent access of .at and .iat # a) avoid getting things via sections and (to minimize dtype changes) # b) provide a performant path if len(key) != self.ndim: return False return all(is_integer(k) for k in key) def _validate_integer(self, key: int, axis: int) -> None: """ Check that 'key' is a valid position in the desired axis. Parameters ---------- key : int Requested position. axis : int Desired axis. Raises ------ IndexError If 'key' is not a valid position in axis 'axis'. 
""" len_axis = len(self.obj._get_axis(axis)) if key >= len_axis or key < -len_axis: raise IndexError("single positional indexer is out-of-bounds") # ------------------------------------------------------------------- def _getitem_tuple(self, tup: tuple): self._has_valid_tuple(tup) with suppress(IndexingError): return self._getitem_lowerdim(tup) return self._getitem_tuple_same_dim(tup) def _get_list_axis(self, key, axis: int): """ Return Series values by list or array of integers. Parameters ---------- key : list-like positional indexer axis : int Returns ------- Series object Notes ----- `axis` can only be zero. """ try: return self.obj._take_with_is_copy(key, axis=axis) except IndexError as err: # re-raise with different error message raise IndexError("positional indexers are out-of-bounds") from err def _getitem_axis(self, key, axis: int): if isinstance(key, ABCDataFrame): raise IndexError( "DataFrame indexer is not allowed for .iloc\n" "Consider using .loc for automatic alignment." ) if isinstance(key, slice): return self._get_slice_axis(key, axis=axis) if is_iterator(key): key = list(key) if isinstance(key, list): key = np.asarray(key) if com.is_bool_indexer(key): self._validate_key(key, axis) return self._getbool_axis(key, axis=axis) # a list of integers elif is_list_like_indexer(key): return self._get_list_axis(key, axis=axis) # a single integer else: key = item_from_zerodim(key) if not is_integer(key): raise TypeError("Cannot index by location index with a non-integer key") # validate the location self._validate_integer(key, axis) return self.obj._ixs(key, axis=axis) def _get_slice_axis(self, slice_obj: slice, axis: int): # caller is responsible for ensuring non-None axis obj = self.obj if not need_slice(slice_obj): return obj.copy(deep=False) labels = obj._get_axis(axis) labels._validate_positional_slice(slice_obj) return self.obj._slice(slice_obj, axis=axis) def _convert_to_indexer(self, key, axis: int, is_setter: bool = False): """ Much simpler as we only have to deal with our valid types. """ return key def _get_setitem_indexer(self, key): # GH#32257 Fall through to let numpy do validation if is_iterator(key): return list(key) return key # ------------------------------------------------------------------- def _setitem_with_indexer(self, indexer, value, name="iloc"): """ _setitem_with_indexer is for setting values on a Series/DataFrame using positional indexers. If the relevant keys are not present, the Series/DataFrame may be expanded. This method is currently broken when dealing with non-unique Indexes, since it goes from positional indexers back to labels when calling BlockManager methods, see GH#12991, GH#22046, GH#15686. 
""" info_axis = self.obj._info_axis_number # maybe partial set take_split_path = not self.obj._mgr.is_single_block # if there is only one block/type, still have to take split path # unless the block is one-dimensional or it can hold the value if ( not take_split_path and getattr(self.obj._mgr, "blocks", False) and self.ndim > 1 ): # in case of dict, keys are indices val = list(value.values()) if isinstance(value, dict) else value blk = self.obj._mgr.blocks[0] take_split_path = not blk._can_hold_element(val) # if we have any multi-indexes that have non-trivial slices # (not null slices) then we must take the split path, xref # GH 10360, GH 27841 if isinstance(indexer, tuple) and len(indexer) == len(self.obj.axes): for i, ax in zip(indexer, self.obj.axes): if isinstance(ax, MultiIndex) and not ( is_integer(i) or com.is_null_slice(i) ): take_split_path = True break if isinstance(indexer, tuple): nindexer = [] for i, idx in enumerate(indexer): if isinstance(idx, dict): # reindex the axis to the new value # and set inplace key, _ = convert_missing_indexer(idx) # if this is the items axes, then take the main missing # path first # this correctly sets the dtype and avoids cache issues # essentially this separates out the block that is needed # to possibly be modified if self.ndim > 1 and i == info_axis: # add the new item, and set the value # must have all defined axes if we have a scalar # or a list-like on the non-info axes if we have a # list-like if not len(self.obj): if not is_list_like_indexer(value): raise ValueError( "cannot set a frame with no " "defined index and a scalar" ) self.obj[key] = value return # add a new item with the dtype setup if com.is_null_slice(indexer[0]): # We are setting an entire column self.obj[key] = value else: self.obj[key] = infer_fill_value(value) new_indexer = convert_from_missing_indexer_tuple( indexer, self.obj.axes ) self._setitem_with_indexer(new_indexer, value, name) return # reindex the axis # make sure to clear the cache because we are # just replacing the block manager here # so the object is the same index = self.obj._get_axis(i) labels = index.insert(len(index), key) # We are expanding the Series/DataFrame values to match # the length of thenew index `labels`. GH#40096 ensure # this is valid even if the index has duplicates. taker = np.arange(len(index) + 1, dtype=np.intp) taker[-1] = -1 reindexers = {i: (labels, taker)} new_obj = self.obj._reindex_with_indexers( reindexers, allow_dups=True ) self.obj._mgr = new_obj._mgr self.obj._maybe_update_cacher(clear=True) self.obj._is_copy = None nindexer.append(labels.get_loc(key)) else: nindexer.append(idx) indexer = tuple(nindexer) else: indexer, missing = convert_missing_indexer(indexer) if missing: self._setitem_with_indexer_missing(indexer, value) return # align and set the values if take_split_path: # We have to operate column-wise self._setitem_with_indexer_split_path(indexer, value, name) else: self._setitem_single_block(indexer, value, name) def _setitem_with_indexer_split_path(self, indexer, value, name: str): """ Setitem column-wise. 
""" # Above we only set take_split_path to True for 2D cases assert self.ndim == 2 if not isinstance(indexer, tuple): indexer = _tuplify(self.ndim, indexer) if len(indexer) > self.ndim: raise IndexError("too many indices for array") if isinstance(indexer[0], np.ndarray) and indexer[0].ndim > 2: raise ValueError(r"Cannot set values with ndim > 2") if (isinstance(value, ABCSeries) and name != "iloc") or isinstance(value, dict): from pandas import Series value = self._align_series(indexer, Series(value)) # Ensure we have something we can iterate over info_axis = indexer[1] ilocs = self._ensure_iterable_column_indexer(info_axis) pi = indexer[0] lplane_indexer = length_of_indexer(pi, self.obj.index) # lplane_indexer gives the expected length of obj[indexer[0]] # we need an iterable, with a ndim of at least 1 # eg. don't pass through np.array(0) if is_list_like_indexer(value) and getattr(value, "ndim", 1) > 0: if isinstance(value, ABCDataFrame): self._setitem_with_indexer_frame_value(indexer, value, name) elif np.ndim(value) == 2: self._setitem_with_indexer_2d_value(indexer, value) elif len(ilocs) == 1 and lplane_indexer == len(value) and not is_scalar(pi): # We are setting multiple rows in a single column. self._setitem_single_column(ilocs[0], value, pi) elif len(ilocs) == 1 and 0 != lplane_indexer != len(value): # We are trying to set N values into M entries of a single # column, which is invalid for N != M # Exclude zero-len for e.g. boolean masking that is all-false if len(value) == 1 and not is_integer(info_axis): # This is a case like df.iloc[:3, [1]] = [0] # where we treat as df.iloc[:3, 1] = 0 return self._setitem_with_indexer((pi, info_axis[0]), value[0]) raise ValueError( "Must have equal len keys and value " "when setting with an iterable" ) elif lplane_indexer == 0 and len(value) == len(self.obj.index): # We get here in one case via .loc with a all-False mask pass elif len(ilocs) == len(value): # We are setting multiple columns in a single row. for loc, v in zip(ilocs, value): self._setitem_single_column(loc, v, pi) elif len(ilocs) == 1 and com.is_null_slice(pi) and len(self.obj) == 0: # This is a setitem-with-expansion, see # test_loc_setitem_empty_append_expands_rows_mixed_dtype # e.g. 
df = DataFrame(columns=["x", "y"]) # df["x"] = df["x"].astype(np.int64) # df.loc[:, "x"] = [1, 2, 3] self._setitem_single_column(ilocs[0], value, pi) else: raise ValueError( "Must have equal len keys and value " "when setting with an iterable" ) else: # scalar value for loc in ilocs: self._setitem_single_column(loc, value, pi) def _setitem_with_indexer_2d_value(self, indexer, value): # We get here with np.ndim(value) == 2, excluding DataFrame, # which goes through _setitem_with_indexer_frame_value pi = indexer[0] ilocs = self._ensure_iterable_column_indexer(indexer[1]) # GH#7551 Note that this coerces the dtype if we are mixed value = np.array(value, dtype=object) if len(ilocs) != value.shape[1]: raise ValueError( "Must have equal len keys and value when setting with an ndarray" ) for i, loc in enumerate(ilocs): # setting with a list, re-coerces self._setitem_single_column(loc, value[:, i].tolist(), pi) def _setitem_with_indexer_frame_value(self, indexer, value: DataFrame, name: str): ilocs = self._ensure_iterable_column_indexer(indexer[1]) sub_indexer = list(indexer) pi = indexer[0] multiindex_indexer = isinstance(self.obj.columns, MultiIndex) unique_cols = value.columns.is_unique # We do not want to align the value in case of iloc GH#37728 if name == "iloc": for i, loc in enumerate(ilocs): val = value.iloc[:, i] self._setitem_single_column(loc, val, pi) elif not unique_cols and value.columns.equals(self.obj.columns): # We assume we are already aligned, see # test_iloc_setitem_frame_duplicate_columns_multiple_blocks for loc in ilocs: item = self.obj.columns[loc] if item in value: sub_indexer[1] = item val = self._align_series( tuple(sub_indexer), value.iloc[:, loc], multiindex_indexer, ) else: val = np.nan self._setitem_single_column(loc, val, pi) elif not unique_cols: raise ValueError("Setting with non-unique columns is not allowed.") else: for loc in ilocs: item = self.obj.columns[loc] if item in value: sub_indexer[1] = item val = self._align_series( tuple(sub_indexer), value[item], multiindex_indexer ) else: val = np.nan self._setitem_single_column(loc, val, pi) def _setitem_single_column(self, loc: int, value, plane_indexer): """ Parameters ---------- loc : int Indexer for column position plane_indexer : int, slice, listlike[int] The indexer we use for setitem along axis=0. """ pi = plane_indexer ser = self.obj._ixs(loc, axis=1) # perform the equivalent of a setitem on the info axis # as we have a null slice or a slice with full bounds # which means essentially reassign to the columns of a # multi-dim object # GH#6149 (null slice), GH#10408 (full bounds) if com.is_null_slice(pi) or com.is_full_slice(pi, len(self.obj)): ser = value elif ( is_array_like(value) and is_exact_shape_match(ser, value) and not is_empty_indexer(pi, value) ): if is_list_like(pi): ser = value[np.argsort(pi)] else: # in case of slice ser = value[pi] else: # set the item, possibly having a dtype change ser = ser.copy() ser._mgr = ser._mgr.setitem(indexer=(pi,), value=value) ser._maybe_update_cacher(clear=True) # reset the sliced object if unique self.obj._iset_item(loc, ser) def _setitem_single_block(self, indexer, value, name: str): """ _setitem_with_indexer for the case when we have a single Block. 
""" from pandas import Series info_axis = self.obj._info_axis_number item_labels = self.obj._get_axis(info_axis) if isinstance(indexer, tuple): # if we are setting on the info axis ONLY # set using those methods to avoid block-splitting # logic here if ( len(indexer) > info_axis and is_integer(indexer[info_axis]) and all( com.is_null_slice(idx) for i, idx in enumerate(indexer) if i != info_axis ) ): selected_item_labels = item_labels[indexer[info_axis]] if len(item_labels.get_indexer_for([selected_item_labels])) == 1: self.obj[selected_item_labels] = value return indexer = maybe_convert_ix(*indexer) if (isinstance(value, ABCSeries) and name != "iloc") or isinstance(value, dict): # TODO(EA): ExtensionBlock.setitem this causes issues with # setting for extensionarrays that store dicts. Need to decide # if it's worth supporting that. value = self._align_series(indexer, Series(value)) elif isinstance(value, ABCDataFrame) and name != "iloc": value = self._align_frame(indexer, value) # check for chained assignment self.obj._check_is_chained_assignment_possible() # actually do the set self.obj._mgr = self.obj._mgr.setitem(indexer=indexer, value=value) self.obj._maybe_update_cacher(clear=True) def _setitem_with_indexer_missing(self, indexer, value): """ Insert new row(s) or column(s) into the Series or DataFrame. """ from pandas import Series # reindex the axis to the new value # and set inplace if self.ndim == 1: index = self.obj.index new_index = index.insert(len(index), indexer) # we have a coerced indexer, e.g. a float # that matches in an Int64Index, so # we will not create a duplicate index, rather # index to that element # e.g. 0.0 -> 0 # GH#12246 if index.is_unique: new_indexer = index.get_indexer([new_index[-1]]) if (new_indexer != -1).any(): # We get only here with loc, so can hard code return self._setitem_with_indexer(new_indexer, value, "loc") # this preserves dtype of the value new_values = Series([value])._values if len(self.obj._values): # GH#22717 handle casting compatibility that np.concatenate # does incorrectly new_values = concat_compat([self.obj._values, new_values]) self.obj._mgr = self.obj._constructor( new_values, index=new_index, name=self.obj.name )._mgr self.obj._maybe_update_cacher(clear=True) elif self.ndim == 2: if not len(self.obj.columns): # no columns and scalar raise ValueError("cannot set a frame with no defined columns") if isinstance(value, ABCSeries): # append a Series value = value.reindex(index=self.obj.columns, copy=True) value.name = indexer elif isinstance(value, dict): value = Series( value, index=self.obj.columns, name=indexer, dtype=object ) else: # a list-list if is_list_like_indexer(value): # must have conforming columns if len(value) != len(self.obj.columns): raise ValueError("cannot set a row with mismatched columns") value = Series(value, index=self.obj.columns, name=indexer) self.obj._mgr = self.obj.append(value)._mgr self.obj._maybe_update_cacher(clear=True) def _ensure_iterable_column_indexer(self, column_indexer): """ Ensure that our column indexer is something that can be iterated over. 
""" if is_integer(column_indexer): ilocs = [column_indexer] elif isinstance(column_indexer, slice): ilocs = np.arange(len(self.obj.columns))[column_indexer] elif isinstance(column_indexer, np.ndarray) and is_bool_dtype( column_indexer.dtype ): ilocs = np.arange(len(column_indexer))[column_indexer] else: ilocs = column_indexer return ilocs def _align_series(self, indexer, ser: Series, multiindex_indexer: bool = False): """ Parameters ---------- indexer : tuple, slice, scalar Indexer used to get the locations that will be set to `ser`. ser : pd.Series Values to assign to the locations specified by `indexer`. multiindex_indexer : bool, optional Defaults to False. Should be set to True if `indexer` was from a `pd.MultiIndex`, to avoid unnecessary broadcasting. Returns ------- `np.array` of `ser` broadcast to the appropriate shape for assignment to the locations selected by `indexer` """ if isinstance(indexer, (slice, np.ndarray, list, Index)): indexer = (indexer,) if isinstance(indexer, tuple): # flatten np.ndarray indexers def ravel(i): return i.ravel() if isinstance(i, np.ndarray) else i indexer = tuple(map(ravel, indexer)) aligners = [not com.is_null_slice(idx) for idx in indexer] sum_aligners = sum(aligners) single_aligner = sum_aligners == 1 is_frame = self.ndim == 2 obj = self.obj # are we a single alignable value on a non-primary # dim (e.g. panel: 1,2, or frame: 0) ? # hence need to align to a single axis dimension # rather that find all valid dims # frame if is_frame: single_aligner = single_aligner and aligners[0] # we have a frame, with multiple indexers on both axes; and a # series, so need to broadcast (see GH5206) if sum_aligners == self.ndim and all(is_sequence(_) for _ in indexer): ser = ser.reindex(obj.axes[0][indexer[0]], copy=True)._values # single indexer if len(indexer) > 1 and not multiindex_indexer: len_indexer = len(indexer[1]) ser = np.tile(ser, len_indexer).reshape(len_indexer, -1).T return ser for i, idx in enumerate(indexer): ax = obj.axes[i] # multiple aligners (or null slices) if is_sequence(idx) or isinstance(idx, slice): if single_aligner and com.is_null_slice(idx): continue new_ix = ax[idx] if not is_list_like_indexer(new_ix): new_ix = Index([new_ix]) else: new_ix = Index(new_ix) if ser.index.equals(new_ix) or not len(new_ix): return ser._values.copy() return ser.reindex(new_ix)._values # 2 dims elif single_aligner: # reindex along index ax = self.obj.axes[1] if ser.index.equals(ax) or not len(ax): return ser._values.copy() return ser.reindex(ax)._values elif is_integer(indexer) and self.ndim == 1: if is_object_dtype(self.obj): return ser ax = self.obj._get_axis(0) if ser.index.equals(ax): return ser._values.copy() return ser.reindex(ax)._values[indexer] elif is_integer(indexer): ax = self.obj._get_axis(1) if ser.index.equals(ax): return ser._values.copy() return ser.reindex(ax)._values raise ValueError("Incompatible indexer with Series") def _align_frame(self, indexer, df: DataFrame): is_frame = self.ndim == 2 if isinstance(indexer, tuple): idx, cols = None, None sindexers = [] for i, ix in enumerate(indexer): ax = self.obj.axes[i] if is_sequence(ix) or isinstance(ix, slice): if isinstance(ix, np.ndarray): ix = ix.ravel() if idx is None: idx = ax[ix] elif cols is None: cols = ax[ix] else: break else: sindexers.append(i) if idx is not None and cols is not None: if df.index.equals(idx) and df.columns.equals(cols): val = df.copy()._values else: val = df.reindex(idx, columns=cols)._values return val elif (isinstance(indexer, slice) or 
is_list_like_indexer(indexer)) and is_frame: ax = self.obj.index[indexer] if df.index.equals(ax): val = df.copy()._values else: # we have a multi-index and are trying to align # with a particular, level GH3738 if ( isinstance(ax, MultiIndex) and isinstance(df.index, MultiIndex) and ax.nlevels != df.index.nlevels ): raise TypeError( "cannot align on a multi-index with out " "specifying the join levels" ) val = df.reindex(index=ax)._values return val raise ValueError("Incompatible indexer with DataFrame") class _ScalarAccessIndexer(NDFrameIndexerBase): """ Access scalars quickly. """ def _convert_key(self, key, is_setter: bool = False): raise AbstractMethodError(self) def __getitem__(self, key): if not isinstance(key, tuple): # we could have a convertible item here (e.g. Timestamp) if not is_list_like_indexer(key): key = (key,) else: raise ValueError("Invalid call for scalar access (getting)!") key = self._convert_key(key) return self.obj._get_value(*key, takeable=self._takeable) def __setitem__(self, key, value): if isinstance(key, tuple): key = tuple(com.apply_if_callable(x, self.obj) for x in key) else: # scalar callable may return tuple key = com.apply_if_callable(key, self.obj) if not isinstance(key, tuple): key = _tuplify(self.ndim, key) key = list(self._convert_key(key, is_setter=True)) if len(key) != self.ndim: raise ValueError("Not enough indexers for scalar access (setting)!") self.obj._set_value(*key, value=value, takeable=self._takeable) @doc(IndexingMixin.at) class _AtIndexer(_ScalarAccessIndexer): _takeable = False def _convert_key(self, key, is_setter: bool = False): """ Require they keys to be the same type as the index. (so we don't fallback) """ # GH 26989 # For series, unpacking key needs to result in the label. # This is already the case for len(key) == 1; e.g. (1,) if self.ndim == 1 and len(key) > 1: key = (key,) # allow arbitrary setting if is_setter: return list(key) return key @property def _axes_are_unique(self) -> bool: # Only relevant for self.ndim == 2 assert self.ndim == 2 return self.obj.index.is_unique and self.obj.columns.is_unique def __getitem__(self, key): if self.ndim == 2 and not self._axes_are_unique: # GH#33041 fall back to .loc if not isinstance(key, tuple) or not all(is_scalar(x) for x in key): raise ValueError("Invalid call for scalar access (getting)!") return self.obj.loc[key] return super().__getitem__(key) def __setitem__(self, key, value): if self.ndim == 2 and not self._axes_are_unique: # GH#33041 fall back to .loc if not isinstance(key, tuple) or not all(is_scalar(x) for x in key): raise ValueError("Invalid call for scalar access (setting)!") self.obj.loc[key] = value return return super().__setitem__(key, value) @doc(IndexingMixin.iat) class _iAtIndexer(_ScalarAccessIndexer): _takeable = True def _convert_key(self, key, is_setter: bool = False): """ Require integer args. (and convert to label arguments) """ for i in key: if not is_integer(i): raise ValueError("iAt based indexing can only have integer indexers") return key def _tuplify(ndim: int, loc: Hashable) -> tuple[Hashable | slice, ...]: """ Given an indexer for the first dimension, create an equivalent tuple for indexing over all dimensions. Parameters ---------- ndim : int loc : object Returns ------- tuple """ _tup: list[Hashable | slice] _tup = [slice(None, None) for _ in range(ndim)] _tup[0] = loc return tuple(_tup) def convert_to_index_sliceable(obj: DataFrame, key): """ If we are index sliceable, then return my slicer, otherwise return None. 
""" idx = obj.index if isinstance(key, slice): return idx._convert_slice_indexer(key, kind="getitem") elif isinstance(key, str): # we are an actual column if key in obj.columns: return None # We might have a datetimelike string that we can translate to a # slice here via partial string indexing if idx._supports_partial_string_indexing: try: res = idx._get_string_slice(str(key)) warnings.warn( "Indexing a DataFrame with a datetimelike index using a single " "string to slice the rows, like `frame[string]`, is deprecated " "and will be removed in a future version. Use `frame.loc[string]` " "instead.", FutureWarning, stacklevel=3, ) return res except (KeyError, ValueError, NotImplementedError): return None return None def check_bool_indexer(index: Index, key) -> np.ndarray: """ Check if key is a valid boolean indexer for an object with such index and perform reindexing or conversion if needed. This function assumes that is_bool_indexer(key) == True. Parameters ---------- index : Index Index of the object on which the indexing is done. key : list-like Boolean indexer to check. Returns ------- np.array Resulting key. Raises ------ IndexError If the key does not have the same length as index. IndexingError If the index of the key is unalignable to index. """ result = key if isinstance(key, ABCSeries) and not key.index.equals(index): result = result.reindex(index) mask = isna(result._values) if mask.any(): raise IndexingError( "Unalignable boolean Series provided as " "indexer (index of the boolean Series and of " "the indexed object do not match)." ) return result.astype(bool)._values if is_object_dtype(key): # key might be object-dtype bool, check_array_indexer needs bool array result = np.asarray(result, dtype=bool) elif not is_array_like(result): # GH 33924 # key may contain nan elements, check_array_indexer needs bool array result = pd_array(result, dtype=bool) return check_array_indexer(index, result) def convert_missing_indexer(indexer): """ Reverse convert a missing indexer, which is a dict return the scalar indexer and a boolean indicating if we converted """ if isinstance(indexer, dict): # a missing key (but not a tuple indexer) indexer = indexer["key"] if isinstance(indexer, bool): raise KeyError("cannot use a single bool to index into setitem") return indexer, True return indexer, False def convert_from_missing_indexer_tuple(indexer, axes): """ Create a filtered indexer that doesn't have any missing indexers. """ def get_indexer(_i, _idx): return axes[_i].get_loc(_idx["key"]) if isinstance(_idx, dict) else _idx return tuple(get_indexer(_i, _idx) for _i, _idx in enumerate(indexer)) def maybe_convert_ix(*args): """ We likely want to take the cross-product. """ for arg in args: if not isinstance(arg, (np.ndarray, list, ABCSeries, Index)): return args return np.ix_(*args) def is_nested_tuple(tup, labels) -> bool: """ Returns ------- bool """ # check for a compatible nested tuple and multiindexes among the axes if not isinstance(tup, tuple): return False for k in tup: if is_list_like(k) or isinstance(k, slice): return isinstance(labels, MultiIndex) return False def is_label_like(key) -> bool: """ Returns ------- bool """ # select a label or row return not isinstance(key, slice) and not is_list_like_indexer(key) def need_slice(obj: slice) -> bool: """ Returns ------- bool """ return ( obj.start is not None or obj.stop is not None or (obj.step is not None and obj.step != 1) )
""" test parquet compat """ import datetime from io import BytesIO import os import pathlib from warnings import ( catch_warnings, filterwarnings, ) import numpy as np import pytest from pandas._config import get_option from pandas.compat import ( PY38, is_platform_windows, ) from pandas.compat.pyarrow import ( pa_version_under1p0, pa_version_under2p0, ) import pandas.util._test_decorators as td import pandas as pd import pandas._testing as tm from pandas.util.version import Version from pandas.io.parquet import ( FastParquetImpl, PyArrowImpl, get_engine, read_parquet, to_parquet, ) try: import pyarrow _HAVE_PYARROW = True except ImportError: _HAVE_PYARROW = False try: with catch_warnings(): # `np.bool` is a deprecated alias... filterwarnings("ignore", "`np.bool`", category=DeprecationWarning) import fastparquet _HAVE_FASTPARQUET = True except ImportError: _HAVE_FASTPARQUET = False pytestmark = pytest.mark.filterwarnings( "ignore:RangeIndex.* is deprecated:DeprecationWarning" ) # TODO(ArrayManager) fastparquet relies on BlockManager internals # setup engines & skips @pytest.fixture( params=[ pytest.param( "fastparquet", marks=pytest.mark.skipif( not _HAVE_FASTPARQUET or get_option("mode.data_manager") == "array", reason="fastparquet is not installed or ArrayManager is used", ), ), pytest.param( "pyarrow", marks=pytest.mark.skipif( not _HAVE_PYARROW, reason="pyarrow is not installed" ), ), ] ) def engine(request): return request.param @pytest.fixture def pa(): if not _HAVE_PYARROW: pytest.skip("pyarrow is not installed") return "pyarrow" @pytest.fixture def fp(): if not _HAVE_FASTPARQUET: pytest.skip("fastparquet is not installed") elif get_option("mode.data_manager") == "array": pytest.skip("ArrayManager is not supported with fastparquet") return "fastparquet" @pytest.fixture def df_compat(): return pd.DataFrame({"A": [1, 2, 3], "B": "foo"}) @pytest.fixture def df_cross_compat(): df = pd.DataFrame( { "a": list("abc"), "b": list(range(1, 4)), # 'c': np.arange(3, 6).astype('u1'), "d": np.arange(4.0, 7.0, dtype="float64"), "e": [True, False, True], "f": pd.date_range("20130101", periods=3), # 'g': pd.date_range('20130101', periods=3, # tz='US/Eastern'), # 'h': pd.date_range('20130101', periods=3, freq='ns') } ) return df @pytest.fixture def df_full(): return pd.DataFrame( { "string": list("abc"), "string_with_nan": ["a", np.nan, "c"], "string_with_none": ["a", None, "c"], "bytes": [b"foo", b"bar", b"baz"], "unicode": ["foo", "bar", "baz"], "int": list(range(1, 4)), "uint": np.arange(3, 6).astype("u1"), "float": np.arange(4.0, 7.0, dtype="float64"), "float_with_nan": [2.0, np.nan, 3.0], "bool": [True, False, True], "datetime": pd.date_range("20130101", periods=3), "datetime_with_nat": [ pd.Timestamp("20130101"), pd.NaT, pd.Timestamp("20130103"), ], } ) @pytest.fixture( params=[ datetime.datetime.now(datetime.timezone.utc), datetime.datetime.now(datetime.timezone.min), datetime.datetime.now(datetime.timezone.max), datetime.datetime.strptime("2019-01-04T16:41:24+0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24+0215", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0215", "%Y-%m-%dT%H:%M:%S%z"), ] ) def timezone_aware_date_list(request): return request.param def check_round_trip( df, engine=None, path=None, write_kwargs=None, read_kwargs=None, expected=None, check_names=True, check_like=False, check_dtype=True, repeat=2, ): """Verify parquet serializer and 
deserializer produce the same results. Performs a pandas to disk and disk to pandas round trip, then compares the 2 resulting DataFrames to verify equality. Parameters ---------- df: Dataframe engine: str, optional 'pyarrow' or 'fastparquet' path: str, optional write_kwargs: dict of str:str, optional read_kwargs: dict of str:str, optional expected: DataFrame, optional Expected deserialization result, otherwise will be equal to `df` check_names: list of str, optional Closed set of column names to be compared check_like: bool, optional If True, ignore the order of index & columns. repeat: int, optional How many times to repeat the test """ write_kwargs = write_kwargs or {"compression": None} read_kwargs = read_kwargs or {} if expected is None: expected = df if engine: write_kwargs["engine"] = engine read_kwargs["engine"] = engine def compare(repeat): for _ in range(repeat): df.to_parquet(path, **write_kwargs) with catch_warnings(record=True): actual = read_parquet(path, **read_kwargs) tm.assert_frame_equal( expected, actual, check_names=check_names, check_like=check_like, check_dtype=check_dtype, ) if path is None: with tm.ensure_clean() as path: compare(repeat) else: compare(repeat) def test_invalid_engine(df_compat): msg = "engine must be one of 'pyarrow', 'fastparquet'" with pytest.raises(ValueError, match=msg): check_round_trip(df_compat, "foo", "bar") def test_options_py(df_compat, pa): # use the set option with pd.option_context("io.parquet.engine", "pyarrow"): check_round_trip(df_compat) def test_options_fp(df_compat, fp): # use the set option with pd.option_context("io.parquet.engine", "fastparquet"): check_round_trip(df_compat) def test_options_auto(df_compat, fp, pa): # use the set option with pd.option_context("io.parquet.engine", "auto"): check_round_trip(df_compat) def test_options_get_engine(fp, pa): assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "pyarrow"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "fastparquet"): assert isinstance(get_engine("auto"), FastParquetImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "auto"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) def test_get_engine_auto_error_message(): # Expect different error messages from get_engine(engine="auto") # if engines aren't installed vs. are installed but bad version from pandas.compat._optional import VERSIONS # Do we have engines installed, but a bad version of them? pa_min_ver = VERSIONS.get("pyarrow") fp_min_ver = VERSIONS.get("fastparquet") have_pa_bad_version = ( False if not _HAVE_PYARROW else Version(pyarrow.__version__) < Version(pa_min_ver) ) have_fp_bad_version = ( False if not _HAVE_FASTPARQUET else Version(fastparquet.__version__) < Version(fp_min_ver) ) # Do we have usable engines installed? have_usable_pa = _HAVE_PYARROW and not have_pa_bad_version have_usable_fp = _HAVE_FASTPARQUET and not have_fp_bad_version if not have_usable_pa and not have_usable_fp: # No usable engines found. if have_pa_bad_version: match = f"Pandas requires version .{pa_min_ver}. or newer of .pyarrow." 
with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .pyarrow." with pytest.raises(ImportError, match=match): get_engine("auto") if have_fp_bad_version: match = f"Pandas requires version .{fp_min_ver}. or newer of .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") def test_cross_engine_pa_fp(df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=pa, compression=None) result = read_parquet(path, engine=fp) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=fp, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) def test_cross_engine_fp_pa(request, df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=fp, compression=None) with catch_warnings(record=True): result = read_parquet(path, engine=pa) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=pa, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) class Base: def check_error_on_write(self, df, engine, exc, err_msg): # check that we are raising the exception on writing with tm.ensure_clean() as path: with pytest.raises(exc, match=err_msg): to_parquet(df, path, engine, compression=None) def check_external_error_on_write(self, df, engine, exc): # check that an external library is raising the exception on writing with tm.ensure_clean() as path: with tm.external_error_raised(exc): to_parquet(df, path, engine, compression=None) @tm.network def test_parquet_read_from_url(self, df_compat, engine): if engine != "auto": pytest.importorskip(engine) url = ( "https://raw.githubusercontent.com/pandas-dev/pandas/" "master/pandas/tests/io/data/parquet/simple.parquet" ) df = read_parquet(url) tm.assert_frame_equal(df, df_compat) class TestBasic(Base): def test_error(self, engine): for obj in [ pd.Series([1, 2, 3]), 1, "foo", pd.Timestamp("20130101"), np.array([1, 2, 3]), ]: msg = "to_parquet only supports IO with DataFrames" self.check_error_on_write(obj, engine, ValueError, msg) def test_columns_dtypes(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) # unicode df.columns = ["foo", "bar"] check_round_trip(df, engine) def test_columns_dtypes_invalid(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) msg = "parquet must have string column names" # numeric df.columns = [0, 1] self.check_error_on_write(df, engine, ValueError, msg) # bytes df.columns = [b"foo", b"bar"] self.check_error_on_write(df, engine, ValueError, msg) # python object df.columns = [ datetime.datetime(2011, 1, 1, 0, 0), datetime.datetime(2011, 1, 1, 1, 1), ] self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.parametrize("compression", [None, "gzip", "snappy", "brotli"]) def test_compression(self, engine, compression): if compression == "snappy": pytest.importorskip("snappy") elif compression == "brotli": pytest.importorskip("brotli") df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine, write_kwargs={"compression": compression}) def test_read_columns(self, engine): # GH18154 df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) expected = pd.DataFrame({"string": list("abc")}) check_round_trip( df, engine, 
expected=expected, read_kwargs={"columns": ["string"]} ) def test_write_index(self, engine): check_names = engine != "fastparquet" df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine) indexes = [ [2, 3, 4], pd.date_range("20130101", periods=3), list("abc"), [1, 3, 4], ] # non-default index for index in indexes: df.index = index if isinstance(index, pd.DatetimeIndex): df.index = df.index._with_freq(None) # freq doesn't round-trip check_round_trip(df, engine, check_names=check_names) # index with meta-data df.index = [0, 1, 2] df.index.name = "foo" check_round_trip(df, engine) def test_write_multiindex(self, pa): # Not supported in fastparquet as of 0.1.3 or older pyarrow version engine = pa df = pd.DataFrame({"A": [1, 2, 3]}) index = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df.index = index check_round_trip(df, engine) def test_multiindex_with_columns(self, pa): engine = pa dates = pd.date_range("01-Jan-2018", "01-Dec-2018", freq="MS") df = pd.DataFrame(np.random.randn(2 * len(dates), 3), columns=list("ABC")) index1 = pd.MultiIndex.from_product( [["Level1", "Level2"], dates], names=["level", "date"] ) index2 = index1.copy(names=None) for index in [index1, index2]: df.index = index check_round_trip(df, engine) check_round_trip( df, engine, read_kwargs={"columns": ["A", "B"]}, expected=df[["A", "B"]] ) def test_write_ignoring_index(self, engine): # ENH 20768 # Ensure index=False omits the index from the written Parquet file. df = pd.DataFrame({"a": [1, 2, 3], "b": ["q", "r", "s"]}) write_kwargs = {"compression": None, "index": False} # Because we're dropping the index, we expect the loaded dataframe to # have the default integer index. expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore custom index df = pd.DataFrame( {"a": [1, 2, 3], "b": ["q", "r", "s"]}, index=["zyx", "wvu", "tsr"] ) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore multi-indexes as well. arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame( {"one": list(range(8)), "two": [-i for i in range(8)]}, index=arrays ) expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) def test_write_column_multiindex(self, engine): # Not able to write column multi-indexes with non-string column names. 
mi_columns = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df = pd.DataFrame(np.random.randn(4, 3), columns=mi_columns) msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Not able to write column multi-indexes with non-string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], [1, 2, 1, 2, 1, 2, 1, 2], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["Level1", "Level2"] msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column multi-indexes with string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["ColLevel1", "ColLevel2"] check_round_trip(df, engine) def test_write_column_index_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = ["bar", "baz", "foo", "qux"] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "StringCol" check_round_trip(df, engine) def test_write_column_index_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = [1, 2, 3, 4] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "NonStringCol" msg = r"parquet must have string column names" self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.filterwarnings("ignore:CategoricalBlock is deprecated:DeprecationWarning") class TestParquetPyArrow(Base): def test_basic(self, pa, df_full): df = df_full # additional supported types for pyarrow dti = pd.date_range("20130101", periods=3, tz="Europe/Brussels") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["bool_with_none"] = [True, None, True] check_round_trip(df, pa) def test_basic_subset_columns(self, pa, df_full): # GH18628 df = df_full # additional supported types for pyarrow df["datetime_tz"] = pd.date_range("20130101", periods=3, tz="Europe/Brussels") check_round_trip( df, pa, expected=df[["string", "int"]], read_kwargs={"columns": ["string", "int"]}, ) def test_to_bytes_without_path_or_buf_provided(self, pa, df_full): # GH 37105 buf_bytes = df_full.to_parquet(engine=pa) assert isinstance(buf_bytes, bytes) buf_stream = BytesIO(buf_bytes) res = read_parquet(buf_stream) tm.assert_frame_equal(df_full, res) def test_duplicate_columns(self, pa): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() self.check_error_on_write(df, pa, ValueError, "Duplicate column names found") def test_unsupported(self, pa): # timedelta df = pd.DataFrame({"a": pd.timedelta_range("1 day", periods=3)}) self.check_external_error_on_write(df, pa, NotImplementedError) # mixed python objects df = pd.DataFrame({"a": ["a", 1, 2.0]}) # pyarrow 0.11 raises ArrowTypeError # older pyarrows raise ArrowInvalid self.check_external_error_on_write(df, pa, pyarrow.ArrowException) def 
test_categorical(self, pa): # supported in >= 0.7.0 df = pd.DataFrame() df["a"] = pd.Categorical(list("abcdef")) # test for null, out-of-order values, and unobserved category df["b"] = pd.Categorical( ["bar", "foo", "foo", "bar", None, "bar"], dtype=pd.CategoricalDtype(["foo", "bar", "baz"]), ) # test for ordered flag df["c"] = pd.Categorical( ["a", "b", "c", "a", "c", "b"], categories=["b", "c", "d"], ordered=True ) check_round_trip(df, pa) @pytest.mark.xfail( is_platform_windows() and PY38, reason="localhost connection rejected", strict=False, ) def test_s3_roundtrip_explicit_fs(self, df_compat, s3_resource, pa, s3so): s3fs = pytest.importorskip("s3fs") s3 = s3fs.S3FileSystem(**s3so) kw = {"filesystem": s3} check_round_trip( df_compat, pa, path="pandas-test/pyarrow.parquet", read_kwargs=kw, write_kwargs=kw, ) def test_s3_roundtrip(self, df_compat, s3_resource, pa, s3so): # GH #19134 s3so = {"storage_options": s3so} check_round_trip( df_compat, pa, path="s3://pandas-test/pyarrow.parquet", read_kwargs=s3so, write_kwargs=s3so, ) @td.skip_if_no("s3fs") # also requires flask @pytest.mark.parametrize( "partition_col", [ ["A"], [], ], ) def test_s3_roundtrip_for_dir( self, df_compat, s3_resource, pa, partition_col, s3so ): # GH #26388 expected_df = df_compat.copy() # GH #35791 # read_table uses the new Arrow Datasets API since pyarrow 1.0.0 # Previous behaviour was pyarrow partitioned columns become 'category' dtypes # These are added to back of dataframe on read. In new API category dtype is # only used if partition field is string, but this changed again to use # category dtype for all types (not only strings) in pyarrow 2.0.0 if partition_col: partition_col_type = ( "int32" if (not pa_version_under1p0) and pa_version_under2p0 else "category" ) expected_df[partition_col] = expected_df[partition_col].astype( partition_col_type ) check_round_trip( df_compat, pa, expected=expected_df, path="s3://pandas-test/parquet_dir", read_kwargs={"storage_options": s3so}, write_kwargs={ "partition_cols": partition_col, "compression": None, "storage_options": s3so, }, check_like=True, repeat=1, ) @td.skip_if_no("pyarrow") def test_read_file_like_obj_support(self, df_compat): buffer = BytesIO() df_compat.to_parquet(buffer) df_from_buf = read_parquet(buffer) tm.assert_frame_equal(df_compat, df_from_buf) @td.skip_if_no("pyarrow") def test_expand_user(self, df_compat, monkeypatch): monkeypatch.setenv("HOME", "TestingUser") monkeypatch.setenv("USERPROFILE", "TestingUser") with pytest.raises(OSError, match=r".*TestingUser.*"): read_parquet("~/file.parquet") with pytest.raises(OSError, match=r".*TestingUser.*"): df_compat.to_parquet("~/file.parquet") def test_partition_cols_supported(self, pa, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 2 assert dataset.partitions.partition_names == set(partition_cols) assert read_parquet(path).shape == df.shape def test_partition_cols_string(self, pa, df_full): # GH #27117 partition_cols = "bool" partition_cols_list = [partition_cols] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 1 assert 
dataset.partitions.partition_names == set(partition_cols_list) assert read_parquet(path).shape == df.shape @pytest.mark.parametrize("path_type", [str, pathlib.Path]) def test_partition_cols_pathlib(self, pa, df_compat, path_type): # GH 35902 partition_cols = "B" partition_cols_list = [partition_cols] df = df_compat with tm.ensure_clean_dir() as path_str: path = path_type(path_str) df.to_parquet(path, partition_cols=partition_cols_list) assert read_parquet(path).shape == df.shape def test_empty_dataframe(self, pa): # GH #27339 df = pd.DataFrame() check_round_trip(df, pa) def test_write_with_schema(self, pa): import pyarrow df = pd.DataFrame({"x": [0, 1]}) schema = pyarrow.schema([pyarrow.field("x", type=pyarrow.bool_())]) out_df = df.astype(bool) check_round_trip(df, pa, write_kwargs={"schema": schema}, expected=out_df) @td.skip_if_no("pyarrow") def test_additional_extension_arrays(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol df = pd.DataFrame( { "a": pd.Series([1, 2, 3], dtype="Int64"), "b": pd.Series([1, 2, 3], dtype="UInt32"), "c": pd.Series(["a", None, "c"], dtype="string"), } ) check_round_trip(df, pa) df = pd.DataFrame({"a": pd.Series([1, 2, 3, None], dtype="Int64")}) check_round_trip(df, pa) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_pyarrow_backed_string_array(self, pa): # test ArrowStringArray supported through the __arrow_array__ protocol from pandas.core.arrays.string_arrow import ArrowStringDtype # noqa: F401 df = pd.DataFrame({"a": pd.Series(["a", None, "c"], dtype="arrow_string")}) check_round_trip(df, pa, expected=df) @td.skip_if_no("pyarrow") def test_additional_extension_types(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol + by defining a custom ExtensionType df = pd.DataFrame( { # Arrow does not yet support struct in writing to Parquet (ARROW-1644) # "c": pd.arrays.IntervalArray.from_tuples([(0, 1), (1, 2), (3, 4)]), "d": pd.period_range("2012-01-01", periods=3, freq="D"), } ) check_round_trip(df, pa) @td.skip_if_no("pyarrow") def test_use_nullable_dtypes(self, pa): import pyarrow.parquet as pq table = pyarrow.table( { "a": pyarrow.array([1, 2, 3, None], "int64"), "b": pyarrow.array([1, 2, 3, None], "uint8"), "c": pyarrow.array(["a", "b", "c", None]), "d": pyarrow.array([True, False, True, None]), } ) with tm.ensure_clean() as path: # write manually with pyarrow to write integers pq.write_table(table, path) result1 = read_parquet(path) result2 = read_parquet(path, use_nullable_dtypes=True) assert result1["a"].dtype == np.dtype("float64") expected = pd.DataFrame( { "a": pd.array([1, 2, 3, None], dtype="Int64"), "b": pd.array([1, 2, 3, None], dtype="UInt8"), "c": pd.array(["a", "b", "c", None], dtype="string"), "d": pd.array([True, False, True, None], dtype="boolean"), } ) tm.assert_frame_equal(result2, expected) def test_timestamp_nanoseconds(self, pa): # with version 2.0, pyarrow defaults to writing the nanoseconds, so # this should work without error df = pd.DataFrame({"a": pd.date_range("2017-01-01", freq="1n", periods=10)}) check_round_trip(df, pa, write_kwargs={"version": "2.0"}) def test_timezone_aware_index(self, pa, timezone_aware_date_list): if not pa_version_under2p0: # temporary skip this test until it is properly resolved # https://github.com/pandas-dev/pandas/issues/37286 pytest.skip() idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) # see gh-36004 # compare time(zone) values only, skip their 
class: # pyarrow always creates fixed offset timezones using pytz.FixedOffset() # even if it was datetime.timezone() originally # # technically they are the same: # they both implement datetime.tzinfo # they both wrap datetime.timedelta() # this use-case sets the resolution to 1 minute check_round_trip(df, pa, check_dtype=False) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_filter_row_groups(self, pa): # https://github.com/pandas-dev/pandas/issues/26551 df = pd.DataFrame({"a": list(range(0, 3))}) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet( path, pa, filters=[("a", "==", 0)], use_legacy_dataset=False ) assert len(result) == 1 def test_read_parquet_manager(self, pa, using_array_manager): # ensure that read_parquet honors the pandas.options.mode.data_manager option df = pd.DataFrame(np.random.randn(10, 3), columns=["A", "B", "C"]) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet(path, pa) if using_array_manager: assert isinstance(result._mgr, pd.core.internals.ArrayManager) else: assert isinstance(result._mgr, pd.core.internals.BlockManager) class TestParquetFastParquet(Base): def test_basic(self, fp, df_full): df = df_full dti = pd.date_range("20130101", periods=3, tz="US/Eastern") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["timedelta"] = pd.timedelta_range("1 day", periods=3) check_round_trip(df, fp) @pytest.mark.skip(reason="not supported") def test_duplicate_columns(self, fp): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() msg = "Cannot create parquet dataset with duplicate column names" self.check_error_on_write(df, fp, ValueError, msg) def test_bool_with_none(self, fp): df = pd.DataFrame({"a": [True, None, False]}) expected = pd.DataFrame({"a": [1.0, np.nan, 0.0]}, dtype="float16") check_round_trip(df, fp, expected=expected) def test_unsupported(self, fp): # period df = pd.DataFrame({"a": pd.period_range("2013", freq="M", periods=3)}) # error from fastparquet -> don't check exact error message self.check_error_on_write(df, fp, ValueError, None) # mixed df = pd.DataFrame({"a": ["a", 1, 2.0]}) msg = "Can't infer object conversion type" self.check_error_on_write(df, fp, ValueError, msg) def test_categorical(self, fp): df = pd.DataFrame({"a": pd.Categorical(list("abc"))}) check_round_trip(df, fp) def test_filter_row_groups(self, fp): d = {"a": list(range(0, 3))} df = pd.DataFrame(d) with tm.ensure_clean() as path: df.to_parquet(path, fp, compression=None, row_group_offsets=1) result = read_parquet(path, fp, filters=[("a", "==", 0)]) assert len(result) == 1 def test_s3_roundtrip(self, df_compat, s3_resource, fp, s3so): # GH #19134 check_round_trip( df_compat, fp, path="s3://pandas-test/fastparquet.parquet", read_kwargs={"storage_options": s3so}, write_kwargs={"compression": None, "storage_options": s3so}, ) def test_partition_cols_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_partition_cols_string(self, fp, df_full): # GH #27117 partition_cols = "bool" df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, 
compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 1 def test_partition_on_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_error_on_using_partition_cols_and_partition_on(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full msg = ( "Cannot use both partition_on and partition_cols. Use partition_cols for " "partitioning data" ) with pytest.raises(ValueError, match=msg): with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, partition_cols=partition_cols, ) def test_empty_dataframe(self, fp): # GH #27339 df = pd.DataFrame() expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_timezone_aware_index(self, fp, timezone_aware_date_list): idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_use_nullable_dtypes_not_supported(self, fp): df = pd.DataFrame({"a": [1, 2]}) with tm.ensure_clean() as path: df.to_parquet(path) with pytest.raises(ValueError, match="not supported for the fastparquet"): read_parquet(path, engine="fastparquet", use_nullable_dtypes=True)
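The check_round_trip helper in the test module above reduces to a write, read, compare cycle. A minimal standalone sketch of that cycle, assuming pyarrow is installed; the temporary directory here merely stands in for tm.ensure_clean:

import os
import tempfile

import pandas as pd
import pandas._testing as tm

# Write a small frame with the pyarrow engine and no compression, read it
# back, and assert the round trip is lossless; check_round_trip runs this
# same cycle `repeat` times.
df = pd.DataFrame({"A": [1, 2, 3], "B": "foo"})

with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "roundtrip.parquet")
    df.to_parquet(path, engine="pyarrow", compression=None)
    result = pd.read_parquet(path, engine="pyarrow")

tm.assert_frame_equal(result, df)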
datapythonista/pandas
pandas/tests/io/test_parquet.py
pandas/core/indexing.py
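The test_options_* tests in the same module pin down how the io.parquet.engine option interacts with engine="auto". A short sketch of the rule they assert, again assuming pyarrow is installed:

import pandas as pd
from pandas.io.parquet import PyArrowImpl, get_engine

# While the option is set to "pyarrow", the "auto" engine must resolve to
# the pyarrow implementation, which is what test_options_get_engine checks.
with pd.option_context("io.parquet.engine", "pyarrow"):
    assert isinstance(get_engine("auto"), PyArrowImpl)
    assert isinstance(get_engine("pyarrow"), PyArrowImpl)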
from pandas import ( TimedeltaIndex, timedelta_range, ) import pandas._testing as tm class TestTimedeltaIndexDelete: def test_delete(self): idx = timedelta_range(start="1 Days", periods=5, freq="D", name="idx") # preserve freq expected_0 = timedelta_range(start="2 Days", periods=4, freq="D", name="idx") expected_4 = timedelta_range(start="1 Days", periods=4, freq="D", name="idx") # reset freq to None expected_1 = TimedeltaIndex( ["1 day", "3 day", "4 day", "5 day"], freq=None, name="idx" ) cases = { 0: expected_0, -5: expected_0, -1: expected_4, 4: expected_4, 1: expected_1, } for n, expected in cases.items(): result = idx.delete(n) tm.assert_index_equal(result, expected) assert result.name == expected.name assert result.freq == expected.freq with tm.external_error_raised((IndexError, ValueError)): # either depending on numpy version idx.delete(5) def test_delete_slice(self): idx = timedelta_range(start="1 days", periods=10, freq="D", name="idx") # preserve freq expected_0_2 = timedelta_range(start="4 days", periods=7, freq="D", name="idx") expected_7_9 = timedelta_range(start="1 days", periods=7, freq="D", name="idx") # reset freq to None expected_3_5 = TimedeltaIndex( ["1 d", "2 d", "3 d", "7 d", "8 d", "9 d", "10d"], freq=None, name="idx" ) cases = { (0, 1, 2): expected_0_2, (7, 8, 9): expected_7_9, (3, 4, 5): expected_3_5, } for n, expected in cases.items(): result = idx.delete(n) tm.assert_index_equal(result, expected) assert result.name == expected.name assert result.freq == expected.freq result = idx.delete(slice(n[0], n[-1] + 1)) tm.assert_index_equal(result, expected) assert result.name == expected.name assert result.freq == expected.freq def test_delete_doesnt_infer_freq(self): # GH#30655 behavior matches DatetimeIndex tdi = TimedeltaIndex(["1 Day", "2 Days", None, "3 Days", "4 Days"]) result = tdi.delete(2) assert result.freq is None
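The TimedeltaIndex.delete tests above encode when the freq attribute survives a deletion. A compact illustration of that rule, written as a sketch rather than as part of the test file:

import pandas as pd

idx = pd.timedelta_range(start="1 Days", periods=5, freq="D", name="idx")

# Deleting an endpoint keeps the regular daily spacing, so freq is preserved.
assert idx.delete(0).freq == idx.freq

# Deleting an interior element breaks the spacing, so freq is reset to None.
assert idx.delete(1).freq is None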
""" test parquet compat """ import datetime from io import BytesIO import os import pathlib from warnings import ( catch_warnings, filterwarnings, ) import numpy as np import pytest from pandas._config import get_option from pandas.compat import ( PY38, is_platform_windows, ) from pandas.compat.pyarrow import ( pa_version_under1p0, pa_version_under2p0, ) import pandas.util._test_decorators as td import pandas as pd import pandas._testing as tm from pandas.util.version import Version from pandas.io.parquet import ( FastParquetImpl, PyArrowImpl, get_engine, read_parquet, to_parquet, ) try: import pyarrow _HAVE_PYARROW = True except ImportError: _HAVE_PYARROW = False try: with catch_warnings(): # `np.bool` is a deprecated alias... filterwarnings("ignore", "`np.bool`", category=DeprecationWarning) import fastparquet _HAVE_FASTPARQUET = True except ImportError: _HAVE_FASTPARQUET = False pytestmark = pytest.mark.filterwarnings( "ignore:RangeIndex.* is deprecated:DeprecationWarning" ) # TODO(ArrayManager) fastparquet relies on BlockManager internals # setup engines & skips @pytest.fixture( params=[ pytest.param( "fastparquet", marks=pytest.mark.skipif( not _HAVE_FASTPARQUET or get_option("mode.data_manager") == "array", reason="fastparquet is not installed or ArrayManager is used", ), ), pytest.param( "pyarrow", marks=pytest.mark.skipif( not _HAVE_PYARROW, reason="pyarrow is not installed" ), ), ] ) def engine(request): return request.param @pytest.fixture def pa(): if not _HAVE_PYARROW: pytest.skip("pyarrow is not installed") return "pyarrow" @pytest.fixture def fp(): if not _HAVE_FASTPARQUET: pytest.skip("fastparquet is not installed") elif get_option("mode.data_manager") == "array": pytest.skip("ArrayManager is not supported with fastparquet") return "fastparquet" @pytest.fixture def df_compat(): return pd.DataFrame({"A": [1, 2, 3], "B": "foo"}) @pytest.fixture def df_cross_compat(): df = pd.DataFrame( { "a": list("abc"), "b": list(range(1, 4)), # 'c': np.arange(3, 6).astype('u1'), "d": np.arange(4.0, 7.0, dtype="float64"), "e": [True, False, True], "f": pd.date_range("20130101", periods=3), # 'g': pd.date_range('20130101', periods=3, # tz='US/Eastern'), # 'h': pd.date_range('20130101', periods=3, freq='ns') } ) return df @pytest.fixture def df_full(): return pd.DataFrame( { "string": list("abc"), "string_with_nan": ["a", np.nan, "c"], "string_with_none": ["a", None, "c"], "bytes": [b"foo", b"bar", b"baz"], "unicode": ["foo", "bar", "baz"], "int": list(range(1, 4)), "uint": np.arange(3, 6).astype("u1"), "float": np.arange(4.0, 7.0, dtype="float64"), "float_with_nan": [2.0, np.nan, 3.0], "bool": [True, False, True], "datetime": pd.date_range("20130101", periods=3), "datetime_with_nat": [ pd.Timestamp("20130101"), pd.NaT, pd.Timestamp("20130103"), ], } ) @pytest.fixture( params=[ datetime.datetime.now(datetime.timezone.utc), datetime.datetime.now(datetime.timezone.min), datetime.datetime.now(datetime.timezone.max), datetime.datetime.strptime("2019-01-04T16:41:24+0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24+0215", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0215", "%Y-%m-%dT%H:%M:%S%z"), ] ) def timezone_aware_date_list(request): return request.param def check_round_trip( df, engine=None, path=None, write_kwargs=None, read_kwargs=None, expected=None, check_names=True, check_like=False, check_dtype=True, repeat=2, ): """Verify parquet serializer and 
deserializer produce the same results. Performs a pandas to disk and disk to pandas round trip, then compares the 2 resulting DataFrames to verify equality. Parameters ---------- df: Dataframe engine: str, optional 'pyarrow' or 'fastparquet' path: str, optional write_kwargs: dict of str:str, optional read_kwargs: dict of str:str, optional expected: DataFrame, optional Expected deserialization result, otherwise will be equal to `df` check_names: list of str, optional Closed set of column names to be compared check_like: bool, optional If True, ignore the order of index & columns. repeat: int, optional How many times to repeat the test """ write_kwargs = write_kwargs or {"compression": None} read_kwargs = read_kwargs or {} if expected is None: expected = df if engine: write_kwargs["engine"] = engine read_kwargs["engine"] = engine def compare(repeat): for _ in range(repeat): df.to_parquet(path, **write_kwargs) with catch_warnings(record=True): actual = read_parquet(path, **read_kwargs) tm.assert_frame_equal( expected, actual, check_names=check_names, check_like=check_like, check_dtype=check_dtype, ) if path is None: with tm.ensure_clean() as path: compare(repeat) else: compare(repeat) def test_invalid_engine(df_compat): msg = "engine must be one of 'pyarrow', 'fastparquet'" with pytest.raises(ValueError, match=msg): check_round_trip(df_compat, "foo", "bar") def test_options_py(df_compat, pa): # use the set option with pd.option_context("io.parquet.engine", "pyarrow"): check_round_trip(df_compat) def test_options_fp(df_compat, fp): # use the set option with pd.option_context("io.parquet.engine", "fastparquet"): check_round_trip(df_compat) def test_options_auto(df_compat, fp, pa): # use the set option with pd.option_context("io.parquet.engine", "auto"): check_round_trip(df_compat) def test_options_get_engine(fp, pa): assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "pyarrow"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "fastparquet"): assert isinstance(get_engine("auto"), FastParquetImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "auto"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) def test_get_engine_auto_error_message(): # Expect different error messages from get_engine(engine="auto") # if engines aren't installed vs. are installed but bad version from pandas.compat._optional import VERSIONS # Do we have engines installed, but a bad version of them? pa_min_ver = VERSIONS.get("pyarrow") fp_min_ver = VERSIONS.get("fastparquet") have_pa_bad_version = ( False if not _HAVE_PYARROW else Version(pyarrow.__version__) < Version(pa_min_ver) ) have_fp_bad_version = ( False if not _HAVE_FASTPARQUET else Version(fastparquet.__version__) < Version(fp_min_ver) ) # Do we have usable engines installed? have_usable_pa = _HAVE_PYARROW and not have_pa_bad_version have_usable_fp = _HAVE_FASTPARQUET and not have_fp_bad_version if not have_usable_pa and not have_usable_fp: # No usable engines found. if have_pa_bad_version: match = f"Pandas requires version .{pa_min_ver}. or newer of .pyarrow." 
with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .pyarrow." with pytest.raises(ImportError, match=match): get_engine("auto") if have_fp_bad_version: match = f"Pandas requires version .{fp_min_ver}. or newer of .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") def test_cross_engine_pa_fp(df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=pa, compression=None) result = read_parquet(path, engine=fp) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=fp, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) def test_cross_engine_fp_pa(request, df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=fp, compression=None) with catch_warnings(record=True): result = read_parquet(path, engine=pa) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=pa, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) class Base: def check_error_on_write(self, df, engine, exc, err_msg): # check that we are raising the exception on writing with tm.ensure_clean() as path: with pytest.raises(exc, match=err_msg): to_parquet(df, path, engine, compression=None) def check_external_error_on_write(self, df, engine, exc): # check that an external library is raising the exception on writing with tm.ensure_clean() as path: with tm.external_error_raised(exc): to_parquet(df, path, engine, compression=None) @tm.network def test_parquet_read_from_url(self, df_compat, engine): if engine != "auto": pytest.importorskip(engine) url = ( "https://raw.githubusercontent.com/pandas-dev/pandas/" "master/pandas/tests/io/data/parquet/simple.parquet" ) df = read_parquet(url) tm.assert_frame_equal(df, df_compat) class TestBasic(Base): def test_error(self, engine): for obj in [ pd.Series([1, 2, 3]), 1, "foo", pd.Timestamp("20130101"), np.array([1, 2, 3]), ]: msg = "to_parquet only supports IO with DataFrames" self.check_error_on_write(obj, engine, ValueError, msg) def test_columns_dtypes(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) # unicode df.columns = ["foo", "bar"] check_round_trip(df, engine) def test_columns_dtypes_invalid(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) msg = "parquet must have string column names" # numeric df.columns = [0, 1] self.check_error_on_write(df, engine, ValueError, msg) # bytes df.columns = [b"foo", b"bar"] self.check_error_on_write(df, engine, ValueError, msg) # python object df.columns = [ datetime.datetime(2011, 1, 1, 0, 0), datetime.datetime(2011, 1, 1, 1, 1), ] self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.parametrize("compression", [None, "gzip", "snappy", "brotli"]) def test_compression(self, engine, compression): if compression == "snappy": pytest.importorskip("snappy") elif compression == "brotli": pytest.importorskip("brotli") df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine, write_kwargs={"compression": compression}) def test_read_columns(self, engine): # GH18154 df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) expected = pd.DataFrame({"string": list("abc")}) check_round_trip( df, engine, 
expected=expected, read_kwargs={"columns": ["string"]} ) def test_write_index(self, engine): check_names = engine != "fastparquet" df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine) indexes = [ [2, 3, 4], pd.date_range("20130101", periods=3), list("abc"), [1, 3, 4], ] # non-default index for index in indexes: df.index = index if isinstance(index, pd.DatetimeIndex): df.index = df.index._with_freq(None) # freq doesn't round-trip check_round_trip(df, engine, check_names=check_names) # index with meta-data df.index = [0, 1, 2] df.index.name = "foo" check_round_trip(df, engine) def test_write_multiindex(self, pa): # Not supported in fastparquet as of 0.1.3 or older pyarrow version engine = pa df = pd.DataFrame({"A": [1, 2, 3]}) index = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df.index = index check_round_trip(df, engine) def test_multiindex_with_columns(self, pa): engine = pa dates = pd.date_range("01-Jan-2018", "01-Dec-2018", freq="MS") df = pd.DataFrame(np.random.randn(2 * len(dates), 3), columns=list("ABC")) index1 = pd.MultiIndex.from_product( [["Level1", "Level2"], dates], names=["level", "date"] ) index2 = index1.copy(names=None) for index in [index1, index2]: df.index = index check_round_trip(df, engine) check_round_trip( df, engine, read_kwargs={"columns": ["A", "B"]}, expected=df[["A", "B"]] ) def test_write_ignoring_index(self, engine): # ENH 20768 # Ensure index=False omits the index from the written Parquet file. df = pd.DataFrame({"a": [1, 2, 3], "b": ["q", "r", "s"]}) write_kwargs = {"compression": None, "index": False} # Because we're dropping the index, we expect the loaded dataframe to # have the default integer index. expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore custom index df = pd.DataFrame( {"a": [1, 2, 3], "b": ["q", "r", "s"]}, index=["zyx", "wvu", "tsr"] ) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore multi-indexes as well. arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame( {"one": list(range(8)), "two": [-i for i in range(8)]}, index=arrays ) expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) def test_write_column_multiindex(self, engine): # Not able to write column multi-indexes with non-string column names. 
mi_columns = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df = pd.DataFrame(np.random.randn(4, 3), columns=mi_columns) msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Not able to write column multi-indexes with non-string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], [1, 2, 1, 2, 1, 2, 1, 2], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["Level1", "Level2"] msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column multi-indexes with string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["ColLevel1", "ColLevel2"] check_round_trip(df, engine) def test_write_column_index_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = ["bar", "baz", "foo", "qux"] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "StringCol" check_round_trip(df, engine) def test_write_column_index_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = [1, 2, 3, 4] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "NonStringCol" msg = r"parquet must have string column names" self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.filterwarnings("ignore:CategoricalBlock is deprecated:DeprecationWarning") class TestParquetPyArrow(Base): def test_basic(self, pa, df_full): df = df_full # additional supported types for pyarrow dti = pd.date_range("20130101", periods=3, tz="Europe/Brussels") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["bool_with_none"] = [True, None, True] check_round_trip(df, pa) def test_basic_subset_columns(self, pa, df_full): # GH18628 df = df_full # additional supported types for pyarrow df["datetime_tz"] = pd.date_range("20130101", periods=3, tz="Europe/Brussels") check_round_trip( df, pa, expected=df[["string", "int"]], read_kwargs={"columns": ["string", "int"]}, ) def test_to_bytes_without_path_or_buf_provided(self, pa, df_full): # GH 37105 buf_bytes = df_full.to_parquet(engine=pa) assert isinstance(buf_bytes, bytes) buf_stream = BytesIO(buf_bytes) res = read_parquet(buf_stream) tm.assert_frame_equal(df_full, res) def test_duplicate_columns(self, pa): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() self.check_error_on_write(df, pa, ValueError, "Duplicate column names found") def test_unsupported(self, pa): # timedelta df = pd.DataFrame({"a": pd.timedelta_range("1 day", periods=3)}) self.check_external_error_on_write(df, pa, NotImplementedError) # mixed python objects df = pd.DataFrame({"a": ["a", 1, 2.0]}) # pyarrow 0.11 raises ArrowTypeError # older pyarrows raise ArrowInvalid self.check_external_error_on_write(df, pa, pyarrow.ArrowException) def 
test_categorical(self, pa): # supported in >= 0.7.0 df = pd.DataFrame() df["a"] = pd.Categorical(list("abcdef")) # test for null, out-of-order values, and unobserved category df["b"] = pd.Categorical( ["bar", "foo", "foo", "bar", None, "bar"], dtype=pd.CategoricalDtype(["foo", "bar", "baz"]), ) # test for ordered flag df["c"] = pd.Categorical( ["a", "b", "c", "a", "c", "b"], categories=["b", "c", "d"], ordered=True ) check_round_trip(df, pa) @pytest.mark.xfail( is_platform_windows() and PY38, reason="localhost connection rejected", strict=False, ) def test_s3_roundtrip_explicit_fs(self, df_compat, s3_resource, pa, s3so): s3fs = pytest.importorskip("s3fs") s3 = s3fs.S3FileSystem(**s3so) kw = {"filesystem": s3} check_round_trip( df_compat, pa, path="pandas-test/pyarrow.parquet", read_kwargs=kw, write_kwargs=kw, ) def test_s3_roundtrip(self, df_compat, s3_resource, pa, s3so): # GH #19134 s3so = {"storage_options": s3so} check_round_trip( df_compat, pa, path="s3://pandas-test/pyarrow.parquet", read_kwargs=s3so, write_kwargs=s3so, ) @td.skip_if_no("s3fs") # also requires flask @pytest.mark.parametrize( "partition_col", [ ["A"], [], ], ) def test_s3_roundtrip_for_dir( self, df_compat, s3_resource, pa, partition_col, s3so ): # GH #26388 expected_df = df_compat.copy() # GH #35791 # read_table uses the new Arrow Datasets API since pyarrow 1.0.0 # Previous behaviour was pyarrow partitioned columns become 'category' dtypes # These are added to back of dataframe on read. In new API category dtype is # only used if partition field is string, but this changed again to use # category dtype for all types (not only strings) in pyarrow 2.0.0 if partition_col: partition_col_type = ( "int32" if (not pa_version_under1p0) and pa_version_under2p0 else "category" ) expected_df[partition_col] = expected_df[partition_col].astype( partition_col_type ) check_round_trip( df_compat, pa, expected=expected_df, path="s3://pandas-test/parquet_dir", read_kwargs={"storage_options": s3so}, write_kwargs={ "partition_cols": partition_col, "compression": None, "storage_options": s3so, }, check_like=True, repeat=1, ) @td.skip_if_no("pyarrow") def test_read_file_like_obj_support(self, df_compat): buffer = BytesIO() df_compat.to_parquet(buffer) df_from_buf = read_parquet(buffer) tm.assert_frame_equal(df_compat, df_from_buf) @td.skip_if_no("pyarrow") def test_expand_user(self, df_compat, monkeypatch): monkeypatch.setenv("HOME", "TestingUser") monkeypatch.setenv("USERPROFILE", "TestingUser") with pytest.raises(OSError, match=r".*TestingUser.*"): read_parquet("~/file.parquet") with pytest.raises(OSError, match=r".*TestingUser.*"): df_compat.to_parquet("~/file.parquet") def test_partition_cols_supported(self, pa, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 2 assert dataset.partitions.partition_names == set(partition_cols) assert read_parquet(path).shape == df.shape def test_partition_cols_string(self, pa, df_full): # GH #27117 partition_cols = "bool" partition_cols_list = [partition_cols] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 1 assert 
dataset.partitions.partition_names == set(partition_cols_list) assert read_parquet(path).shape == df.shape @pytest.mark.parametrize("path_type", [str, pathlib.Path]) def test_partition_cols_pathlib(self, pa, df_compat, path_type): # GH 35902 partition_cols = "B" partition_cols_list = [partition_cols] df = df_compat with tm.ensure_clean_dir() as path_str: path = path_type(path_str) df.to_parquet(path, partition_cols=partition_cols_list) assert read_parquet(path).shape == df.shape def test_empty_dataframe(self, pa): # GH #27339 df = pd.DataFrame() check_round_trip(df, pa) def test_write_with_schema(self, pa): import pyarrow df = pd.DataFrame({"x": [0, 1]}) schema = pyarrow.schema([pyarrow.field("x", type=pyarrow.bool_())]) out_df = df.astype(bool) check_round_trip(df, pa, write_kwargs={"schema": schema}, expected=out_df) @td.skip_if_no("pyarrow") def test_additional_extension_arrays(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol df = pd.DataFrame( { "a": pd.Series([1, 2, 3], dtype="Int64"), "b": pd.Series([1, 2, 3], dtype="UInt32"), "c": pd.Series(["a", None, "c"], dtype="string"), } ) check_round_trip(df, pa) df = pd.DataFrame({"a": pd.Series([1, 2, 3, None], dtype="Int64")}) check_round_trip(df, pa) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_pyarrow_backed_string_array(self, pa): # test ArrowStringArray supported through the __arrow_array__ protocol from pandas.core.arrays.string_arrow import ArrowStringDtype # noqa: F401 df = pd.DataFrame({"a": pd.Series(["a", None, "c"], dtype="arrow_string")}) check_round_trip(df, pa, expected=df) @td.skip_if_no("pyarrow") def test_additional_extension_types(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol + by defining a custom ExtensionType df = pd.DataFrame( { # Arrow does not yet support struct in writing to Parquet (ARROW-1644) # "c": pd.arrays.IntervalArray.from_tuples([(0, 1), (1, 2), (3, 4)]), "d": pd.period_range("2012-01-01", periods=3, freq="D"), } ) check_round_trip(df, pa) @td.skip_if_no("pyarrow") def test_use_nullable_dtypes(self, pa): import pyarrow.parquet as pq table = pyarrow.table( { "a": pyarrow.array([1, 2, 3, None], "int64"), "b": pyarrow.array([1, 2, 3, None], "uint8"), "c": pyarrow.array(["a", "b", "c", None]), "d": pyarrow.array([True, False, True, None]), } ) with tm.ensure_clean() as path: # write manually with pyarrow to write integers pq.write_table(table, path) result1 = read_parquet(path) result2 = read_parquet(path, use_nullable_dtypes=True) assert result1["a"].dtype == np.dtype("float64") expected = pd.DataFrame( { "a": pd.array([1, 2, 3, None], dtype="Int64"), "b": pd.array([1, 2, 3, None], dtype="UInt8"), "c": pd.array(["a", "b", "c", None], dtype="string"), "d": pd.array([True, False, True, None], dtype="boolean"), } ) tm.assert_frame_equal(result2, expected) def test_timestamp_nanoseconds(self, pa): # with version 2.0, pyarrow defaults to writing the nanoseconds, so # this should work without error df = pd.DataFrame({"a": pd.date_range("2017-01-01", freq="1n", periods=10)}) check_round_trip(df, pa, write_kwargs={"version": "2.0"}) def test_timezone_aware_index(self, pa, timezone_aware_date_list): if not pa_version_under2p0: # temporary skip this test until it is properly resolved # https://github.com/pandas-dev/pandas/issues/37286 pytest.skip() idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) # see gh-36004 # compare time(zone) values only, skip their 
class: # pyarrow always creates fixed offset timezones using pytz.FixedOffset() # even if it was datetime.timezone() originally # # technically they are the same: # they both implement datetime.tzinfo # they both wrap datetime.timedelta() # this use-case sets the resolution to 1 minute check_round_trip(df, pa, check_dtype=False) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_filter_row_groups(self, pa): # https://github.com/pandas-dev/pandas/issues/26551 df = pd.DataFrame({"a": list(range(0, 3))}) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet( path, pa, filters=[("a", "==", 0)], use_legacy_dataset=False ) assert len(result) == 1 def test_read_parquet_manager(self, pa, using_array_manager): # ensure that read_parquet honors the pandas.options.mode.data_manager option df = pd.DataFrame(np.random.randn(10, 3), columns=["A", "B", "C"]) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet(path, pa) if using_array_manager: assert isinstance(result._mgr, pd.core.internals.ArrayManager) else: assert isinstance(result._mgr, pd.core.internals.BlockManager) class TestParquetFastParquet(Base): def test_basic(self, fp, df_full): df = df_full dti = pd.date_range("20130101", periods=3, tz="US/Eastern") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["timedelta"] = pd.timedelta_range("1 day", periods=3) check_round_trip(df, fp) @pytest.mark.skip(reason="not supported") def test_duplicate_columns(self, fp): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() msg = "Cannot create parquet dataset with duplicate column names" self.check_error_on_write(df, fp, ValueError, msg) def test_bool_with_none(self, fp): df = pd.DataFrame({"a": [True, None, False]}) expected = pd.DataFrame({"a": [1.0, np.nan, 0.0]}, dtype="float16") check_round_trip(df, fp, expected=expected) def test_unsupported(self, fp): # period df = pd.DataFrame({"a": pd.period_range("2013", freq="M", periods=3)}) # error from fastparquet -> don't check exact error message self.check_error_on_write(df, fp, ValueError, None) # mixed df = pd.DataFrame({"a": ["a", 1, 2.0]}) msg = "Can't infer object conversion type" self.check_error_on_write(df, fp, ValueError, msg) def test_categorical(self, fp): df = pd.DataFrame({"a": pd.Categorical(list("abc"))}) check_round_trip(df, fp) def test_filter_row_groups(self, fp): d = {"a": list(range(0, 3))} df = pd.DataFrame(d) with tm.ensure_clean() as path: df.to_parquet(path, fp, compression=None, row_group_offsets=1) result = read_parquet(path, fp, filters=[("a", "==", 0)]) assert len(result) == 1 def test_s3_roundtrip(self, df_compat, s3_resource, fp, s3so): # GH #19134 check_round_trip( df_compat, fp, path="s3://pandas-test/fastparquet.parquet", read_kwargs={"storage_options": s3so}, write_kwargs={"compression": None, "storage_options": s3so}, ) def test_partition_cols_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_partition_cols_string(self, fp, df_full): # GH #27117 partition_cols = "bool" df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, 
compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 1 def test_partition_on_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_error_on_using_partition_cols_and_partition_on(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full msg = ( "Cannot use both partition_on and partition_cols. Use partition_cols for " "partitioning data" ) with pytest.raises(ValueError, match=msg): with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, partition_cols=partition_cols, ) def test_empty_dataframe(self, fp): # GH #27339 df = pd.DataFrame() expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_timezone_aware_index(self, fp, timezone_aware_date_list): idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_use_nullable_dtypes_not_supported(self, fp): df = pd.DataFrame({"a": [1, 2]}) with tm.ensure_clean() as path: df.to_parquet(path) with pytest.raises(ValueError, match="not supported for the fastparquet"): read_parquet(path, engine="fastparquet", use_nullable_dtypes=True)
datapythonista/pandas
pandas/tests/io/test_parquet.py
pandas/tests/indexes/timedeltas/test_delete.py
from __future__ import annotations from contextlib import contextmanager import re from typing import ( Sequence, Type, cast, ) import warnings @contextmanager def assert_produces_warning( expected_warning: type[Warning] | bool | None = Warning, filter_level="always", check_stacklevel: bool = True, raise_on_extra_warnings: bool = True, match: str | None = None, ): """ Context manager for running code expected to either raise a specific warning, or not raise any warnings. Verifies that the code raises the expected warning, and that it does not raise any other unexpected warnings. It is basically a wrapper around ``warnings.catch_warnings``. Parameters ---------- expected_warning : {Warning, False, None}, default Warning The type of Exception raised. ``exception.Warning`` is the base class for all warnings. To check that no warning is returned, specify ``False`` or ``None``. filter_level : str or None, default "always" Specifies whether warnings are ignored, displayed, or turned into errors. Valid values are: * "error" - turns matching warnings into exceptions * "ignore" - discard the warning * "always" - always emit a warning * "default" - print the warning the first time it is generated from each location * "module" - print the warning the first time it is generated from each module * "once" - print the warning the first time it is generated check_stacklevel : bool, default True If True, displays the line that called the function containing the warning to show were the function is called. Otherwise, the line that implements the function is displayed. raise_on_extra_warnings : bool, default True Whether extra warnings not of the type `expected_warning` should cause the test to fail. match : str, optional Match warning message. Examples -------- >>> import warnings >>> with assert_produces_warning(): ... warnings.warn(UserWarning()) ... >>> with assert_produces_warning(False): ... warnings.warn(RuntimeWarning()) ... Traceback (most recent call last): ... AssertionError: Caused unexpected warning(s): ['RuntimeWarning']. >>> with assert_produces_warning(UserWarning): ... warnings.warn(RuntimeWarning()) Traceback (most recent call last): ... AssertionError: Did not see expected warning of class 'UserWarning'. ..warn:: This is *not* thread-safe. 
""" __tracebackhide__ = True with warnings.catch_warnings(record=True) as w: warnings.simplefilter(filter_level) yield w if expected_warning: expected_warning = cast(Type[Warning], expected_warning) _assert_caught_expected_warning( caught_warnings=w, expected_warning=expected_warning, match=match, check_stacklevel=check_stacklevel, ) if raise_on_extra_warnings: _assert_caught_no_extra_warnings( caught_warnings=w, expected_warning=expected_warning, ) def _assert_caught_expected_warning( *, caught_warnings: Sequence[warnings.WarningMessage], expected_warning: type[Warning], match: str | None, check_stacklevel: bool, ) -> None: """Assert that there was the expected warning among the caught warnings.""" saw_warning = False matched_message = False for actual_warning in caught_warnings: if issubclass(actual_warning.category, expected_warning): saw_warning = True if check_stacklevel and issubclass( actual_warning.category, (FutureWarning, DeprecationWarning) ): _assert_raised_with_correct_stacklevel(actual_warning) if match is not None and re.search(match, str(actual_warning.message)): matched_message = True if not saw_warning: raise AssertionError( f"Did not see expected warning of class " f"{repr(expected_warning.__name__)}" ) if match and not matched_message: raise AssertionError( f"Did not see warning {repr(expected_warning.__name__)} " f"matching {match}" ) def _assert_caught_no_extra_warnings( *, caught_warnings: Sequence[warnings.WarningMessage], expected_warning: type[Warning] | bool | None, ) -> None: """Assert that no extra warnings apart from the expected ones are caught.""" extra_warnings = [] for actual_warning in caught_warnings: if _is_unexpected_warning(actual_warning, expected_warning): unclosed = "unclosed transport <asyncio.sslproto._SSLProtocolTransport" if actual_warning.category == ResourceWarning and unclosed in str( actual_warning.message ): # FIXME: kludge because pytest.filterwarnings does not # suppress these, xref GH#38630 continue extra_warnings.append( ( actual_warning.category.__name__, actual_warning.message, actual_warning.filename, actual_warning.lineno, ) ) if extra_warnings: raise AssertionError(f"Caused unexpected warning(s): {repr(extra_warnings)}") def _is_unexpected_warning( actual_warning: warnings.WarningMessage, expected_warning: type[Warning] | bool | None, ) -> bool: """Check if the actual warning issued is unexpected.""" if actual_warning and not expected_warning: return True expected_warning = cast(Type[Warning], expected_warning) return bool(not issubclass(actual_warning.category, expected_warning)) def _assert_raised_with_correct_stacklevel( actual_warning: warnings.WarningMessage, ) -> None: from inspect import ( getframeinfo, stack, ) caller = getframeinfo(stack()[4][0]) msg = ( "Warning not set with correct stacklevel. " f"File where warning is raised: {actual_warning.filename} != " f"{caller.filename}. Warning message: {actual_warning.message}" ) assert actual_warning.filename == caller.filename, msg
""" test parquet compat """ import datetime from io import BytesIO import os import pathlib from warnings import ( catch_warnings, filterwarnings, ) import numpy as np import pytest from pandas._config import get_option from pandas.compat import ( PY38, is_platform_windows, ) from pandas.compat.pyarrow import ( pa_version_under1p0, pa_version_under2p0, ) import pandas.util._test_decorators as td import pandas as pd import pandas._testing as tm from pandas.util.version import Version from pandas.io.parquet import ( FastParquetImpl, PyArrowImpl, get_engine, read_parquet, to_parquet, ) try: import pyarrow _HAVE_PYARROW = True except ImportError: _HAVE_PYARROW = False try: with catch_warnings(): # `np.bool` is a deprecated alias... filterwarnings("ignore", "`np.bool`", category=DeprecationWarning) import fastparquet _HAVE_FASTPARQUET = True except ImportError: _HAVE_FASTPARQUET = False pytestmark = pytest.mark.filterwarnings( "ignore:RangeIndex.* is deprecated:DeprecationWarning" ) # TODO(ArrayManager) fastparquet relies on BlockManager internals # setup engines & skips @pytest.fixture( params=[ pytest.param( "fastparquet", marks=pytest.mark.skipif( not _HAVE_FASTPARQUET or get_option("mode.data_manager") == "array", reason="fastparquet is not installed or ArrayManager is used", ), ), pytest.param( "pyarrow", marks=pytest.mark.skipif( not _HAVE_PYARROW, reason="pyarrow is not installed" ), ), ] ) def engine(request): return request.param @pytest.fixture def pa(): if not _HAVE_PYARROW: pytest.skip("pyarrow is not installed") return "pyarrow" @pytest.fixture def fp(): if not _HAVE_FASTPARQUET: pytest.skip("fastparquet is not installed") elif get_option("mode.data_manager") == "array": pytest.skip("ArrayManager is not supported with fastparquet") return "fastparquet" @pytest.fixture def df_compat(): return pd.DataFrame({"A": [1, 2, 3], "B": "foo"}) @pytest.fixture def df_cross_compat(): df = pd.DataFrame( { "a": list("abc"), "b": list(range(1, 4)), # 'c': np.arange(3, 6).astype('u1'), "d": np.arange(4.0, 7.0, dtype="float64"), "e": [True, False, True], "f": pd.date_range("20130101", periods=3), # 'g': pd.date_range('20130101', periods=3, # tz='US/Eastern'), # 'h': pd.date_range('20130101', periods=3, freq='ns') } ) return df @pytest.fixture def df_full(): return pd.DataFrame( { "string": list("abc"), "string_with_nan": ["a", np.nan, "c"], "string_with_none": ["a", None, "c"], "bytes": [b"foo", b"bar", b"baz"], "unicode": ["foo", "bar", "baz"], "int": list(range(1, 4)), "uint": np.arange(3, 6).astype("u1"), "float": np.arange(4.0, 7.0, dtype="float64"), "float_with_nan": [2.0, np.nan, 3.0], "bool": [True, False, True], "datetime": pd.date_range("20130101", periods=3), "datetime_with_nat": [ pd.Timestamp("20130101"), pd.NaT, pd.Timestamp("20130103"), ], } ) @pytest.fixture( params=[ datetime.datetime.now(datetime.timezone.utc), datetime.datetime.now(datetime.timezone.min), datetime.datetime.now(datetime.timezone.max), datetime.datetime.strptime("2019-01-04T16:41:24+0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24+0215", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0215", "%Y-%m-%dT%H:%M:%S%z"), ] ) def timezone_aware_date_list(request): return request.param def check_round_trip( df, engine=None, path=None, write_kwargs=None, read_kwargs=None, expected=None, check_names=True, check_like=False, check_dtype=True, repeat=2, ): """Verify parquet serializer and 
deserializer produce the same results. Performs a pandas to disk and disk to pandas round trip, then compares the 2 resulting DataFrames to verify equality. Parameters ---------- df: Dataframe engine: str, optional 'pyarrow' or 'fastparquet' path: str, optional write_kwargs: dict of str:str, optional read_kwargs: dict of str:str, optional expected: DataFrame, optional Expected deserialization result, otherwise will be equal to `df` check_names: list of str, optional Closed set of column names to be compared check_like: bool, optional If True, ignore the order of index & columns. repeat: int, optional How many times to repeat the test """ write_kwargs = write_kwargs or {"compression": None} read_kwargs = read_kwargs or {} if expected is None: expected = df if engine: write_kwargs["engine"] = engine read_kwargs["engine"] = engine def compare(repeat): for _ in range(repeat): df.to_parquet(path, **write_kwargs) with catch_warnings(record=True): actual = read_parquet(path, **read_kwargs) tm.assert_frame_equal( expected, actual, check_names=check_names, check_like=check_like, check_dtype=check_dtype, ) if path is None: with tm.ensure_clean() as path: compare(repeat) else: compare(repeat) def test_invalid_engine(df_compat): msg = "engine must be one of 'pyarrow', 'fastparquet'" with pytest.raises(ValueError, match=msg): check_round_trip(df_compat, "foo", "bar") def test_options_py(df_compat, pa): # use the set option with pd.option_context("io.parquet.engine", "pyarrow"): check_round_trip(df_compat) def test_options_fp(df_compat, fp): # use the set option with pd.option_context("io.parquet.engine", "fastparquet"): check_round_trip(df_compat) def test_options_auto(df_compat, fp, pa): # use the set option with pd.option_context("io.parquet.engine", "auto"): check_round_trip(df_compat) def test_options_get_engine(fp, pa): assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "pyarrow"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "fastparquet"): assert isinstance(get_engine("auto"), FastParquetImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "auto"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) def test_get_engine_auto_error_message(): # Expect different error messages from get_engine(engine="auto") # if engines aren't installed vs. are installed but bad version from pandas.compat._optional import VERSIONS # Do we have engines installed, but a bad version of them? pa_min_ver = VERSIONS.get("pyarrow") fp_min_ver = VERSIONS.get("fastparquet") have_pa_bad_version = ( False if not _HAVE_PYARROW else Version(pyarrow.__version__) < Version(pa_min_ver) ) have_fp_bad_version = ( False if not _HAVE_FASTPARQUET else Version(fastparquet.__version__) < Version(fp_min_ver) ) # Do we have usable engines installed? have_usable_pa = _HAVE_PYARROW and not have_pa_bad_version have_usable_fp = _HAVE_FASTPARQUET and not have_fp_bad_version if not have_usable_pa and not have_usable_fp: # No usable engines found. if have_pa_bad_version: match = f"Pandas requires version .{pa_min_ver}. or newer of .pyarrow." 
with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .pyarrow." with pytest.raises(ImportError, match=match): get_engine("auto") if have_fp_bad_version: match = f"Pandas requires version .{fp_min_ver}. or newer of .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") def test_cross_engine_pa_fp(df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=pa, compression=None) result = read_parquet(path, engine=fp) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=fp, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) def test_cross_engine_fp_pa(request, df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=fp, compression=None) with catch_warnings(record=True): result = read_parquet(path, engine=pa) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=pa, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) class Base: def check_error_on_write(self, df, engine, exc, err_msg): # check that we are raising the exception on writing with tm.ensure_clean() as path: with pytest.raises(exc, match=err_msg): to_parquet(df, path, engine, compression=None) def check_external_error_on_write(self, df, engine, exc): # check that an external library is raising the exception on writing with tm.ensure_clean() as path: with tm.external_error_raised(exc): to_parquet(df, path, engine, compression=None) @tm.network def test_parquet_read_from_url(self, df_compat, engine): if engine != "auto": pytest.importorskip(engine) url = ( "https://raw.githubusercontent.com/pandas-dev/pandas/" "master/pandas/tests/io/data/parquet/simple.parquet" ) df = read_parquet(url) tm.assert_frame_equal(df, df_compat) class TestBasic(Base): def test_error(self, engine): for obj in [ pd.Series([1, 2, 3]), 1, "foo", pd.Timestamp("20130101"), np.array([1, 2, 3]), ]: msg = "to_parquet only supports IO with DataFrames" self.check_error_on_write(obj, engine, ValueError, msg) def test_columns_dtypes(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) # unicode df.columns = ["foo", "bar"] check_round_trip(df, engine) def test_columns_dtypes_invalid(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) msg = "parquet must have string column names" # numeric df.columns = [0, 1] self.check_error_on_write(df, engine, ValueError, msg) # bytes df.columns = [b"foo", b"bar"] self.check_error_on_write(df, engine, ValueError, msg) # python object df.columns = [ datetime.datetime(2011, 1, 1, 0, 0), datetime.datetime(2011, 1, 1, 1, 1), ] self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.parametrize("compression", [None, "gzip", "snappy", "brotli"]) def test_compression(self, engine, compression): if compression == "snappy": pytest.importorskip("snappy") elif compression == "brotli": pytest.importorskip("brotli") df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine, write_kwargs={"compression": compression}) def test_read_columns(self, engine): # GH18154 df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) expected = pd.DataFrame({"string": list("abc")}) check_round_trip( df, engine, 
expected=expected, read_kwargs={"columns": ["string"]} ) def test_write_index(self, engine): check_names = engine != "fastparquet" df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine) indexes = [ [2, 3, 4], pd.date_range("20130101", periods=3), list("abc"), [1, 3, 4], ] # non-default index for index in indexes: df.index = index if isinstance(index, pd.DatetimeIndex): df.index = df.index._with_freq(None) # freq doesn't round-trip check_round_trip(df, engine, check_names=check_names) # index with meta-data df.index = [0, 1, 2] df.index.name = "foo" check_round_trip(df, engine) def test_write_multiindex(self, pa): # Not supported in fastparquet as of 0.1.3 or older pyarrow version engine = pa df = pd.DataFrame({"A": [1, 2, 3]}) index = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df.index = index check_round_trip(df, engine) def test_multiindex_with_columns(self, pa): engine = pa dates = pd.date_range("01-Jan-2018", "01-Dec-2018", freq="MS") df = pd.DataFrame(np.random.randn(2 * len(dates), 3), columns=list("ABC")) index1 = pd.MultiIndex.from_product( [["Level1", "Level2"], dates], names=["level", "date"] ) index2 = index1.copy(names=None) for index in [index1, index2]: df.index = index check_round_trip(df, engine) check_round_trip( df, engine, read_kwargs={"columns": ["A", "B"]}, expected=df[["A", "B"]] ) def test_write_ignoring_index(self, engine): # ENH 20768 # Ensure index=False omits the index from the written Parquet file. df = pd.DataFrame({"a": [1, 2, 3], "b": ["q", "r", "s"]}) write_kwargs = {"compression": None, "index": False} # Because we're dropping the index, we expect the loaded dataframe to # have the default integer index. expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore custom index df = pd.DataFrame( {"a": [1, 2, 3], "b": ["q", "r", "s"]}, index=["zyx", "wvu", "tsr"] ) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore multi-indexes as well. arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame( {"one": list(range(8)), "two": [-i for i in range(8)]}, index=arrays ) expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) def test_write_column_multiindex(self, engine): # Not able to write column multi-indexes with non-string column names. 
mi_columns = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df = pd.DataFrame(np.random.randn(4, 3), columns=mi_columns) msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Not able to write column multi-indexes with non-string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], [1, 2, 1, 2, 1, 2, 1, 2], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["Level1", "Level2"] msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column multi-indexes with string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["ColLevel1", "ColLevel2"] check_round_trip(df, engine) def test_write_column_index_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = ["bar", "baz", "foo", "qux"] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "StringCol" check_round_trip(df, engine) def test_write_column_index_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = [1, 2, 3, 4] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "NonStringCol" msg = r"parquet must have string column names" self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.filterwarnings("ignore:CategoricalBlock is deprecated:DeprecationWarning") class TestParquetPyArrow(Base): def test_basic(self, pa, df_full): df = df_full # additional supported types for pyarrow dti = pd.date_range("20130101", periods=3, tz="Europe/Brussels") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["bool_with_none"] = [True, None, True] check_round_trip(df, pa) def test_basic_subset_columns(self, pa, df_full): # GH18628 df = df_full # additional supported types for pyarrow df["datetime_tz"] = pd.date_range("20130101", periods=3, tz="Europe/Brussels") check_round_trip( df, pa, expected=df[["string", "int"]], read_kwargs={"columns": ["string", "int"]}, ) def test_to_bytes_without_path_or_buf_provided(self, pa, df_full): # GH 37105 buf_bytes = df_full.to_parquet(engine=pa) assert isinstance(buf_bytes, bytes) buf_stream = BytesIO(buf_bytes) res = read_parquet(buf_stream) tm.assert_frame_equal(df_full, res) def test_duplicate_columns(self, pa): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() self.check_error_on_write(df, pa, ValueError, "Duplicate column names found") def test_unsupported(self, pa): # timedelta df = pd.DataFrame({"a": pd.timedelta_range("1 day", periods=3)}) self.check_external_error_on_write(df, pa, NotImplementedError) # mixed python objects df = pd.DataFrame({"a": ["a", 1, 2.0]}) # pyarrow 0.11 raises ArrowTypeError # older pyarrows raise ArrowInvalid self.check_external_error_on_write(df, pa, pyarrow.ArrowException) def 
test_categorical(self, pa): # supported in >= 0.7.0 df = pd.DataFrame() df["a"] = pd.Categorical(list("abcdef")) # test for null, out-of-order values, and unobserved category df["b"] = pd.Categorical( ["bar", "foo", "foo", "bar", None, "bar"], dtype=pd.CategoricalDtype(["foo", "bar", "baz"]), ) # test for ordered flag df["c"] = pd.Categorical( ["a", "b", "c", "a", "c", "b"], categories=["b", "c", "d"], ordered=True ) check_round_trip(df, pa) @pytest.mark.xfail( is_platform_windows() and PY38, reason="localhost connection rejected", strict=False, ) def test_s3_roundtrip_explicit_fs(self, df_compat, s3_resource, pa, s3so): s3fs = pytest.importorskip("s3fs") s3 = s3fs.S3FileSystem(**s3so) kw = {"filesystem": s3} check_round_trip( df_compat, pa, path="pandas-test/pyarrow.parquet", read_kwargs=kw, write_kwargs=kw, ) def test_s3_roundtrip(self, df_compat, s3_resource, pa, s3so): # GH #19134 s3so = {"storage_options": s3so} check_round_trip( df_compat, pa, path="s3://pandas-test/pyarrow.parquet", read_kwargs=s3so, write_kwargs=s3so, ) @td.skip_if_no("s3fs") # also requires flask @pytest.mark.parametrize( "partition_col", [ ["A"], [], ], ) def test_s3_roundtrip_for_dir( self, df_compat, s3_resource, pa, partition_col, s3so ): # GH #26388 expected_df = df_compat.copy() # GH #35791 # read_table uses the new Arrow Datasets API since pyarrow 1.0.0 # Previous behaviour was pyarrow partitioned columns become 'category' dtypes # These are added to back of dataframe on read. In new API category dtype is # only used if partition field is string, but this changed again to use # category dtype for all types (not only strings) in pyarrow 2.0.0 if partition_col: partition_col_type = ( "int32" if (not pa_version_under1p0) and pa_version_under2p0 else "category" ) expected_df[partition_col] = expected_df[partition_col].astype( partition_col_type ) check_round_trip( df_compat, pa, expected=expected_df, path="s3://pandas-test/parquet_dir", read_kwargs={"storage_options": s3so}, write_kwargs={ "partition_cols": partition_col, "compression": None, "storage_options": s3so, }, check_like=True, repeat=1, ) @td.skip_if_no("pyarrow") def test_read_file_like_obj_support(self, df_compat): buffer = BytesIO() df_compat.to_parquet(buffer) df_from_buf = read_parquet(buffer) tm.assert_frame_equal(df_compat, df_from_buf) @td.skip_if_no("pyarrow") def test_expand_user(self, df_compat, monkeypatch): monkeypatch.setenv("HOME", "TestingUser") monkeypatch.setenv("USERPROFILE", "TestingUser") with pytest.raises(OSError, match=r".*TestingUser.*"): read_parquet("~/file.parquet") with pytest.raises(OSError, match=r".*TestingUser.*"): df_compat.to_parquet("~/file.parquet") def test_partition_cols_supported(self, pa, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 2 assert dataset.partitions.partition_names == set(partition_cols) assert read_parquet(path).shape == df.shape def test_partition_cols_string(self, pa, df_full): # GH #27117 partition_cols = "bool" partition_cols_list = [partition_cols] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 1 assert 
dataset.partitions.partition_names == set(partition_cols_list) assert read_parquet(path).shape == df.shape @pytest.mark.parametrize("path_type", [str, pathlib.Path]) def test_partition_cols_pathlib(self, pa, df_compat, path_type): # GH 35902 partition_cols = "B" partition_cols_list = [partition_cols] df = df_compat with tm.ensure_clean_dir() as path_str: path = path_type(path_str) df.to_parquet(path, partition_cols=partition_cols_list) assert read_parquet(path).shape == df.shape def test_empty_dataframe(self, pa): # GH #27339 df = pd.DataFrame() check_round_trip(df, pa) def test_write_with_schema(self, pa): import pyarrow df = pd.DataFrame({"x": [0, 1]}) schema = pyarrow.schema([pyarrow.field("x", type=pyarrow.bool_())]) out_df = df.astype(bool) check_round_trip(df, pa, write_kwargs={"schema": schema}, expected=out_df) @td.skip_if_no("pyarrow") def test_additional_extension_arrays(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol df = pd.DataFrame( { "a": pd.Series([1, 2, 3], dtype="Int64"), "b": pd.Series([1, 2, 3], dtype="UInt32"), "c": pd.Series(["a", None, "c"], dtype="string"), } ) check_round_trip(df, pa) df = pd.DataFrame({"a": pd.Series([1, 2, 3, None], dtype="Int64")}) check_round_trip(df, pa) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_pyarrow_backed_string_array(self, pa): # test ArrowStringArray supported through the __arrow_array__ protocol from pandas.core.arrays.string_arrow import ArrowStringDtype # noqa: F401 df = pd.DataFrame({"a": pd.Series(["a", None, "c"], dtype="arrow_string")}) check_round_trip(df, pa, expected=df) @td.skip_if_no("pyarrow") def test_additional_extension_types(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol + by defining a custom ExtensionType df = pd.DataFrame( { # Arrow does not yet support struct in writing to Parquet (ARROW-1644) # "c": pd.arrays.IntervalArray.from_tuples([(0, 1), (1, 2), (3, 4)]), "d": pd.period_range("2012-01-01", periods=3, freq="D"), } ) check_round_trip(df, pa) @td.skip_if_no("pyarrow") def test_use_nullable_dtypes(self, pa): import pyarrow.parquet as pq table = pyarrow.table( { "a": pyarrow.array([1, 2, 3, None], "int64"), "b": pyarrow.array([1, 2, 3, None], "uint8"), "c": pyarrow.array(["a", "b", "c", None]), "d": pyarrow.array([True, False, True, None]), } ) with tm.ensure_clean() as path: # write manually with pyarrow to write integers pq.write_table(table, path) result1 = read_parquet(path) result2 = read_parquet(path, use_nullable_dtypes=True) assert result1["a"].dtype == np.dtype("float64") expected = pd.DataFrame( { "a": pd.array([1, 2, 3, None], dtype="Int64"), "b": pd.array([1, 2, 3, None], dtype="UInt8"), "c": pd.array(["a", "b", "c", None], dtype="string"), "d": pd.array([True, False, True, None], dtype="boolean"), } ) tm.assert_frame_equal(result2, expected) def test_timestamp_nanoseconds(self, pa): # with version 2.0, pyarrow defaults to writing the nanoseconds, so # this should work without error df = pd.DataFrame({"a": pd.date_range("2017-01-01", freq="1n", periods=10)}) check_round_trip(df, pa, write_kwargs={"version": "2.0"}) def test_timezone_aware_index(self, pa, timezone_aware_date_list): if not pa_version_under2p0: # temporary skip this test until it is properly resolved # https://github.com/pandas-dev/pandas/issues/37286 pytest.skip() idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) # see gh-36004 # compare time(zone) values only, skip their 
class: # pyarrow always creates fixed offset timezones using pytz.FixedOffset() # even if it was datetime.timezone() originally # # technically they are the same: # they both implement datetime.tzinfo # they both wrap datetime.timedelta() # this use-case sets the resolution to 1 minute check_round_trip(df, pa, check_dtype=False) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_filter_row_groups(self, pa): # https://github.com/pandas-dev/pandas/issues/26551 df = pd.DataFrame({"a": list(range(0, 3))}) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet( path, pa, filters=[("a", "==", 0)], use_legacy_dataset=False ) assert len(result) == 1 def test_read_parquet_manager(self, pa, using_array_manager): # ensure that read_parquet honors the pandas.options.mode.data_manager option df = pd.DataFrame(np.random.randn(10, 3), columns=["A", "B", "C"]) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet(path, pa) if using_array_manager: assert isinstance(result._mgr, pd.core.internals.ArrayManager) else: assert isinstance(result._mgr, pd.core.internals.BlockManager) class TestParquetFastParquet(Base): def test_basic(self, fp, df_full): df = df_full dti = pd.date_range("20130101", periods=3, tz="US/Eastern") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["timedelta"] = pd.timedelta_range("1 day", periods=3) check_round_trip(df, fp) @pytest.mark.skip(reason="not supported") def test_duplicate_columns(self, fp): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() msg = "Cannot create parquet dataset with duplicate column names" self.check_error_on_write(df, fp, ValueError, msg) def test_bool_with_none(self, fp): df = pd.DataFrame({"a": [True, None, False]}) expected = pd.DataFrame({"a": [1.0, np.nan, 0.0]}, dtype="float16") check_round_trip(df, fp, expected=expected) def test_unsupported(self, fp): # period df = pd.DataFrame({"a": pd.period_range("2013", freq="M", periods=3)}) # error from fastparquet -> don't check exact error message self.check_error_on_write(df, fp, ValueError, None) # mixed df = pd.DataFrame({"a": ["a", 1, 2.0]}) msg = "Can't infer object conversion type" self.check_error_on_write(df, fp, ValueError, msg) def test_categorical(self, fp): df = pd.DataFrame({"a": pd.Categorical(list("abc"))}) check_round_trip(df, fp) def test_filter_row_groups(self, fp): d = {"a": list(range(0, 3))} df = pd.DataFrame(d) with tm.ensure_clean() as path: df.to_parquet(path, fp, compression=None, row_group_offsets=1) result = read_parquet(path, fp, filters=[("a", "==", 0)]) assert len(result) == 1 def test_s3_roundtrip(self, df_compat, s3_resource, fp, s3so): # GH #19134 check_round_trip( df_compat, fp, path="s3://pandas-test/fastparquet.parquet", read_kwargs={"storage_options": s3so}, write_kwargs={"compression": None, "storage_options": s3so}, ) def test_partition_cols_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_partition_cols_string(self, fp, df_full): # GH #27117 partition_cols = "bool" df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, 
compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 1 def test_partition_on_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_error_on_using_partition_cols_and_partition_on(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full msg = ( "Cannot use both partition_on and partition_cols. Use partition_cols for " "partitioning data" ) with pytest.raises(ValueError, match=msg): with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, partition_cols=partition_cols, ) def test_empty_dataframe(self, fp): # GH #27339 df = pd.DataFrame() expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_timezone_aware_index(self, fp, timezone_aware_date_list): idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_use_nullable_dtypes_not_supported(self, fp): df = pd.DataFrame({"a": [1, 2]}) with tm.ensure_clean() as path: df.to_parquet(path) with pytest.raises(ValueError, match="not supported for the fastparquet"): read_parquet(path, engine="fastparquet", use_nullable_dtypes=True)
datapythonista/pandas
pandas/tests/io/test_parquet.py
pandas/_testing/_warnings.py
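As a condensed sketch of what the ``check_round_trip`` helper in the test module above automates (this is an illustration, not part of the module, and it assumes pyarrow is installed), a single write/read/compare cycle looks like:

import pandas as pd
import pandas._testing as tm


def parquet_roundtrip_sketch():
    df = pd.DataFrame({"A": [1, 2, 3], "B": "foo"})
    # ensure_clean() yields a temporary file path that is removed on exit.
    with tm.ensure_clean() as path:
        df.to_parquet(path, engine="pyarrow", compression=None)
        result = pd.read_parquet(path, engine="pyarrow")
    tm.assert_frame_equal(result, df)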
from typing import Optional import numpy as np from pandas._libs import lib from pandas.core.dtypes.cast import maybe_downcast_numeric from pandas.core.dtypes.common import ( ensure_object, is_datetime_or_timedelta_dtype, is_decimal, is_integer_dtype, is_number, is_numeric_dtype, is_scalar, needs_i8_conversion, ) from pandas.core.dtypes.generic import ( ABCIndex, ABCSeries, ) import pandas as pd from pandas.core.arrays.numeric import NumericArray def to_numeric(arg, errors="raise", downcast=None): """ Convert argument to a numeric type. The default return dtype is `float64` or `int64` depending on the data supplied. Use the `downcast` parameter to obtain other dtypes. Please note that precision loss may occur if really large numbers are passed in. Due to the internal limitations of `ndarray`, if numbers smaller than `-9223372036854775808` (np.iinfo(np.int64).min) or larger than `18446744073709551615` (np.iinfo(np.uint64).max) are passed in, it is very likely they will be converted to float so that they can stored in an `ndarray`. These warnings apply similarly to `Series` since it internally leverages `ndarray`. Parameters ---------- arg : scalar, list, tuple, 1-d array, or Series Argument to be converted. errors : {'ignore', 'raise', 'coerce'}, default 'raise' - If 'raise', then invalid parsing will raise an exception. - If 'coerce', then invalid parsing will be set as NaN. - If 'ignore', then invalid parsing will return the input. downcast : {'integer', 'signed', 'unsigned', 'float'}, default None If not None, and if the data has been successfully cast to a numerical dtype (or if the data was numeric to begin with), downcast that resulting data to the smallest numerical dtype possible according to the following rules: - 'integer' or 'signed': smallest signed int dtype (min.: np.int8) - 'unsigned': smallest unsigned int dtype (min.: np.uint8) - 'float': smallest float dtype (min.: np.float32) As this behaviour is separate from the core conversion to numeric values, any errors raised during the downcasting will be surfaced regardless of the value of the 'errors' input. In addition, downcasting will only occur if the size of the resulting data's dtype is strictly larger than the dtype it is to be cast to, so if none of the dtypes checked satisfy that specification, no downcasting will be performed on the data. Returns ------- ret Numeric if parsing succeeded. Return type depends on input. Series if Series, otherwise ndarray. See Also -------- DataFrame.astype : Cast argument to a specified dtype. to_datetime : Convert argument to datetime. to_timedelta : Convert argument to timedelta. numpy.ndarray.astype : Cast a numpy array to a specified type. DataFrame.convert_dtypes : Convert dtypes. 
Examples -------- Take separate series and convert to numeric, coercing when told to >>> s = pd.Series(['1.0', '2', -3]) >>> pd.to_numeric(s) 0 1.0 1 2.0 2 -3.0 dtype: float64 >>> pd.to_numeric(s, downcast='float') 0 1.0 1 2.0 2 -3.0 dtype: float32 >>> pd.to_numeric(s, downcast='signed') 0 1 1 2 2 -3 dtype: int8 >>> s = pd.Series(['apple', '1.0', '2', -3]) >>> pd.to_numeric(s, errors='ignore') 0 apple 1 1.0 2 2 3 -3 dtype: object >>> pd.to_numeric(s, errors='coerce') 0 NaN 1 1.0 2 2.0 3 -3.0 dtype: float64 Downcasting of nullable integer and floating dtypes is supported: >>> s = pd.Series([1, 2, 3], dtype="Int64") >>> pd.to_numeric(s, downcast="integer") 0 1 1 2 2 3 dtype: Int8 >>> s = pd.Series([1.0, 2.1, 3.0], dtype="Float64") >>> pd.to_numeric(s, downcast="float") 0 1.0 1 2.1 2 3.0 dtype: Float32 """ if downcast not in (None, "integer", "signed", "unsigned", "float"): raise ValueError("invalid downcasting method provided") if errors not in ("ignore", "raise", "coerce"): raise ValueError("invalid error value specified") is_series = False is_index = False is_scalars = False if isinstance(arg, ABCSeries): is_series = True values = arg.values elif isinstance(arg, ABCIndex): is_index = True if needs_i8_conversion(arg.dtype): values = arg.asi8 else: values = arg.values elif isinstance(arg, (list, tuple)): values = np.array(arg, dtype="O") elif is_scalar(arg): if is_decimal(arg): return float(arg) if is_number(arg): return arg is_scalars = True values = np.array([arg], dtype="O") elif getattr(arg, "ndim", 1) > 1: raise TypeError("arg must be a list, tuple, 1-d array, or Series") else: values = arg # GH33013: for IntegerArray & FloatingArray extract non-null values for casting # save mask to reconstruct the full array after casting mask: Optional[np.ndarray] = None if isinstance(values, NumericArray): mask = values._mask values = values._data[~mask] values_dtype = getattr(values, "dtype", None) if is_numeric_dtype(values_dtype): pass elif is_datetime_or_timedelta_dtype(values_dtype): values = values.view(np.int64) else: values = ensure_object(values) coerce_numeric = errors not in ("ignore", "raise") try: values, _ = lib.maybe_convert_numeric( values, set(), coerce_numeric=coerce_numeric ) except (ValueError, TypeError): if errors == "raise": raise # attempt downcast only if the data has been successfully converted # to a numerical dtype and if a downcast method has been specified if downcast is not None and is_numeric_dtype(values.dtype): typecodes = None if downcast in ("integer", "signed"): typecodes = np.typecodes["Integer"] elif downcast == "unsigned" and (not len(values) or np.min(values) >= 0): typecodes = np.typecodes["UnsignedInteger"] elif downcast == "float": typecodes = np.typecodes["Float"] # pandas support goes only to np.float32, # as float dtypes smaller than that are # extremely rare and not well supported float_32_char = np.dtype(np.float32).char float_32_ind = typecodes.index(float_32_char) typecodes = typecodes[float_32_ind:] if typecodes is not None: # from smallest to largest for dtype in typecodes: dtype = np.dtype(dtype) if dtype.itemsize <= values.dtype.itemsize: values = maybe_downcast_numeric(values, dtype) # successful conversion if values.dtype == dtype: break # GH33013: for IntegerArray & FloatingArray need to reconstruct masked array if mask is not None: data = np.zeros(mask.shape, dtype=values.dtype) data[~mask] = values from pandas.core.arrays import ( FloatingArray, IntegerArray, ) klass = IntegerArray if is_integer_dtype(data.dtype) else FloatingArray values = 
klass(data, mask.copy()) if is_series: return arg._constructor(values, index=arg.index, name=arg.name) elif is_index: # because we want to coerce to numeric if possible, # do not use _shallow_copy return pd.Index(values, name=arg.name) elif is_scalars: return values[0] else: return values
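The downcast branch above walks the candidate ``np.typecodes`` from the smallest dtype upward and stops at the first one that holds the converted values. A small standalone illustration of that behaviour (my own sketch, not part of the module):

import pandas as pd

s = pd.Series([1, 2, 3])
print(pd.to_numeric(s, downcast="integer").dtype)   # int8: smallest signed dtype that fits
print(pd.to_numeric(s, downcast="unsigned").dtype)  # uint8: all values are non-negative
print(pd.to_numeric([1.0, 2.5, 3.0], downcast="float").dtype)  # float32: float downcasting stops there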
""" test parquet compat """ import datetime from io import BytesIO import os import pathlib from warnings import ( catch_warnings, filterwarnings, ) import numpy as np import pytest from pandas._config import get_option from pandas.compat import ( PY38, is_platform_windows, ) from pandas.compat.pyarrow import ( pa_version_under1p0, pa_version_under2p0, ) import pandas.util._test_decorators as td import pandas as pd import pandas._testing as tm from pandas.util.version import Version from pandas.io.parquet import ( FastParquetImpl, PyArrowImpl, get_engine, read_parquet, to_parquet, ) try: import pyarrow _HAVE_PYARROW = True except ImportError: _HAVE_PYARROW = False try: with catch_warnings(): # `np.bool` is a deprecated alias... filterwarnings("ignore", "`np.bool`", category=DeprecationWarning) import fastparquet _HAVE_FASTPARQUET = True except ImportError: _HAVE_FASTPARQUET = False pytestmark = pytest.mark.filterwarnings( "ignore:RangeIndex.* is deprecated:DeprecationWarning" ) # TODO(ArrayManager) fastparquet relies on BlockManager internals # setup engines & skips @pytest.fixture( params=[ pytest.param( "fastparquet", marks=pytest.mark.skipif( not _HAVE_FASTPARQUET or get_option("mode.data_manager") == "array", reason="fastparquet is not installed or ArrayManager is used", ), ), pytest.param( "pyarrow", marks=pytest.mark.skipif( not _HAVE_PYARROW, reason="pyarrow is not installed" ), ), ] ) def engine(request): return request.param @pytest.fixture def pa(): if not _HAVE_PYARROW: pytest.skip("pyarrow is not installed") return "pyarrow" @pytest.fixture def fp(): if not _HAVE_FASTPARQUET: pytest.skip("fastparquet is not installed") elif get_option("mode.data_manager") == "array": pytest.skip("ArrayManager is not supported with fastparquet") return "fastparquet" @pytest.fixture def df_compat(): return pd.DataFrame({"A": [1, 2, 3], "B": "foo"}) @pytest.fixture def df_cross_compat(): df = pd.DataFrame( { "a": list("abc"), "b": list(range(1, 4)), # 'c': np.arange(3, 6).astype('u1'), "d": np.arange(4.0, 7.0, dtype="float64"), "e": [True, False, True], "f": pd.date_range("20130101", periods=3), # 'g': pd.date_range('20130101', periods=3, # tz='US/Eastern'), # 'h': pd.date_range('20130101', periods=3, freq='ns') } ) return df @pytest.fixture def df_full(): return pd.DataFrame( { "string": list("abc"), "string_with_nan": ["a", np.nan, "c"], "string_with_none": ["a", None, "c"], "bytes": [b"foo", b"bar", b"baz"], "unicode": ["foo", "bar", "baz"], "int": list(range(1, 4)), "uint": np.arange(3, 6).astype("u1"), "float": np.arange(4.0, 7.0, dtype="float64"), "float_with_nan": [2.0, np.nan, 3.0], "bool": [True, False, True], "datetime": pd.date_range("20130101", periods=3), "datetime_with_nat": [ pd.Timestamp("20130101"), pd.NaT, pd.Timestamp("20130103"), ], } ) @pytest.fixture( params=[ datetime.datetime.now(datetime.timezone.utc), datetime.datetime.now(datetime.timezone.min), datetime.datetime.now(datetime.timezone.max), datetime.datetime.strptime("2019-01-04T16:41:24+0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24+0215", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0200", "%Y-%m-%dT%H:%M:%S%z"), datetime.datetime.strptime("2019-01-04T16:41:24-0215", "%Y-%m-%dT%H:%M:%S%z"), ] ) def timezone_aware_date_list(request): return request.param def check_round_trip( df, engine=None, path=None, write_kwargs=None, read_kwargs=None, expected=None, check_names=True, check_like=False, check_dtype=True, repeat=2, ): """Verify parquet serializer and 
deserializer produce the same results. Performs a pandas to disk and disk to pandas round trip, then compares the 2 resulting DataFrames to verify equality. Parameters ---------- df: Dataframe engine: str, optional 'pyarrow' or 'fastparquet' path: str, optional write_kwargs: dict of str:str, optional read_kwargs: dict of str:str, optional expected: DataFrame, optional Expected deserialization result, otherwise will be equal to `df` check_names: list of str, optional Closed set of column names to be compared check_like: bool, optional If True, ignore the order of index & columns. repeat: int, optional How many times to repeat the test """ write_kwargs = write_kwargs or {"compression": None} read_kwargs = read_kwargs or {} if expected is None: expected = df if engine: write_kwargs["engine"] = engine read_kwargs["engine"] = engine def compare(repeat): for _ in range(repeat): df.to_parquet(path, **write_kwargs) with catch_warnings(record=True): actual = read_parquet(path, **read_kwargs) tm.assert_frame_equal( expected, actual, check_names=check_names, check_like=check_like, check_dtype=check_dtype, ) if path is None: with tm.ensure_clean() as path: compare(repeat) else: compare(repeat) def test_invalid_engine(df_compat): msg = "engine must be one of 'pyarrow', 'fastparquet'" with pytest.raises(ValueError, match=msg): check_round_trip(df_compat, "foo", "bar") def test_options_py(df_compat, pa): # use the set option with pd.option_context("io.parquet.engine", "pyarrow"): check_round_trip(df_compat) def test_options_fp(df_compat, fp): # use the set option with pd.option_context("io.parquet.engine", "fastparquet"): check_round_trip(df_compat) def test_options_auto(df_compat, fp, pa): # use the set option with pd.option_context("io.parquet.engine", "auto"): check_round_trip(df_compat) def test_options_get_engine(fp, pa): assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "pyarrow"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "fastparquet"): assert isinstance(get_engine("auto"), FastParquetImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) with pd.option_context("io.parquet.engine", "auto"): assert isinstance(get_engine("auto"), PyArrowImpl) assert isinstance(get_engine("pyarrow"), PyArrowImpl) assert isinstance(get_engine("fastparquet"), FastParquetImpl) def test_get_engine_auto_error_message(): # Expect different error messages from get_engine(engine="auto") # if engines aren't installed vs. are installed but bad version from pandas.compat._optional import VERSIONS # Do we have engines installed, but a bad version of them? pa_min_ver = VERSIONS.get("pyarrow") fp_min_ver = VERSIONS.get("fastparquet") have_pa_bad_version = ( False if not _HAVE_PYARROW else Version(pyarrow.__version__) < Version(pa_min_ver) ) have_fp_bad_version = ( False if not _HAVE_FASTPARQUET else Version(fastparquet.__version__) < Version(fp_min_ver) ) # Do we have usable engines installed? have_usable_pa = _HAVE_PYARROW and not have_pa_bad_version have_usable_fp = _HAVE_FASTPARQUET and not have_fp_bad_version if not have_usable_pa and not have_usable_fp: # No usable engines found. if have_pa_bad_version: match = f"Pandas requires version .{pa_min_ver}. or newer of .pyarrow." 
with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .pyarrow." with pytest.raises(ImportError, match=match): get_engine("auto") if have_fp_bad_version: match = f"Pandas requires version .{fp_min_ver}. or newer of .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") else: match = "Missing optional dependency .fastparquet." with pytest.raises(ImportError, match=match): get_engine("auto") def test_cross_engine_pa_fp(df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=pa, compression=None) result = read_parquet(path, engine=fp) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=fp, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) def test_cross_engine_fp_pa(request, df_cross_compat, pa, fp): # cross-compat with differing reading/writing engines df = df_cross_compat with tm.ensure_clean() as path: df.to_parquet(path, engine=fp, compression=None) with catch_warnings(record=True): result = read_parquet(path, engine=pa) tm.assert_frame_equal(result, df) result = read_parquet(path, engine=pa, columns=["a", "d"]) tm.assert_frame_equal(result, df[["a", "d"]]) class Base: def check_error_on_write(self, df, engine, exc, err_msg): # check that we are raising the exception on writing with tm.ensure_clean() as path: with pytest.raises(exc, match=err_msg): to_parquet(df, path, engine, compression=None) def check_external_error_on_write(self, df, engine, exc): # check that an external library is raising the exception on writing with tm.ensure_clean() as path: with tm.external_error_raised(exc): to_parquet(df, path, engine, compression=None) @tm.network def test_parquet_read_from_url(self, df_compat, engine): if engine != "auto": pytest.importorskip(engine) url = ( "https://raw.githubusercontent.com/pandas-dev/pandas/" "master/pandas/tests/io/data/parquet/simple.parquet" ) df = read_parquet(url) tm.assert_frame_equal(df, df_compat) class TestBasic(Base): def test_error(self, engine): for obj in [ pd.Series([1, 2, 3]), 1, "foo", pd.Timestamp("20130101"), np.array([1, 2, 3]), ]: msg = "to_parquet only supports IO with DataFrames" self.check_error_on_write(obj, engine, ValueError, msg) def test_columns_dtypes(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) # unicode df.columns = ["foo", "bar"] check_round_trip(df, engine) def test_columns_dtypes_invalid(self, engine): df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) msg = "parquet must have string column names" # numeric df.columns = [0, 1] self.check_error_on_write(df, engine, ValueError, msg) # bytes df.columns = [b"foo", b"bar"] self.check_error_on_write(df, engine, ValueError, msg) # python object df.columns = [ datetime.datetime(2011, 1, 1, 0, 0), datetime.datetime(2011, 1, 1, 1, 1), ] self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.parametrize("compression", [None, "gzip", "snappy", "brotli"]) def test_compression(self, engine, compression): if compression == "snappy": pytest.importorskip("snappy") elif compression == "brotli": pytest.importorskip("brotli") df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine, write_kwargs={"compression": compression}) def test_read_columns(self, engine): # GH18154 df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))}) expected = pd.DataFrame({"string": list("abc")}) check_round_trip( df, engine, 
expected=expected, read_kwargs={"columns": ["string"]} ) def test_write_index(self, engine): check_names = engine != "fastparquet" df = pd.DataFrame({"A": [1, 2, 3]}) check_round_trip(df, engine) indexes = [ [2, 3, 4], pd.date_range("20130101", periods=3), list("abc"), [1, 3, 4], ] # non-default index for index in indexes: df.index = index if isinstance(index, pd.DatetimeIndex): df.index = df.index._with_freq(None) # freq doesn't round-trip check_round_trip(df, engine, check_names=check_names) # index with meta-data df.index = [0, 1, 2] df.index.name = "foo" check_round_trip(df, engine) def test_write_multiindex(self, pa): # Not supported in fastparquet as of 0.1.3 or older pyarrow version engine = pa df = pd.DataFrame({"A": [1, 2, 3]}) index = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df.index = index check_round_trip(df, engine) def test_multiindex_with_columns(self, pa): engine = pa dates = pd.date_range("01-Jan-2018", "01-Dec-2018", freq="MS") df = pd.DataFrame(np.random.randn(2 * len(dates), 3), columns=list("ABC")) index1 = pd.MultiIndex.from_product( [["Level1", "Level2"], dates], names=["level", "date"] ) index2 = index1.copy(names=None) for index in [index1, index2]: df.index = index check_round_trip(df, engine) check_round_trip( df, engine, read_kwargs={"columns": ["A", "B"]}, expected=df[["A", "B"]] ) def test_write_ignoring_index(self, engine): # ENH 20768 # Ensure index=False omits the index from the written Parquet file. df = pd.DataFrame({"a": [1, 2, 3], "b": ["q", "r", "s"]}) write_kwargs = {"compression": None, "index": False} # Because we're dropping the index, we expect the loaded dataframe to # have the default integer index. expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore custom index df = pd.DataFrame( {"a": [1, 2, 3], "b": ["q", "r", "s"]}, index=["zyx", "wvu", "tsr"] ) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) # Ignore multi-indexes as well. arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame( {"one": list(range(8)), "two": [-i for i in range(8)]}, index=arrays ) expected = df.reset_index(drop=True) check_round_trip(df, engine, write_kwargs=write_kwargs, expected=expected) def test_write_column_multiindex(self, engine): # Not able to write column multi-indexes with non-string column names. 
mi_columns = pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1)]) df = pd.DataFrame(np.random.randn(4, 3), columns=mi_columns) msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Not able to write column multi-indexes with non-string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], [1, 2, 1, 2, 1, 2, 1, 2], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["Level1", "Level2"] msg = ( r"\s*parquet must have string column names for all values in\s*" "each level of the MultiIndex" ) self.check_error_on_write(df, engine, ValueError, msg) def test_write_column_multiindex_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column multi-indexes with string column names arrays = [ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"], ["one", "two", "one", "two", "one", "two", "one", "two"], ] df = pd.DataFrame(np.random.randn(8, 8), columns=arrays) df.columns.names = ["ColLevel1", "ColLevel2"] check_round_trip(df, engine) def test_write_column_index_string(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = ["bar", "baz", "foo", "qux"] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "StringCol" check_round_trip(df, engine) def test_write_column_index_nonstring(self, pa): # GH #34777 # Not supported in fastparquet as of 0.1.3 engine = pa # Write column indexes with string column names arrays = [1, 2, 3, 4] df = pd.DataFrame(np.random.randn(8, 4), columns=arrays) df.columns.name = "NonStringCol" msg = r"parquet must have string column names" self.check_error_on_write(df, engine, ValueError, msg) @pytest.mark.filterwarnings("ignore:CategoricalBlock is deprecated:DeprecationWarning") class TestParquetPyArrow(Base): def test_basic(self, pa, df_full): df = df_full # additional supported types for pyarrow dti = pd.date_range("20130101", periods=3, tz="Europe/Brussels") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["bool_with_none"] = [True, None, True] check_round_trip(df, pa) def test_basic_subset_columns(self, pa, df_full): # GH18628 df = df_full # additional supported types for pyarrow df["datetime_tz"] = pd.date_range("20130101", periods=3, tz="Europe/Brussels") check_round_trip( df, pa, expected=df[["string", "int"]], read_kwargs={"columns": ["string", "int"]}, ) def test_to_bytes_without_path_or_buf_provided(self, pa, df_full): # GH 37105 buf_bytes = df_full.to_parquet(engine=pa) assert isinstance(buf_bytes, bytes) buf_stream = BytesIO(buf_bytes) res = read_parquet(buf_stream) tm.assert_frame_equal(df_full, res) def test_duplicate_columns(self, pa): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() self.check_error_on_write(df, pa, ValueError, "Duplicate column names found") def test_unsupported(self, pa): # timedelta df = pd.DataFrame({"a": pd.timedelta_range("1 day", periods=3)}) self.check_external_error_on_write(df, pa, NotImplementedError) # mixed python objects df = pd.DataFrame({"a": ["a", 1, 2.0]}) # pyarrow 0.11 raises ArrowTypeError # older pyarrows raise ArrowInvalid self.check_external_error_on_write(df, pa, pyarrow.ArrowException) def 
test_categorical(self, pa): # supported in >= 0.7.0 df = pd.DataFrame() df["a"] = pd.Categorical(list("abcdef")) # test for null, out-of-order values, and unobserved category df["b"] = pd.Categorical( ["bar", "foo", "foo", "bar", None, "bar"], dtype=pd.CategoricalDtype(["foo", "bar", "baz"]), ) # test for ordered flag df["c"] = pd.Categorical( ["a", "b", "c", "a", "c", "b"], categories=["b", "c", "d"], ordered=True ) check_round_trip(df, pa) @pytest.mark.xfail( is_platform_windows() and PY38, reason="localhost connection rejected", strict=False, ) def test_s3_roundtrip_explicit_fs(self, df_compat, s3_resource, pa, s3so): s3fs = pytest.importorskip("s3fs") s3 = s3fs.S3FileSystem(**s3so) kw = {"filesystem": s3} check_round_trip( df_compat, pa, path="pandas-test/pyarrow.parquet", read_kwargs=kw, write_kwargs=kw, ) def test_s3_roundtrip(self, df_compat, s3_resource, pa, s3so): # GH #19134 s3so = {"storage_options": s3so} check_round_trip( df_compat, pa, path="s3://pandas-test/pyarrow.parquet", read_kwargs=s3so, write_kwargs=s3so, ) @td.skip_if_no("s3fs") # also requires flask @pytest.mark.parametrize( "partition_col", [ ["A"], [], ], ) def test_s3_roundtrip_for_dir( self, df_compat, s3_resource, pa, partition_col, s3so ): # GH #26388 expected_df = df_compat.copy() # GH #35791 # read_table uses the new Arrow Datasets API since pyarrow 1.0.0 # Previous behaviour was pyarrow partitioned columns become 'category' dtypes # These are added to back of dataframe on read. In new API category dtype is # only used if partition field is string, but this changed again to use # category dtype for all types (not only strings) in pyarrow 2.0.0 if partition_col: partition_col_type = ( "int32" if (not pa_version_under1p0) and pa_version_under2p0 else "category" ) expected_df[partition_col] = expected_df[partition_col].astype( partition_col_type ) check_round_trip( df_compat, pa, expected=expected_df, path="s3://pandas-test/parquet_dir", read_kwargs={"storage_options": s3so}, write_kwargs={ "partition_cols": partition_col, "compression": None, "storage_options": s3so, }, check_like=True, repeat=1, ) @td.skip_if_no("pyarrow") def test_read_file_like_obj_support(self, df_compat): buffer = BytesIO() df_compat.to_parquet(buffer) df_from_buf = read_parquet(buffer) tm.assert_frame_equal(df_compat, df_from_buf) @td.skip_if_no("pyarrow") def test_expand_user(self, df_compat, monkeypatch): monkeypatch.setenv("HOME", "TestingUser") monkeypatch.setenv("USERPROFILE", "TestingUser") with pytest.raises(OSError, match=r".*TestingUser.*"): read_parquet("~/file.parquet") with pytest.raises(OSError, match=r".*TestingUser.*"): df_compat.to_parquet("~/file.parquet") def test_partition_cols_supported(self, pa, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 2 assert dataset.partitions.partition_names == set(partition_cols) assert read_parquet(path).shape == df.shape def test_partition_cols_string(self, pa, df_full): # GH #27117 partition_cols = "bool" partition_cols_list = [partition_cols] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet(path, partition_cols=partition_cols, compression=None) import pyarrow.parquet as pq dataset = pq.ParquetDataset(path, validate_schema=False) assert len(dataset.partitions.partition_names) == 1 assert 
dataset.partitions.partition_names == set(partition_cols_list) assert read_parquet(path).shape == df.shape @pytest.mark.parametrize("path_type", [str, pathlib.Path]) def test_partition_cols_pathlib(self, pa, df_compat, path_type): # GH 35902 partition_cols = "B" partition_cols_list = [partition_cols] df = df_compat with tm.ensure_clean_dir() as path_str: path = path_type(path_str) df.to_parquet(path, partition_cols=partition_cols_list) assert read_parquet(path).shape == df.shape def test_empty_dataframe(self, pa): # GH #27339 df = pd.DataFrame() check_round_trip(df, pa) def test_write_with_schema(self, pa): import pyarrow df = pd.DataFrame({"x": [0, 1]}) schema = pyarrow.schema([pyarrow.field("x", type=pyarrow.bool_())]) out_df = df.astype(bool) check_round_trip(df, pa, write_kwargs={"schema": schema}, expected=out_df) @td.skip_if_no("pyarrow") def test_additional_extension_arrays(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol df = pd.DataFrame( { "a": pd.Series([1, 2, 3], dtype="Int64"), "b": pd.Series([1, 2, 3], dtype="UInt32"), "c": pd.Series(["a", None, "c"], dtype="string"), } ) check_round_trip(df, pa) df = pd.DataFrame({"a": pd.Series([1, 2, 3, None], dtype="Int64")}) check_round_trip(df, pa) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_pyarrow_backed_string_array(self, pa): # test ArrowStringArray supported through the __arrow_array__ protocol from pandas.core.arrays.string_arrow import ArrowStringDtype # noqa: F401 df = pd.DataFrame({"a": pd.Series(["a", None, "c"], dtype="arrow_string")}) check_round_trip(df, pa, expected=df) @td.skip_if_no("pyarrow") def test_additional_extension_types(self, pa): # test additional ExtensionArrays that are supported through the # __arrow_array__ protocol + by defining a custom ExtensionType df = pd.DataFrame( { # Arrow does not yet support struct in writing to Parquet (ARROW-1644) # "c": pd.arrays.IntervalArray.from_tuples([(0, 1), (1, 2), (3, 4)]), "d": pd.period_range("2012-01-01", periods=3, freq="D"), } ) check_round_trip(df, pa) @td.skip_if_no("pyarrow") def test_use_nullable_dtypes(self, pa): import pyarrow.parquet as pq table = pyarrow.table( { "a": pyarrow.array([1, 2, 3, None], "int64"), "b": pyarrow.array([1, 2, 3, None], "uint8"), "c": pyarrow.array(["a", "b", "c", None]), "d": pyarrow.array([True, False, True, None]), } ) with tm.ensure_clean() as path: # write manually with pyarrow to write integers pq.write_table(table, path) result1 = read_parquet(path) result2 = read_parquet(path, use_nullable_dtypes=True) assert result1["a"].dtype == np.dtype("float64") expected = pd.DataFrame( { "a": pd.array([1, 2, 3, None], dtype="Int64"), "b": pd.array([1, 2, 3, None], dtype="UInt8"), "c": pd.array(["a", "b", "c", None], dtype="string"), "d": pd.array([True, False, True, None], dtype="boolean"), } ) tm.assert_frame_equal(result2, expected) def test_timestamp_nanoseconds(self, pa): # with version 2.0, pyarrow defaults to writing the nanoseconds, so # this should work without error df = pd.DataFrame({"a": pd.date_range("2017-01-01", freq="1n", periods=10)}) check_round_trip(df, pa, write_kwargs={"version": "2.0"}) def test_timezone_aware_index(self, pa, timezone_aware_date_list): if not pa_version_under2p0: # temporary skip this test until it is properly resolved # https://github.com/pandas-dev/pandas/issues/37286 pytest.skip() idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) # see gh-36004 # compare time(zone) values only, skip their 
class: # pyarrow always creates fixed offset timezones using pytz.FixedOffset() # even if it was datetime.timezone() originally # # technically they are the same: # they both implement datetime.tzinfo # they both wrap datetime.timedelta() # this use-case sets the resolution to 1 minute check_round_trip(df, pa, check_dtype=False) @td.skip_if_no("pyarrow", min_version="1.0.0") def test_filter_row_groups(self, pa): # https://github.com/pandas-dev/pandas/issues/26551 df = pd.DataFrame({"a": list(range(0, 3))}) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet( path, pa, filters=[("a", "==", 0)], use_legacy_dataset=False ) assert len(result) == 1 def test_read_parquet_manager(self, pa, using_array_manager): # ensure that read_parquet honors the pandas.options.mode.data_manager option df = pd.DataFrame(np.random.randn(10, 3), columns=["A", "B", "C"]) with tm.ensure_clean() as path: df.to_parquet(path, pa) result = read_parquet(path, pa) if using_array_manager: assert isinstance(result._mgr, pd.core.internals.ArrayManager) else: assert isinstance(result._mgr, pd.core.internals.BlockManager) class TestParquetFastParquet(Base): def test_basic(self, fp, df_full): df = df_full dti = pd.date_range("20130101", periods=3, tz="US/Eastern") dti = dti._with_freq(None) # freq doesn't round-trip df["datetime_tz"] = dti df["timedelta"] = pd.timedelta_range("1 day", periods=3) check_round_trip(df, fp) @pytest.mark.skip(reason="not supported") def test_duplicate_columns(self, fp): # not currently able to handle duplicate columns df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy() msg = "Cannot create parquet dataset with duplicate column names" self.check_error_on_write(df, fp, ValueError, msg) def test_bool_with_none(self, fp): df = pd.DataFrame({"a": [True, None, False]}) expected = pd.DataFrame({"a": [1.0, np.nan, 0.0]}, dtype="float16") check_round_trip(df, fp, expected=expected) def test_unsupported(self, fp): # period df = pd.DataFrame({"a": pd.period_range("2013", freq="M", periods=3)}) # error from fastparquet -> don't check exact error message self.check_error_on_write(df, fp, ValueError, None) # mixed df = pd.DataFrame({"a": ["a", 1, 2.0]}) msg = "Can't infer object conversion type" self.check_error_on_write(df, fp, ValueError, msg) def test_categorical(self, fp): df = pd.DataFrame({"a": pd.Categorical(list("abc"))}) check_round_trip(df, fp) def test_filter_row_groups(self, fp): d = {"a": list(range(0, 3))} df = pd.DataFrame(d) with tm.ensure_clean() as path: df.to_parquet(path, fp, compression=None, row_group_offsets=1) result = read_parquet(path, fp, filters=[("a", "==", 0)]) assert len(result) == 1 def test_s3_roundtrip(self, df_compat, s3_resource, fp, s3so): # GH #19134 check_round_trip( df_compat, fp, path="s3://pandas-test/fastparquet.parquet", read_kwargs={"storage_options": s3so}, write_kwargs={"compression": None, "storage_options": s3so}, ) def test_partition_cols_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_partition_cols_string(self, fp, df_full): # GH #27117 partition_cols = "bool" df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", partition_cols=partition_cols, 
compression=None, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 1 def test_partition_on_supported(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, ) assert os.path.exists(path) import fastparquet actual_partition_cols = fastparquet.ParquetFile(path, False).cats assert len(actual_partition_cols) == 2 def test_error_on_using_partition_cols_and_partition_on(self, fp, df_full): # GH #23283 partition_cols = ["bool", "int"] df = df_full msg = ( "Cannot use both partition_on and partition_cols. Use partition_cols for " "partitioning data" ) with pytest.raises(ValueError, match=msg): with tm.ensure_clean_dir() as path: df.to_parquet( path, engine="fastparquet", compression=None, partition_on=partition_cols, partition_cols=partition_cols, ) def test_empty_dataframe(self, fp): # GH #27339 df = pd.DataFrame() expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_timezone_aware_index(self, fp, timezone_aware_date_list): idx = 5 * [timezone_aware_date_list] df = pd.DataFrame(index=idx, data={"index_as_col": idx}) expected = df.copy() expected.index.name = "index" check_round_trip(df, fp, expected=expected) def test_use_nullable_dtypes_not_supported(self, fp): df = pd.DataFrame({"a": [1, 2]}) with tm.ensure_clean() as path: df.to_parquet(path) with pytest.raises(ValueError, match="not supported for the fastparquet"): read_parquet(path, engine="fastparquet", use_nullable_dtypes=True)
datapythonista/pandas
pandas/tests/io/test_parquet.py
pandas/core/tools/numeric.py
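The parquet tests above all reduce to a to_parquet/read_parquet round trip plus a frame comparison. A minimal sketch of that pattern, assuming pandas with the pyarrow engine installed (the file and directory names here are illustrative, not taken from the tests):

import pandas as pd

# A frame with a nullable integer column; the None becomes a parquet null.
df = pd.DataFrame({"a": pd.array([1, 2, None], dtype="Int64")})

# Round trip through the pyarrow engine and verify nothing was lost.
df.to_parquet("example.parquet", engine="pyarrow")
result = pd.read_parquet("example.parquet", engine="pyarrow")
pd.testing.assert_frame_equal(result, df)

# read_parquet(..., use_nullable_dtypes=True) additionally requests pandas
# extension dtypes (Int64, UInt8, string, boolean) even for files written
# without pandas metadata, which is what test_use_nullable_dtypes checks.

# partition_cols writes a directory tree with one sub-directory per value of
# each listed column, which the partition_cols tests above assert on.
df.assign(part=["x", "x", "y"]).to_parquet(
    "example_dataset", engine="pyarrow", partition_cols=["part"]
)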
"""Support for monitoring energy usage using the DTE energy bridge.""" import logging import requests import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import CONF_NAME, HTTP_OK import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_IP_ADDRESS = "ip" CONF_VERSION = "version" DEFAULT_NAME = "Current Energy Usage" DEFAULT_VERSION = 1 ICON = "mdi:flash" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_IP_ADDRESS): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): vol.All( vol.Coerce(int), vol.Any(1, 2) ), } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the DTE energy bridge sensor.""" name = config[CONF_NAME] ip_address = config[CONF_IP_ADDRESS] version = config[CONF_VERSION] add_entities([DteEnergyBridgeSensor(ip_address, name, version)], True) class DteEnergyBridgeSensor(SensorEntity): """Implementation of the DTE Energy Bridge sensors.""" def __init__(self, ip_address, name, version): """Initialize the sensor.""" self._version = version if self._version == 1: self._url = f"http://{ip_address}/instantaneousdemand" elif self._version == 2: self._url = f"http://{ip_address}:8888/zigbee/se/instantaneousdemand" self._name = name self._unit_of_measurement = "kW" self._state = None @property def name(self): """Return the name of th sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def icon(self): """Icon to use in the frontend, if any.""" return ICON def update(self): """Get the energy usage data from the DTE energy bridge.""" try: response = requests.get(self._url, timeout=5) except (requests.exceptions.RequestException, ValueError): _LOGGER.warning( "Could not update status for DTE Energy Bridge (%s)", self._name ) return if response.status_code != HTTP_OK: _LOGGER.warning( "Invalid status_code from DTE Energy Bridge: %s (%s)", response.status_code, self._name, ) return response_split = response.text.split() if len(response_split) != 2: _LOGGER.warning( 'Invalid response from DTE Energy Bridge: "%s" (%s)', response.text, self._name, ) return val = float(response_split[0]) # A workaround for a bug in the DTE energy bridge. # The returned value can randomly be in W or kW. Checking for a # a decimal seems to be a reliable way to determine the units. # Limiting to version 1 because version 2 apparently always returns # values in the format 000000.000 kW, but the scaling is Watts # NOT kWatts if self._version == 1 and "." in response_split[0]: self._state = val else: self._state = val / 1000
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_PREVIOUS
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK
    )
    await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_RESUME
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY
    )
    await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_SHUFFLE
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET
    )
    await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "media_player.bla",
        media_player.ATTR_MEDIA_SHUFFLE: True,
    }

    # COMMAND_MEDIA_STOP
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP
    )
    await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}


@pytest.mark.parametrize(
    "state",
    (
        STATE_OFF,
        STATE_IDLE,
        STATE_PLAYING,
        STATE_ON,
        STATE_PAUSED,
        STATE_STANDBY,
        STATE_UNAVAILABLE,
        STATE_UNKNOWN,
    ),
)
async def test_media_state(hass, state):
    """Test the MediaStateTrait."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    assert trait.TransportControlTrait.supported(
        media_player.DOMAIN, media_player.SUPPORT_PLAY, None
    )

    trt = trait.MediaStateTrait(
        hass,
        State(
            "media_player.bla",
            state,
            {
                media_player.ATTR_MEDIA_POSITION: 100,
                media_player.ATTR_MEDIA_DURATION: 200,
                media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5,
                ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY
                | media_player.SUPPORT_STOP,
            },
        ),
        BASIC_CONFIG,
    )

    assert trt.sync_attributes() == {
        "supportActivityState": True,
        "supportPlaybackState": True,
    }
    assert trt.query_attributes() == {
        "activityState": trt.activity_lookup.get(state),
        "playbackState": trt.playback_lookup.get(state),
    }
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/dte_energy_bridge/sensor.py
"""Constants for the Axis component.""" import logging from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN LOGGER = logging.getLogger(__package__) DOMAIN = "axis" ATTR_MANUFACTURER = "Axis Communications AB" CONF_EVENTS = "events" CONF_MODEL = "model" CONF_STREAM_PROFILE = "stream_profile" CONF_VIDEO_SOURCE = "video_source" DEFAULT_EVENTS = True DEFAULT_STREAM_PROFILE = "No stream profile" DEFAULT_TRIGGER_TIME = 0 DEFAULT_VIDEO_SOURCE = "No video source" PLATFORMS = [BINARY_SENSOR_DOMAIN, CAMERA_DOMAIN, LIGHT_DOMAIN, SWITCH_DOMAIN]
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
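The trait tests above all repeat the same cycle: build a trait from a State, check sync_attributes/query_attributes, register a mock service with async_mock_service, call execute, and assert on the single captured call. The helper below is a hypothetical sketch of how that cycle could be factored out; it is not part of the test module, and the helper name is made up for illustration.

# Hypothetical helper (not in the test module above): factors out the repeated
# mock -> execute -> assert-single-call cycle used by the trait tests.
from tests.common import async_mock_service


async def assert_single_trait_call(
    hass, trt, domain, service, command, params, data, expected
):
    """Execute ``command`` on ``trt`` and assert exactly one captured service call."""
    calls = async_mock_service(hass, domain, service)
    await trt.execute(command, data, params, {})
    assert len(calls) == 1
    assert calls[0].data == expected

With such a helper, the FanSpeed case above would reduce to a single call such as: await assert_single_trait_call(hass, trt, fan.DOMAIN, fan.SERVICE_SET_SPEED, trait.COMMAND_FANSPEED, {"fanSpeed": "medium"}, BASIC_DATA, {"entity_id": "fan.living_room_fan", "speed": "medium"}).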
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/axis/const.py
"""Set up the demo environment that mimics interaction with devices.""" import asyncio from homeassistant import bootstrap, config_entries from homeassistant.const import ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_START import homeassistant.core as ha DOMAIN = "demo" COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM = [ "air_quality", "alarm_control_panel", "binary_sensor", "camera", "climate", "cover", "fan", "humidifier", "light", "lock", "media_player", "number", "sensor", "switch", "vacuum", "water_heater", ] COMPONENTS_WITH_DEMO_PLATFORM = [ "tts", "stt", "mailbox", "notify", "image_processing", "calendar", "device_tracker", ] async def async_setup(hass, config): """Set up the demo environment.""" if DOMAIN not in config: return True if not hass.config_entries.async_entries(DOMAIN): hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={} ) ) # Set up demo platforms for platform in COMPONENTS_WITH_DEMO_PLATFORM: hass.async_create_task( hass.helpers.discovery.async_load_platform(platform, DOMAIN, {}, config) ) config.setdefault(ha.DOMAIN, {}) config.setdefault(DOMAIN, {}) # Set up sun if not hass.config.latitude: hass.config.latitude = 32.87336 if not hass.config.longitude: hass.config.longitude = 117.22743 tasks = [bootstrap.async_setup_component(hass, "sun", config)] # Set up input select tasks.append( bootstrap.async_setup_component( hass, "input_select", { "input_select": { "living_room_preset": { "options": ["Visitors", "Visitors with kids", "Home Alone"] }, "who_cooks": { "icon": "mdi:panda", "initial": "Anne Therese", "name": "Cook today", "options": ["Paulus", "Anne Therese"], }, } }, ) ) # Set up input boolean tasks.append( bootstrap.async_setup_component( hass, "input_boolean", { "input_boolean": { "notify": { "icon": "mdi:car", "initial": False, "name": "Notify Anne Therese is home", } } }, ) ) # Set up input number tasks.append( bootstrap.async_setup_component( hass, "input_number", { "input_number": { "noise_allowance": { "icon": "mdi:bell-ring", "min": 0, "max": 10, "name": "Allowed Noise", "unit_of_measurement": "dB", } } }, ) ) results = await asyncio.gather(*tasks) if any(not result for result in results): return False # Set up example persistent notification hass.components.persistent_notification.async_create( "This is an example of a persistent notification.", title="Example Notification" ) async def demo_start_listener(_event): """Finish set up.""" await finish_setup(hass, config) hass.bus.async_listen(EVENT_HOMEASSISTANT_START, demo_start_listener) return True async def async_setup_entry(hass, config_entry): """Set the config entry up.""" # Set up demo platforms with config entry for platform in COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, platform) ) return True async def finish_setup(hass, config): """Finish set up once demo platforms are set up.""" switches = None lights = None while not switches and not lights: # Not all platforms might be loaded. 
if switches is not None: await asyncio.sleep(0) switches = sorted(hass.states.async_entity_ids("switch")) lights = sorted(hass.states.async_entity_ids("light")) # Set up scripts await bootstrap.async_setup_component( hass, "script", { "script": { "demo": { "alias": f"Toggle {lights[0].split('.')[1]}", "sequence": [ { "service": "light.turn_off", "data": {ATTR_ENTITY_ID: lights[0]}, }, {"delay": {"seconds": 5}}, { "service": "light.turn_on", "data": {ATTR_ENTITY_ID: lights[0]}, }, {"delay": {"seconds": 5}}, { "service": "light.turn_off", "data": {ATTR_ENTITY_ID: lights[0]}, }, ], } } }, ) # Set up scenes await bootstrap.async_setup_component( hass, "scene", { "scene": [ { "name": "Romantic lights", "entities": { lights[0]: True, lights[1]: { "state": "on", "xy_color": [0.33, 0.66], "brightness": 200, }, }, }, { "name": "Switch on and off", "entities": {switches[0]: True, switches[1]: False}, }, ] }, )
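The async_setup shown above only does real work when a "demo" entry is present in the configuration; otherwise it returns True immediately. The snippet below is a hypothetical sketch of a minimal test exercising that branch; the import path and the hass fixture are assumptions based on the standard Home Assistant test environment, not something taken from this file.

# Hypothetical sketch, not part of the demo component: minimal async_setup test.
from homeassistant.setup import async_setup_component  # assumed import path


async def test_demo_setup_noop_without_config(hass):
    # async_setup returns True immediately when no "demo" key is configured;
    # passing {"demo": {}} instead would start the import flow and load the
    # demo platforms listed above.
    assert await async_setup_component(hass, "demo", {})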
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
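# The relative-seek expectation in test_transport_control above follows the inline
# comment there: the reported media position is extrapolated by the wall-clock time
# elapsed since ATTR_MEDIA_POSITION_UPDATED_AT before the relative offset is applied.
# A minimal standalone sketch of that arithmetic (illustrative only, not part of the
# test file; variable names are assumptions):
from datetime import datetime, timedelta

now = datetime(2020, 1, 1)
reported_position = 100  # seconds, as in ATTR_MEDIA_POSITION
updated_at = now - timedelta(seconds=10)  # ATTR_MEDIA_POSITION_UPDATED_AT
relative_ms = 10000  # relativePositionMs from the command payload

elapsed = (now - updated_at).total_seconds()  # 10 s since the state was last reported
target = reported_position + elapsed + relative_ms / 1000  # 100 + 10 + 10
assert target == 120  # matches the ATTR_MEDIA_SEEK_POSITION asserted above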
"""The Wolf SmartSet sensors.""" from wolf_smartset.models import ( HoursParameter, ListItemParameter, Parameter, PercentageParameter, Pressure, SimpleParameter, Temperature, ) from homeassistant.components.sensor import SensorEntity from homeassistant.const import ( DEVICE_CLASS_PRESSURE, DEVICE_CLASS_TEMPERATURE, PRESSURE_BAR, TEMP_CELSIUS, TIME_HOURS, ) from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import COORDINATOR, DEVICE_ID, DOMAIN, PARAMETERS, STATES async def async_setup_entry(hass, config_entry, async_add_entities): """Set up all entries for Wolf Platform.""" coordinator = hass.data[DOMAIN][config_entry.entry_id][COORDINATOR] parameters = hass.data[DOMAIN][config_entry.entry_id][PARAMETERS] device_id = hass.data[DOMAIN][config_entry.entry_id][DEVICE_ID] entities = [] for parameter in parameters: if isinstance(parameter, Temperature): entities.append(WolfLinkTemperature(coordinator, parameter, device_id)) if isinstance(parameter, Pressure): entities.append(WolfLinkPressure(coordinator, parameter, device_id)) if isinstance(parameter, PercentageParameter): entities.append(WolfLinkPercentage(coordinator, parameter, device_id)) if isinstance(parameter, ListItemParameter): entities.append(WolfLinkState(coordinator, parameter, device_id)) if isinstance(parameter, HoursParameter): entities.append(WolfLinkHours(coordinator, parameter, device_id)) if isinstance(parameter, SimpleParameter): entities.append(WolfLinkSensor(coordinator, parameter, device_id)) async_add_entities(entities, True) class WolfLinkSensor(CoordinatorEntity, SensorEntity): """Base class for all Wolf entities.""" def __init__(self, coordinator, wolf_object: Parameter, device_id): """Initialize.""" super().__init__(coordinator) self.wolf_object = wolf_object self.device_id = device_id self._state = None @property def name(self): """Return the name.""" return f"{self.wolf_object.name}" @property def state(self): """Return the state. 
Wolf Client is returning only changed values so we need to store old value here.""" if self.wolf_object.value_id in self.coordinator.data: self._state = self.coordinator.data[self.wolf_object.value_id] return self._state @property def extra_state_attributes(self): """Return the state attributes.""" return { "parameter_id": self.wolf_object.parameter_id, "value_id": self.wolf_object.value_id, "parent": self.wolf_object.parent, } @property def unique_id(self): """Return a unique_id for this entity.""" return f"{self.device_id}:{self.wolf_object.parameter_id}" class WolfLinkHours(WolfLinkSensor): """Class for hour based entities.""" @property def icon(self): """Icon to display in the front Aend.""" return "mdi:clock" @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return TIME_HOURS class WolfLinkTemperature(WolfLinkSensor): """Class for temperature based entities.""" @property def device_class(self): """Return the device_class.""" return DEVICE_CLASS_TEMPERATURE @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return TEMP_CELSIUS class WolfLinkPressure(WolfLinkSensor): """Class for pressure based entities.""" @property def device_class(self): """Return the device_class.""" return DEVICE_CLASS_PRESSURE @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return PRESSURE_BAR class WolfLinkPercentage(WolfLinkSensor): """Class for percentage based entities.""" @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self.wolf_object.unit class WolfLinkState(WolfLinkSensor): """Class for entities which has defined list of state.""" @property def device_class(self): """Return the device class.""" return "wolflink__state" @property def state(self): """Return the state converting with supported values.""" state = super().state resolved_state = [ item for item in self.wolf_object.items if item.value == int(state) ] if resolved_state: resolved_name = resolved_state[0].name return STATES.get(resolved_name, resolved_name) return state
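# WolfLinkSensor.state above keeps the last received value because the Wolf client
# only delivers changed parameters on each coordinator refresh. A minimal standalone
# sketch of that caching pattern (class and key names here are illustrative only,
# not part of the integration):
class LastValueCache:
    """Cache the most recent value for a key that is only reported when it changes."""

    def __init__(self, key):
        self._key = key
        self._last = None

    def update(self, changed_values: dict):
        """Store the new value if this key is present in the partial update."""
        if self._key in changed_values:
            self._last = changed_values[self._key]
        return self._last


cache = LastValueCache("outside_temperature")
assert cache.update({"outside_temperature": 4.5}) == 4.5
assert cache.update({}) == 4.5  # unchanged keys are not re-sent; cached value survives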
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
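# --- Illustrative sketch (not part of the original test file) ---
# The relative-seek assertion in test_transport_control above expects 120 s: the
# trait presumably adds the time elapsed since ATTR_MEDIA_POSITION_UPDATED_AT to
# the stored media position before applying the relative offset. The helper below
# is a hypothetical standalone re-implementation of that arithmetic for clarity,
# not a Home Assistant API.
from datetime import datetime, timedelta


def _expected_seek_position(position, updated_at, now, relative_ms):
    """Return the absolute seek position (seconds) implied by a relative seek."""
    elapsed = (now - updated_at).total_seconds()
    return position + elapsed + relative_ms / 1000


# 100 s stored position + 10 s elapsed since updated_at + 10 s from the command = 120 s
assert _expected_seek_position(
    100, datetime(2020, 1, 1) - timedelta(seconds=10), datetime(2020, 1, 1), 10000
) == 120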
"""Support for Google travel time sensors.""" from datetime import datetime, timedelta import logging import googlemaps import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_API_KEY, CONF_MODE, CONF_NAME, EVENT_HOMEASSISTANT_START, TIME_MINUTES, ) from homeassistant.helpers import location import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Powered by Google" CONF_DESTINATION = "destination" CONF_OPTIONS = "options" CONF_ORIGIN = "origin" CONF_TRAVEL_MODE = "travel_mode" DEFAULT_NAME = "Google Travel Time" SCAN_INTERVAL = timedelta(minutes=5) ALL_LANGUAGES = [ "ar", "bg", "bn", "ca", "cs", "da", "de", "el", "en", "es", "eu", "fa", "fi", "fr", "gl", "gu", "hi", "hr", "hu", "id", "it", "iw", "ja", "kn", "ko", "lt", "lv", "ml", "mr", "nl", "no", "pl", "pt", "pt-BR", "pt-PT", "ro", "ru", "sk", "sl", "sr", "sv", "ta", "te", "th", "tl", "tr", "uk", "vi", "zh-CN", "zh-TW", ] AVOID = ["tolls", "highways", "ferries", "indoor"] TRANSIT_PREFS = ["less_walking", "fewer_transfers"] TRANSPORT_TYPE = ["bus", "subway", "train", "tram", "rail"] TRAVEL_MODE = ["driving", "walking", "bicycling", "transit"] TRAVEL_MODEL = ["best_guess", "pessimistic", "optimistic"] UNITS = ["metric", "imperial"] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_DESTINATION): cv.string, vol.Required(CONF_ORIGIN): cv.string, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_TRAVEL_MODE): vol.In(TRAVEL_MODE), vol.Optional(CONF_OPTIONS, default={CONF_MODE: "driving"}): vol.All( dict, vol.Schema( { vol.Optional(CONF_MODE, default="driving"): vol.In(TRAVEL_MODE), vol.Optional("language"): vol.In(ALL_LANGUAGES), vol.Optional("avoid"): vol.In(AVOID), vol.Optional("units"): vol.In(UNITS), vol.Exclusive("arrival_time", "time"): cv.string, vol.Exclusive("departure_time", "time"): cv.string, vol.Optional("traffic_model"): vol.In(TRAVEL_MODEL), vol.Optional("transit_mode"): vol.In(TRANSPORT_TYPE), vol.Optional("transit_routing_preference"): vol.In(TRANSIT_PREFS), } ), ), } ) TRACKABLE_DOMAINS = ["device_tracker", "sensor", "zone", "person"] DATA_KEY = "google_travel_time" def convert_time_to_utc(timestr): """Take a string like 08:00:00 and convert it to a unix timestamp.""" combined = datetime.combine( dt_util.start_of_local_day(), dt_util.parse_time(timestr) ) if combined < datetime.now(): combined = combined + timedelta(days=1) return dt_util.as_timestamp(combined) def setup_platform(hass, config, add_entities_callback, discovery_info=None): """Set up the Google travel time platform.""" def run_setup(event): """ Delay the setup until Home Assistant is fully initialized. This allows any entities to be created already """ hass.data.setdefault(DATA_KEY, []) options = config.get(CONF_OPTIONS) if options.get("units") is None: options["units"] = hass.config.units.name travel_mode = config.get(CONF_TRAVEL_MODE) mode = options.get(CONF_MODE) if travel_mode is not None: wstr = ( "Google Travel Time: travel_mode is deprecated, please " "add mode to the options dictionary instead!" 
) _LOGGER.warning(wstr) if mode is None: options[CONF_MODE] = travel_mode titled_mode = options.get(CONF_MODE).title() formatted_name = f"{DEFAULT_NAME} - {titled_mode}" name = config.get(CONF_NAME, formatted_name) api_key = config.get(CONF_API_KEY) origin = config.get(CONF_ORIGIN) destination = config.get(CONF_DESTINATION) sensor = GoogleTravelTimeSensor( hass, name, api_key, origin, destination, options ) hass.data[DATA_KEY].append(sensor) if sensor.valid_api_connection: add_entities_callback([sensor]) # Wait until start event is sent to load this component. hass.bus.listen_once(EVENT_HOMEASSISTANT_START, run_setup) class GoogleTravelTimeSensor(SensorEntity): """Representation of a Google travel time sensor.""" def __init__(self, hass, name, api_key, origin, destination, options): """Initialize the sensor.""" self._hass = hass self._name = name self._options = options self._unit_of_measurement = TIME_MINUTES self._matrix = None self.valid_api_connection = True # Check if location is a trackable entity if origin.split(".", 1)[0] in TRACKABLE_DOMAINS: self._origin_entity_id = origin else: self._origin = origin if destination.split(".", 1)[0] in TRACKABLE_DOMAINS: self._destination_entity_id = destination else: self._destination = destination self._client = googlemaps.Client(api_key, timeout=10) try: self.update() except googlemaps.exceptions.ApiError as exp: _LOGGER.error(exp) self.valid_api_connection = False return @property def state(self): """Return the state of the sensor.""" if self._matrix is None: return None _data = self._matrix["rows"][0]["elements"][0] if "duration_in_traffic" in _data: return round(_data["duration_in_traffic"]["value"] / 60) if "duration" in _data: return round(_data["duration"]["value"] / 60) return None @property def name(self): """Get the name of the sensor.""" return self._name @property def extra_state_attributes(self): """Return the state attributes.""" if self._matrix is None: return None res = self._matrix.copy() res.update(self._options) del res["rows"] _data = self._matrix["rows"][0]["elements"][0] if "duration_in_traffic" in _data: res["duration_in_traffic"] = _data["duration_in_traffic"]["text"] if "duration" in _data: res["duration"] = _data["duration"]["text"] if "distance" in _data: res["distance"] = _data["distance"]["text"] res["origin"] = self._origin res["destination"] = self._destination res[ATTR_ATTRIBUTION] = ATTRIBUTION return res @property def unit_of_measurement(self): """Return the unit this state is expressed in.""" return self._unit_of_measurement def update(self): """Get the latest data from Google.""" options_copy = self._options.copy() dtime = options_copy.get("departure_time") atime = options_copy.get("arrival_time") if dtime is not None and ":" in dtime: options_copy["departure_time"] = convert_time_to_utc(dtime) elif dtime is not None: options_copy["departure_time"] = dtime elif atime is None: options_copy["departure_time"] = "now" if atime is not None and ":" in atime: options_copy["arrival_time"] = convert_time_to_utc(atime) elif atime is not None: options_copy["arrival_time"] = atime # Convert device_trackers to google friendly location if hasattr(self, "_origin_entity_id"): self._origin = self._get_location_from_entity(self._origin_entity_id) if hasattr(self, "_destination_entity_id"): self._destination = self._get_location_from_entity( self._destination_entity_id ) self._destination = self._resolve_zone(self._destination) self._origin = self._resolve_zone(self._origin) if self._destination is not None and self._origin is 
not None: self._matrix = self._client.distance_matrix( self._origin, self._destination, **options_copy ) def _get_location_from_entity(self, entity_id): """Get the location from the entity state or attributes.""" entity = self._hass.states.get(entity_id) if entity is None: _LOGGER.error("Unable to find entity %s", entity_id) self.valid_api_connection = False return None # Check if the entity has location attributes if location.has_location(entity): return self._get_location_from_attributes(entity) # Check if device is in a zone zone_entity = self._hass.states.get("zone.%s" % entity.state) if location.has_location(zone_entity): _LOGGER.debug( "%s is in %s, getting zone location", entity_id, zone_entity.entity_id ) return self._get_location_from_attributes(zone_entity) # If zone was not found in state then use the state as the location if entity_id.startswith("sensor."): return entity.state # When everything fails just return nothing return None @staticmethod def _get_location_from_attributes(entity): """Get the lat/long string from an entities attributes.""" attr = entity.attributes return f"{attr.get(ATTR_LATITUDE)},{attr.get(ATTR_LONGITUDE)}" def _resolve_zone(self, friendly_name): entities = self._hass.states.all() for entity in entities: if entity.domain == "zone" and entity.name == friendly_name: return self._get_location_from_attributes(entity) return friendly_name
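# --- Illustrative sketch, not part of the integration above ---
# A minimal, hedged example of how the per-sensor options end up as Distance
# Matrix arguments: an "HH:MM:SS" departure_time is converted to a unix
# timestamp (mirroring convert_time_to_utc()), and the remaining options are
# passed straight through to googlemaps.Client.distance_matrix(). The API key
# and the origin/destination addresses below are hypothetical placeholders.
from datetime import datetime, timedelta

import googlemaps


def _next_occurrence_timestamp(timestr):
    """Convert a local "HH:MM:SS" string to the unix timestamp of its next occurrence."""
    hour, minute, second = (int(part) for part in timestr.split(":"))
    combined = datetime.now().replace(
        hour=hour, minute=minute, second=second, microsecond=0
    )
    if combined < datetime.now():
        combined += timedelta(days=1)
    return int(combined.timestamp())


if __name__ == "__main__":
    # Options shaped like the CONF_OPTIONS dictionary validated by PLATFORM_SCHEMA.
    options = {"mode": "driving", "units": "metric", "departure_time": "08:00:00"}
    if ":" in str(options.get("departure_time", "")):
        options["departure_time"] = _next_occurrence_timestamp(options["departure_time"])

    client = googlemaps.Client("YOUR_API_KEY", timeout=10)  # placeholder key
    matrix = client.distance_matrix("Seattle, WA", "Portland, OR", **options)

    # Same extraction the sensor's state property performs: duration in minutes.
    element = matrix["rows"][0]["elements"][0]
    print(round(element["duration"]["value"] / 60), "minutes")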
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/google_travel_time/sensor.py
"""Support UPNP discovery method that mimics Hue hubs.""" import asyncio import logging import socket from aiohttp import web from homeassistant import core from homeassistant.components.http import HomeAssistantView from .const import HUE_SERIAL_NUMBER, HUE_UUID _LOGGER = logging.getLogger(__name__) BROADCAST_PORT = 1900 BROADCAST_ADDR = "239.255.255.250" class DescriptionXmlView(HomeAssistantView): """Handles requests for the description.xml file.""" url = "/description.xml" name = "description:xml" requires_auth = False def __init__(self, config): """Initialize the instance of the view.""" self.config = config @core.callback def get(self, request): """Handle a GET request.""" resp_text = f"""<?xml version="1.0" encoding="UTF-8" ?> <root xmlns="urn:schemas-upnp-org:device-1-0"> <specVersion> <major>1</major> <minor>0</minor> </specVersion> <URLBase>http://{self.config.advertise_ip}:{self.config.advertise_port}/</URLBase> <device> <deviceType>urn:schemas-upnp-org:device:Basic:1</deviceType> <friendlyName>Home Assistant Bridge ({self.config.advertise_ip})</friendlyName> <manufacturer>Royal Philips Electronics</manufacturer> <manufacturerURL>http://www.philips.com</manufacturerURL> <modelDescription>Philips hue Personal Wireless Lighting</modelDescription> <modelName>Philips hue bridge 2015</modelName> <modelNumber>BSB002</modelNumber> <modelURL>http://www.meethue.com</modelURL> <serialNumber>{HUE_SERIAL_NUMBER}</serialNumber> <UDN>uuid:{HUE_UUID}</UDN> </device> </root> """ return web.Response(text=resp_text, content_type="text/xml") @core.callback def create_upnp_datagram_endpoint( host_ip_addr, upnp_bind_multicast, advertise_ip, advertise_port, ): """Create the UPNP socket and protocol.""" # Listen for UDP port 1900 packets sent to SSDP multicast address ssdp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) ssdp_socket.setblocking(False) # Required for receiving multicast ssdp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) ssdp_socket.setsockopt( socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(host_ip_addr) ) ssdp_socket.setsockopt( socket.SOL_IP, socket.IP_ADD_MEMBERSHIP, socket.inet_aton(BROADCAST_ADDR) + socket.inet_aton(host_ip_addr), ) ssdp_socket.bind(("" if upnp_bind_multicast else host_ip_addr, BROADCAST_PORT)) loop = asyncio.get_event_loop() return loop.create_datagram_endpoint( lambda: UPNPResponderProtocol(loop, ssdp_socket, advertise_ip, advertise_port), sock=ssdp_socket, ) class UPNPResponderProtocol: """Handle responding to UPNP/SSDP discovery requests.""" def __init__(self, loop, ssdp_socket, advertise_ip, advertise_port): """Initialize the class.""" self.transport = None self._loop = loop self._sock = ssdp_socket self.advertise_ip = advertise_ip self.advertise_port = advertise_port self._upnp_root_response = self._prepare_response( "upnp:rootdevice", f"uuid:{HUE_UUID}::upnp:rootdevice" ) self._upnp_device_response = self._prepare_response( "urn:schemas-upnp-org:device:basic:1", f"uuid:{HUE_UUID}" ) def connection_made(self, transport): """Set the transport.""" self.transport = transport def connection_lost(self, exc): """Handle connection lost.""" def datagram_received(self, data, addr): """Respond to msearch packets.""" decoded_data = data.decode("utf-8", errors="ignore") if "M-SEARCH" not in decoded_data: return _LOGGER.debug("UPNP Responder M-SEARCH method received: %s", data) # SSDP M-SEARCH method received, respond to it with our info response = self._handle_request(decoded_data) _LOGGER.debug("UPNP Responder responding with: %s", 
response) self.transport.sendto(response, addr) def error_received(self, exc): # pylint: disable=no-self-use """Log UPNP errors.""" _LOGGER.error("UPNP Error received: %s", exc) def close(self): """Stop the server.""" _LOGGER.info("UPNP responder shutting down") if self.transport: self.transport.close() self._loop.remove_writer(self._sock.fileno()) self._loop.remove_reader(self._sock.fileno()) self._sock.close() def _handle_request(self, decoded_data): if "upnp:rootdevice" in decoded_data: return self._upnp_root_response return self._upnp_device_response def _prepare_response(self, search_target, unique_service_name): # Note that the double newline at the end of # this string is required per the SSDP spec response = f"""HTTP/1.1 200 OK CACHE-CONTROL: max-age=60 EXT: LOCATION: http://{self.advertise_ip}:{self.advertise_port}/description.xml SERVER: FreeRTOS/6.0.5, UPnP/1.0, IpBridge/1.16.0 hue-bridgeid: {HUE_SERIAL_NUMBER} ST: {search_target} USN: {unique_service_name} """ return response.replace("\n", "\r\n").encode("utf-8")
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
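Every trait test in the file above follows the same arrange/act/assert shape: build the trait from a State object, stub the target service with async_mock_service (which returns the list of captured calls), run trt.execute with a Google command, then inspect the recorded call data. The sketch below restates that pattern in its smallest form; it assumes the same imports and fixtures as the test module, and the entity id "switch.example" is an illustrative placeholder, not something from the original file.

# Minimal sketch of the trait-test pattern used throughout these tests.
# Assumes the test module's imports (trait, State, BASIC_CONFIG, etc.);
# the entity id below is a hypothetical placeholder.
async def test_onoff_pattern_sketch(hass):
    """Sketch: an OnOff trait execute() lands as exactly one service call."""
    trt = trait.OnOffTrait(hass, State("switch.example", STATE_OFF), BASIC_CONFIG)

    # async_mock_service swaps the real service for a recorder and
    # returns the list of calls it captures.
    calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON)

    # execute(command, request_data, params, challenge) translates the
    # Google command into a Home Assistant service call.
    await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})

    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "switch.example"}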
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/emulated_hue/upnp.py
"""Support for Google - Calendar Event Devices.""" from datetime import datetime, timedelta import logging import os from googleapiclient import discovery as google_discovery import httplib2 from oauth2client.client import ( FlowExchangeError, OAuth2DeviceCodeError, OAuth2WebServerFlow, ) from oauth2client.file import Storage import voluptuous as vol from voluptuous.error import Error as VoluptuousError import yaml from homeassistant.const import ( CONF_CLIENT_ID, CONF_CLIENT_SECRET, CONF_DEVICE_ID, CONF_ENTITIES, CONF_NAME, CONF_OFFSET, ) from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import generate_entity_id from homeassistant.helpers.event import track_time_change from homeassistant.util import convert, dt _LOGGER = logging.getLogger(__name__) DOMAIN = "google" ENTITY_ID_FORMAT = DOMAIN + ".{}" CONF_TRACK_NEW = "track_new_calendar" CONF_CAL_ID = "cal_id" CONF_TRACK = "track" CONF_SEARCH = "search" CONF_IGNORE_AVAILABILITY = "ignore_availability" CONF_MAX_RESULTS = "max_results" DEFAULT_CONF_TRACK_NEW = True DEFAULT_CONF_OFFSET = "!!" EVENT_CALENDAR_ID = "calendar_id" EVENT_DESCRIPTION = "description" EVENT_END_CONF = "end" EVENT_END_DATE = "end_date" EVENT_END_DATETIME = "end_date_time" EVENT_IN = "in" EVENT_IN_DAYS = "days" EVENT_IN_WEEKS = "weeks" EVENT_START_CONF = "start" EVENT_START_DATE = "start_date" EVENT_START_DATETIME = "start_date_time" EVENT_SUMMARY = "summary" EVENT_TYPES_CONF = "event_types" NOTIFICATION_ID = "google_calendar_notification" NOTIFICATION_TITLE = "Google Calendar Setup" GROUP_NAME_ALL_CALENDARS = "Google Calendar Sensors" SERVICE_SCAN_CALENDARS = "scan_for_calendars" SERVICE_FOUND_CALENDARS = "found_calendar" SERVICE_ADD_EVENT = "add_event" DATA_INDEX = "google_calendars" YAML_DEVICES = f"{DOMAIN}_calendars.yaml" SCOPES = "https://www.googleapis.com/auth/calendar" TOKEN_FILE = f".{DOMAIN}.token" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_CLIENT_ID): cv.string, vol.Required(CONF_CLIENT_SECRET): cv.string, vol.Optional(CONF_TRACK_NEW): cv.boolean, } ) }, extra=vol.ALLOW_EXTRA, ) _SINGLE_CALSEARCH_CONFIG = vol.Schema( { vol.Required(CONF_NAME): cv.string, vol.Required(CONF_DEVICE_ID): cv.string, vol.Optional(CONF_IGNORE_AVAILABILITY, default=True): cv.boolean, vol.Optional(CONF_OFFSET): cv.string, vol.Optional(CONF_SEARCH): cv.string, vol.Optional(CONF_TRACK): cv.boolean, vol.Optional(CONF_MAX_RESULTS): cv.positive_int, } ) DEVICE_SCHEMA = vol.Schema( { vol.Required(CONF_CAL_ID): cv.string, vol.Required(CONF_ENTITIES, None): vol.All( cv.ensure_list, [_SINGLE_CALSEARCH_CONFIG] ), }, extra=vol.ALLOW_EXTRA, ) _EVENT_IN_TYPES = vol.Schema( { vol.Exclusive(EVENT_IN_DAYS, EVENT_TYPES_CONF): cv.positive_int, vol.Exclusive(EVENT_IN_WEEKS, EVENT_TYPES_CONF): cv.positive_int, } ) ADD_EVENT_SERVICE_SCHEMA = vol.Schema( { vol.Required(EVENT_CALENDAR_ID): cv.string, vol.Required(EVENT_SUMMARY): cv.string, vol.Optional(EVENT_DESCRIPTION, default=""): cv.string, vol.Exclusive(EVENT_START_DATE, EVENT_START_CONF): cv.date, vol.Exclusive(EVENT_END_DATE, EVENT_END_CONF): cv.date, vol.Exclusive(EVENT_START_DATETIME, EVENT_START_CONF): cv.datetime, vol.Exclusive(EVENT_END_DATETIME, EVENT_END_CONF): cv.datetime, vol.Exclusive(EVENT_IN, EVENT_START_CONF, EVENT_END_CONF): _EVENT_IN_TYPES, } ) def do_authentication(hass, hass_config, config): """Notify user of actions and authenticate. Notify user of user_code and verification_url then poll until we have an access token. 
""" oauth = OAuth2WebServerFlow( client_id=config[CONF_CLIENT_ID], client_secret=config[CONF_CLIENT_SECRET], scope="https://www.googleapis.com/auth/calendar", redirect_uri="Home-Assistant.io", ) try: dev_flow = oauth.step1_get_device_and_user_codes() except OAuth2DeviceCodeError as err: hass.components.persistent_notification.create( f"Error: {err}<br />You will need to restart hass after fixing." "", title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID, ) return False hass.components.persistent_notification.create( ( f"In order to authorize Home-Assistant to view your calendars " f'you must visit: <a href="{dev_flow.verification_url}" target="_blank">{dev_flow.verification_url}</a> and enter ' f"code: {dev_flow.user_code}" ), title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID, ) def step2_exchange(now): """Keep trying to validate the user_code until it expires.""" if now >= dt.as_local(dev_flow.user_code_expiry): hass.components.persistent_notification.create( "Authentication code expired, please restart " "Home-Assistant and try again", title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID, ) listener() try: credentials = oauth.step2_exchange(device_flow_info=dev_flow) except FlowExchangeError: # not ready yet, call again return storage = Storage(hass.config.path(TOKEN_FILE)) storage.put(credentials) do_setup(hass, hass_config, config) listener() hass.components.persistent_notification.create( ( f"We are all setup now. Check {YAML_DEVICES} for calendars that have " f"been found" ), title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID, ) listener = track_time_change( hass, step2_exchange, second=range(0, 60, dev_flow.interval) ) return True def setup(hass, config): """Set up the Google platform.""" if DATA_INDEX not in hass.data: hass.data[DATA_INDEX] = {} conf = config.get(DOMAIN, {}) if not conf: # component is set up by tts platform return True token_file = hass.config.path(TOKEN_FILE) if not os.path.isfile(token_file): do_authentication(hass, config, conf) else: if not check_correct_scopes(token_file): do_authentication(hass, config, conf) else: do_setup(hass, config, conf) return True def check_correct_scopes(token_file): """Check for the correct scopes in file.""" tokenfile = open(token_file).read() if "readonly" in tokenfile: _LOGGER.warning("Please re-authenticate with Google") return False return True def setup_services(hass, hass_config, track_new_found_calendars, calendar_service): """Set up the service listeners.""" def _found_calendar(call): """Check if we know about a calendar and generate PLATFORM_DISCOVER.""" calendar = get_calendar_info(hass, call.data) if hass.data[DATA_INDEX].get(calendar[CONF_CAL_ID]) is not None: return hass.data[DATA_INDEX].update({calendar[CONF_CAL_ID]: calendar}) update_config( hass.config.path(YAML_DEVICES), hass.data[DATA_INDEX][calendar[CONF_CAL_ID]] ) discovery.load_platform( hass, "calendar", DOMAIN, hass.data[DATA_INDEX][calendar[CONF_CAL_ID]], hass_config, ) hass.services.register(DOMAIN, SERVICE_FOUND_CALENDARS, _found_calendar) def _scan_for_calendars(service): """Scan for new calendars.""" service = calendar_service.get() cal_list = service.calendarList() calendars = cal_list.list().execute()["items"] for calendar in calendars: calendar["track"] = track_new_found_calendars hass.services.call(DOMAIN, SERVICE_FOUND_CALENDARS, calendar) hass.services.register(DOMAIN, SERVICE_SCAN_CALENDARS, _scan_for_calendars) def _add_event(call): """Add a new event to calendar.""" service = calendar_service.get() start = {} end = {} 
if EVENT_IN in call.data: if EVENT_IN_DAYS in call.data[EVENT_IN]: now = datetime.now() start_in = now + timedelta(days=call.data[EVENT_IN][EVENT_IN_DAYS]) end_in = start_in + timedelta(days=1) start = {"date": start_in.strftime("%Y-%m-%d")} end = {"date": end_in.strftime("%Y-%m-%d")} elif EVENT_IN_WEEKS in call.data[EVENT_IN]: now = datetime.now() start_in = now + timedelta(weeks=call.data[EVENT_IN][EVENT_IN_WEEKS]) end_in = start_in + timedelta(days=1) start = {"date": start_in.strftime("%Y-%m-%d")} end = {"date": end_in.strftime("%Y-%m-%d")} elif EVENT_START_DATE in call.data: start = {"date": str(call.data[EVENT_START_DATE])} end = {"date": str(call.data[EVENT_END_DATE])} elif EVENT_START_DATETIME in call.data: start_dt = str( call.data[EVENT_START_DATETIME].strftime("%Y-%m-%dT%H:%M:%S") ) end_dt = str(call.data[EVENT_END_DATETIME].strftime("%Y-%m-%dT%H:%M:%S")) start = {"dateTime": start_dt, "timeZone": str(hass.config.time_zone)} end = {"dateTime": end_dt, "timeZone": str(hass.config.time_zone)} event = { "summary": call.data[EVENT_SUMMARY], "description": call.data[EVENT_DESCRIPTION], "start": start, "end": end, } service_data = {"calendarId": call.data[EVENT_CALENDAR_ID], "body": event} event = service.events().insert(**service_data).execute() hass.services.register( DOMAIN, SERVICE_ADD_EVENT, _add_event, schema=ADD_EVENT_SERVICE_SCHEMA ) return True def do_setup(hass, hass_config, config): """Run the setup after we have everything configured.""" # Load calendars the user has configured hass.data[DATA_INDEX] = load_config(hass.config.path(YAML_DEVICES)) calendar_service = GoogleCalendarService(hass.config.path(TOKEN_FILE)) track_new_found_calendars = convert( config.get(CONF_TRACK_NEW), bool, DEFAULT_CONF_TRACK_NEW ) setup_services(hass, hass_config, track_new_found_calendars, calendar_service) for calendar in hass.data[DATA_INDEX].values(): discovery.load_platform(hass, "calendar", DOMAIN, calendar, hass_config) # Look for any new calendars hass.services.call(DOMAIN, SERVICE_SCAN_CALENDARS, None) return True class GoogleCalendarService: """Calendar service interface to Google.""" def __init__(self, token_file): """Init the Google Calendar service.""" self.token_file = token_file def get(self): """Get the calendar service from the storage file token.""" credentials = Storage(self.token_file).get() http = credentials.authorize(httplib2.Http()) service = google_discovery.build( "calendar", "v3", http=http, cache_discovery=False ) return service def get_calendar_info(hass, calendar): """Convert data from Google into DEVICE_SCHEMA.""" calendar_info = DEVICE_SCHEMA( { CONF_CAL_ID: calendar["id"], CONF_ENTITIES: [ { CONF_TRACK: calendar["track"], CONF_NAME: calendar["summary"], CONF_DEVICE_ID: generate_entity_id( "{}", calendar["summary"], hass=hass ), } ], } ) return calendar_info def load_config(path): """Load the google_calendar_devices.yaml.""" calendars = {} try: with open(path) as file: data = yaml.safe_load(file) for calendar in data: try: calendars.update({calendar[CONF_CAL_ID]: DEVICE_SCHEMA(calendar)}) except VoluptuousError as exception: # keep going _LOGGER.warning("Calendar Invalid Data: %s", exception) except FileNotFoundError: # When YAML file could not be loaded/did not contain a dict return {} return calendars def update_config(path, calendar): """Write the google_calendar_devices.yaml.""" with open(path, "a") as out: out.write("\n") yaml.dump([calendar], out, default_flow_style=False)
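For reference, load_config() and update_config() above round-trip per-calendar entries through the google_calendars.yaml file, and get_calendar_info() is what shapes a discovered calendar into that structure. The snippet below is a small sketch of that shape, assuming the constants and DEVICE_SCHEMA defined in the component above; the calendar id and names are hypothetical values, not taken from the source.

# Sketch of the entry get_calendar_info() builds and update_config() appends
# to google_calendars.yaml; all concrete values here are hypothetical.
example_calendar = {
    CONF_CAL_ID: "family@group.calendar.google.com",  # hypothetical calendar id
    CONF_ENTITIES: [
        {
            CONF_TRACK: True,
            CONF_NAME: "Family",
            CONF_DEVICE_ID: "family",
        }
    ],
}

# DEVICE_SCHEMA validates the entry and fills optional defaults (for example
# ignore_availability=True) before load_config() keys it by cal_id.
validated = DEVICE_SCHEMA(example_calendar)
assert validated[CONF_CAL_ID] == "family@group.calendar.google.com"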
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
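A note on the final assertions above: the expected values are built with trt.activity_lookup.get(state) and trt.playback_lookup.get(state), and dict.get without a default yields None for states missing from those tables, so the parametrized STATE_UNKNOWN and STATE_UNAVAILABLE cases are covered without special-casing. A minimal sketch of that behavior (the mapping below is an illustrative stand-in, not the trait's real tables):

# Illustrative stand-in only, not the actual MediaStateTrait lookup tables.
activity_lookup = {"playing": "ACTIVE", "idle": "STANDBY"}

# dict.get returns None for keys that are not present, so states absent from
# the table simply produce None in the expected query_attributes dictionary.
assert activity_lookup.get("playing") == "ACTIVE"
assert activity_lookup.get("unavailable") is None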
repo_name: sander76/home-assistant
test_path: tests/components/google_assistant/test_trait.py
code_path: homeassistant/components/google/__init__.py
"""Support for monitoring the Deluge BitTorrent client API.""" import logging from deluge_client import DelugeRPCClient, FailedToReconnectException import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import ( CONF_HOST, CONF_MONITORED_VARIABLES, CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, DATA_RATE_KILOBYTES_PER_SECOND, STATE_IDLE, ) from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) _THROTTLED_REFRESH = None DEFAULT_NAME = "Deluge" DEFAULT_PORT = 58846 DHT_UPLOAD = 1000 DHT_DOWNLOAD = 1000 SENSOR_TYPES = { "current_status": ["Status", None], "download_speed": ["Down Speed", DATA_RATE_KILOBYTES_PER_SECOND], "upload_speed": ["Up Speed", DATA_RATE_KILOBYTES_PER_SECOND], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_MONITORED_VARIABLES, default=[]): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] ), } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Deluge sensors.""" name = config[CONF_NAME] host = config[CONF_HOST] username = config[CONF_USERNAME] password = config[CONF_PASSWORD] port = config[CONF_PORT] deluge_api = DelugeRPCClient(host, port, username, password) try: deluge_api.connect() except ConnectionRefusedError as err: _LOGGER.error("Connection to Deluge Daemon failed") raise PlatformNotReady from err dev = [] for variable in config[CONF_MONITORED_VARIABLES]: dev.append(DelugeSensor(variable, deluge_api, name)) add_entities(dev) class DelugeSensor(SensorEntity): """Representation of a Deluge sensor.""" def __init__(self, sensor_type, deluge_client, client_name): """Initialize the sensor.""" self._name = SENSOR_TYPES[sensor_type][0] self.client = deluge_client self.type = sensor_type self.client_name = client_name self._state = None self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] self.data = None self._available = False @property def name(self): """Return the name of the sensor.""" return f"{self.client_name} {self._name}" @property def state(self): """Return the state of the sensor.""" return self._state @property def available(self): """Return true if device is available.""" return self._available @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement def update(self): """Get the latest data from Deluge and updates the state.""" try: self.data = self.client.call( "core.get_session_status", [ "upload_rate", "download_rate", "dht_upload_rate", "dht_download_rate", ], ) self._available = True except FailedToReconnectException: _LOGGER.error("Connection to Deluge Daemon Lost") self._available = False return upload = self.data[b"upload_rate"] - self.data[b"dht_upload_rate"] download = self.data[b"download_rate"] - self.data[b"dht_download_rate"] if self.type == "current_status": if self.data: if upload > 0 and download > 0: self._state = "Up/Down" elif upload > 0 and download == 0: self._state = "Seeding" elif upload == 0 and download > 0: self._state = "Downloading" else: self._state = STATE_IDLE else: self._state = None if self.data: if self.type == "download_speed": kb_spd = float(download) kb_spd = kb_spd / 1024 self._state = round(kb_spd, 2 if 
kb_spd < 0.1 else 1) elif self.type == "upload_speed": kb_spd = float(upload) kb_spd = kb_spd / 1024 self._state = round(kb_spd, 2 if kb_spd < 0.1 else 1)
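The update() method above converts the raw byte-per-second session rates from core.get_session_status into KB/s, keeping two decimal places only for very slow rates. A minimal sketch of that rounding rule with illustrative input values (the helper name is ours, not part of the integration):

# Sketch of the KB/s conversion used by the speed sensors; inputs are illustrative.
def to_kb_per_s(rate_bytes_per_s: float) -> float:
    kb_spd = float(rate_bytes_per_s) / 1024
    return round(kb_spd, 2 if kb_spd < 0.1 else 1)

assert to_kb_per_s(51.2) == 0.05      # slow rates keep two decimal places
assert to_kb_per_s(204800) == 200.0   # faster rates are rounded to one decimal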
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_PREVIOUS
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK
    )
    await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_RESUME
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY
    )
    await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_SHUFFLE
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET
    )
    await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "media_player.bla",
        media_player.ATTR_MEDIA_SHUFFLE: True,
    }

    # COMMAND_MEDIA_STOP
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP
    )
    await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}


@pytest.mark.parametrize(
    "state",
    (
        STATE_OFF,
        STATE_IDLE,
        STATE_PLAYING,
        STATE_ON,
        STATE_PAUSED,
        STATE_STANDBY,
        STATE_UNAVAILABLE,
        STATE_UNKNOWN,
    ),
)
async def test_media_state(hass, state):
    """Test the MediaStateTrait."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    assert trait.TransportControlTrait.supported(
        media_player.DOMAIN, media_player.SUPPORT_PLAY, None
    )

    trt = trait.MediaStateTrait(
        hass,
        State(
            "media_player.bla",
            state,
            {
                media_player.ATTR_MEDIA_POSITION: 100,
                media_player.ATTR_MEDIA_DURATION: 200,
                media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5,
                ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY
                | media_player.SUPPORT_STOP,
            },
        ),
        BASIC_CONFIG,
    )

    assert trt.sync_attributes() == {
        "supportActivityState": True,
        "supportPlaybackState": True,
    }
    assert trt.query_attributes() == {
        "activityState": trt.activity_lookup.get(state),
        "playbackState": trt.playback_lookup.get(state),
    }
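A hedged aside, not part of the original test file: the expected seek position asserted in the relativePositionMs case of test_transport_control above is plain arithmetic over the state attributes (stored position, time elapsed since it was recorded, and the requested jump). The helper below only restates that sum for clarity; the name is illustrative and is not the integration's code.

def _expected_seek_position(position, updated_at, now, relative_ms):
    # stored media position + seconds elapsed since it was recorded + requested jump
    return position + (now - updated_at).total_seconds() + relative_ms / 1000


# 100 s stored + 10 s elapsed + 10 s jump == the 120 asserted above.
assert _expected_seek_position(
    100, datetime(2020, 1, 1) - timedelta(seconds=10), datetime(2020, 1, 1), 10000
) == 120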
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/deluge/sensor.py
"""Support for monitoring an SABnzbd NZB client.""" from homeassistant.components.sensor import SensorEntity from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import DATA_SABNZBD, SENSOR_TYPES, SIGNAL_SABNZBD_UPDATED async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the SABnzbd sensors.""" if discovery_info is None: return sab_api_data = hass.data[DATA_SABNZBD] sensors = sab_api_data.sensors client_name = sab_api_data.name async_add_entities( [SabnzbdSensor(sensor, sab_api_data, client_name) for sensor in sensors] ) class SabnzbdSensor(SensorEntity): """Representation of an SABnzbd sensor.""" def __init__(self, sensor_type, sabnzbd_api_data, client_name): """Initialize the sensor.""" self._client_name = client_name self._field_name = SENSOR_TYPES[sensor_type][2] self._name = SENSOR_TYPES[sensor_type][0] self._sabnzbd_api = sabnzbd_api_data self._state = None self._type = sensor_type self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] async def async_added_to_hass(self): """Call when entity about to be added to hass.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_SABNZBD_UPDATED, self.update_state ) ) @property def name(self): """Return the name of the sensor.""" return f"{self._client_name} {self._name}" @property def state(self): """Return the state of the sensor.""" return self._state @property def should_poll(self): """Don't poll. Will be updated by dispatcher signal.""" return False @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement def update_state(self, args): """Get the latest data and updates the states.""" self._state = self._sabnzbd_api.get_queue_field(self._field_name) if self._type == "speed": self._state = round(float(self._state) / 1024, 1) elif "size" in self._type: self._state = round(float(self._state), 2) self.schedule_update_ha_state()
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
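The seek-relative assertion in test_transport_control above (120 s) follows from adding the reported media position, the wall-clock time elapsed since ATTR_MEDIA_POSITION_UPDATED_AT, and the requested relative offset. A minimal sketch of that arithmetic, using a hypothetical helper rather than the trait's actual implementation:

from datetime import datetime, timedelta


def expected_seek_position(position, updated_at, now, relative_ms):
    """Hypothetical helper: absolute seek target in seconds."""
    elapsed = (now - updated_at).total_seconds()
    return position + elapsed + relative_ms / 1000


now = datetime(2020, 1, 1)
# 100 s reported position + 10 s elapsed since updated_at + 10 s requested offset -> 120 s
assert expected_seek_position(100, now - timedelta(seconds=10), now, 10_000) == 120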
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/sabnzbd/sensor.py
"""Support for interacting with Digital Ocean droplets.""" import logging import voluptuous as vol from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity from homeassistant.const import ATTR_ATTRIBUTION import homeassistant.helpers.config_validation as cv from . import ( ATTR_CREATED_AT, ATTR_DROPLET_ID, ATTR_DROPLET_NAME, ATTR_FEATURES, ATTR_IPV4_ADDRESS, ATTR_IPV6_ADDRESS, ATTR_MEMORY, ATTR_REGION, ATTR_VCPUS, ATTRIBUTION, CONF_DROPLETS, DATA_DIGITAL_OCEAN, ) _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "Droplet" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_DROPLETS): vol.All(cv.ensure_list, [cv.string])} ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Digital Ocean droplet switch.""" digital = hass.data.get(DATA_DIGITAL_OCEAN) if not digital: return False droplets = config[CONF_DROPLETS] dev = [] for droplet in droplets: droplet_id = digital.get_droplet_id(droplet) if droplet_id is None: _LOGGER.error("Droplet %s is not available", droplet) return False dev.append(DigitalOceanSwitch(digital, droplet_id)) add_entities(dev, True) class DigitalOceanSwitch(SwitchEntity): """Representation of a Digital Ocean droplet switch.""" def __init__(self, do, droplet_id): """Initialize a new Digital Ocean sensor.""" self._digital_ocean = do self._droplet_id = droplet_id self.data = None self._state = None @property def name(self): """Return the name of the switch.""" return self.data.name @property def is_on(self): """Return true if switch is on.""" return self.data.status == "active" @property def extra_state_attributes(self): """Return the state attributes of the Digital Ocean droplet.""" return { ATTR_ATTRIBUTION: ATTRIBUTION, ATTR_CREATED_AT: self.data.created_at, ATTR_DROPLET_ID: self.data.id, ATTR_DROPLET_NAME: self.data.name, ATTR_FEATURES: self.data.features, ATTR_IPV4_ADDRESS: self.data.ip_address, ATTR_IPV6_ADDRESS: self.data.ip_v6_address, ATTR_MEMORY: self.data.memory, ATTR_REGION: self.data.region["name"], ATTR_VCPUS: self.data.vcpus, } def turn_on(self, **kwargs): """Boot-up the droplet.""" if self.data.status != "active": self.data.power_on() def turn_off(self, **kwargs): """Shutdown the droplet.""" if self.data.status == "active": self.data.power_off() def update(self): """Get the latest data from the device and update the data.""" self._digital_ocean.update() for droplet in self._digital_ocean.data: if droplet.id == self._droplet_id: self.data = droplet
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
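# --- Editor's addition: illustrative sketch, not part of the original test suite. ---
# Every trait test above follows the same pattern: build a State carrying the
# attributes the trait reads, wrap it in the trait class, check
# sync_attributes() / query_attributes(), then mock the backing service and
# execute a Google Assistant command against it. A minimal sketch of that
# pattern, mirroring test_onoff_switch; the entity id "switch.example" is a
# hypothetical placeholder.
async def _example_trait_test_pattern(hass):
    """Illustrative only: exercise OnOffTrait the way the tests above do."""
    trt = trait.OnOffTrait(hass, State("switch.example", STATE_ON), BASIC_CONFIG)
    # sync_attributes() describes capabilities; query_attributes() reports state.
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {"on": True}
    # Mock the service the trait will call, then execute the command.
    off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF)
    await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.example"}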
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/digital_ocean/switch.py
"""Constants for the sentry integration.""" import logging DOMAIN = "sentry" CONF_DSN = "dsn" CONF_ENVIRONMENT = "environment" CONF_EVENT_CUSTOM_COMPONENTS = "event_custom_components" CONF_EVENT_HANDLED = "event_handled" CONF_EVENT_THIRD_PARTY_PACKAGES = "event_third_party_packages" CONF_LOGGING_EVENT_LEVEL = "logging_event_level" CONF_LOGGING_LEVEL = "logging_level" CONF_TRACING = "tracing" CONF_TRACING_SAMPLE_RATE = "tracing_sample_rate" DEFAULT_LOGGING_EVENT_LEVEL = logging.ERROR DEFAULT_LOGGING_LEVEL = logging.WARNING DEFAULT_TRACING_SAMPLE_RATE = 1.0 LOGGING_LEVELS = { logging.DEBUG: "debug", logging.INFO: "info", logging.WARNING: "warning", logging.ERROR: "error", logging.CRITICAL: "critical", } ENTITY_COMPONENTS = [ "air_quality", "alarm_control_panel", "binary_sensor", "calendar", "camera", "climate", "cover", "device_tracker", "fan", "geo_location", "group", "humidifier", "light", "lock", "media_player", "remote", "scene", "sensor", "switch", "vacuum", "water_heater", "weather", ]
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
repo_name: sander76/home-assistant
test_path: tests/components/google_assistant/test_trait.py
code_path: homeassistant/components/sentry/const.py
"""The Tasmota integration.""" import asyncio import logging from hatasmota.const import ( CONF_MAC, CONF_MANUFACTURER, CONF_MODEL, CONF_NAME, CONF_SW_VERSION, ) from hatasmota.discovery import clear_discovery_topic from hatasmota.mqtt import TasmotaMQTTClient import voluptuous as vol from homeassistant.components import mqtt, websocket_api from homeassistant.components.mqtt.subscription import ( async_subscribe_topics, async_unsubscribe_topics, ) from homeassistant.core import callback from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, EVENT_DEVICE_REGISTRY_UPDATED, async_entries_for_config_entry, ) from . import device_automation, discovery from .const import ( CONF_DISCOVERY_PREFIX, DATA_REMOVE_DISCOVER_COMPONENT, DATA_UNSUB, DOMAIN, PLATFORMS, ) _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry): """Set up Tasmota from a config entry.""" websocket_api.async_register_command(hass, websocket_remove_device) hass.data[DATA_UNSUB] = [] def _publish(*args, **kwds): mqtt.async_publish(hass, *args, **kwds) async def _subscribe_topics(sub_state, topics): # Optionally mark message handlers as callback for topic in topics.values(): if "msg_callback" in topic and "event_loop_safe" in topic: topic["msg_callback"] = callback(topic["msg_callback"]) return await async_subscribe_topics(hass, sub_state, topics) async def _unsubscribe_topics(sub_state): return await async_unsubscribe_topics(hass, sub_state) tasmota_mqtt = TasmotaMQTTClient(_publish, _subscribe_topics, _unsubscribe_topics) device_registry = await hass.helpers.device_registry.async_get_registry() def async_discover_device(config, mac): """Discover and add a Tasmota device.""" async_setup_device(hass, mac, config, entry, tasmota_mqtt, device_registry) async def async_device_removed(event): """Handle the removal of a device.""" device_registry = await hass.helpers.device_registry.async_get_registry() if event.data["action"] != "remove": return device = device_registry.deleted_devices[event.data["device_id"]] if entry.entry_id not in device.config_entries: return macs = [c[1] for c in device.connections if c[0] == CONNECTION_NETWORK_MAC] for mac in macs: clear_discovery_topic(mac, entry.data[CONF_DISCOVERY_PREFIX], tasmota_mqtt) hass.data[DATA_UNSUB].append( hass.bus.async_listen(EVENT_DEVICE_REGISTRY_UPDATED, async_device_removed) ) async def start_platforms(): await device_automation.async_setup_entry(hass, entry) await asyncio.gather( *[ hass.config_entries.async_forward_entry_setup(entry, platform) for platform in PLATFORMS ] ) discovery_prefix = entry.data[CONF_DISCOVERY_PREFIX] await discovery.async_start( hass, discovery_prefix, entry, tasmota_mqtt, async_discover_device ) hass.async_create_task(start_platforms()) return True async def async_unload_entry(hass, entry): """Unload a config entry.""" # cleanup platforms unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, platform) for platform in PLATFORMS ] ) ) if not unload_ok: return False # disable discovery await discovery.async_stop(hass) # cleanup subscriptions for unsub in hass.data[DATA_UNSUB]: unsub() hass.data.pop(DATA_REMOVE_DISCOVER_COMPONENT.format("device_automation"))() for platform in PLATFORMS: hass.data.pop(DATA_REMOVE_DISCOVER_COMPONENT.format(platform))() # deattach device triggers device_registry = await hass.helpers.device_registry.async_get_registry() devices = async_entries_for_config_entry(device_registry, entry.entry_id) for device in devices: await 
device_automation.async_remove_automations(hass, device.id) return True def _remove_device(hass, config_entry, mac, tasmota_mqtt, device_registry): """Remove device from device registry.""" device = device_registry.async_get_device(set(), {(CONNECTION_NETWORK_MAC, mac)}) if device is None: return _LOGGER.debug("Removing tasmota device %s", mac) device_registry.async_remove_device(device.id) clear_discovery_topic(mac, config_entry.data[CONF_DISCOVERY_PREFIX], tasmota_mqtt) def _update_device(hass, config_entry, config, device_registry): """Add or update device registry.""" config_entry_id = config_entry.entry_id device_info = { "connections": {(CONNECTION_NETWORK_MAC, config[CONF_MAC])}, "manufacturer": config[CONF_MANUFACTURER], "model": config[CONF_MODEL], "name": config[CONF_NAME], "sw_version": config[CONF_SW_VERSION], "config_entry_id": config_entry_id, } _LOGGER.debug("Adding or updating tasmota device %s", config[CONF_MAC]) device_registry.async_get_or_create(**device_info) def async_setup_device(hass, mac, config, config_entry, tasmota_mqtt, device_registry): """Set up the Tasmota device.""" if not config: _remove_device(hass, config_entry, mac, tasmota_mqtt, device_registry) else: _update_device(hass, config_entry, config, device_registry) @websocket_api.websocket_command( {vol.Required("type"): "tasmota/device/remove", vol.Required("device_id"): str} ) @websocket_api.async_response async def websocket_remove_device(hass, connection, msg): """Delete device.""" device_id = msg["device_id"] dev_registry = await hass.helpers.device_registry.async_get_registry() device = dev_registry.async_get(device_id) if not device: connection.send_error( msg["id"], websocket_api.const.ERR_NOT_FOUND, "Device not found" ) return for config_entry in device.config_entries: config_entry = hass.config_entries.async_get_entry(config_entry) # Only delete the device if it belongs to a Tasmota device entry if config_entry.domain == DOMAIN: dev_registry.async_remove_device(device_id) connection.send_message(websocket_api.result_message(msg["id"])) return connection.send_error( msg["id"], websocket_api.const.ERR_NOT_FOUND, "Non Tasmota device" )
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
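# Illustrative aside (not part of the test module above): the trait tests share one
# pattern -- build the trait from a faked State, compare sync_attributes() and
# query_attributes() against the expected Google payloads, then mock the backing
# service and assert the call made by execute(). A minimal sketch, using OnOffTrait
# (already exercised above) as a stand-in for any trait:
#
#     trt = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG)
#     assert trt.sync_attributes() == {}
#     assert trt.query_attributes() == {"on": True}
#     calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF)
#     await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
#     assert calls[0].data == {ATTR_ENTITY_ID: "switch.bla"}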
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/tasmota/__init__.py
"""Support for Vera devices.""" from __future__ import annotations import asyncio from collections import defaultdict import logging from typing import Any, Generic, TypeVar import pyvera as veraApi from requests.exceptions import RequestException import voluptuous as vol from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ARMED, ATTR_BATTERY_LEVEL, ATTR_LAST_TRIP_TIME, ATTR_TRIPPED, CONF_EXCLUDE, CONF_LIGHTS, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import convert, slugify from homeassistant.util.dt import utc_from_timestamp from .common import ( ControllerData, SubscriptionRegistry, get_configured_platforms, get_controller_data, set_controller_data, ) from .config_flow import fix_device_id_list, new_options from .const import ( ATTR_CURRENT_ENERGY_KWH, ATTR_CURRENT_POWER_W, CONF_CONTROLLER, CONF_LEGACY_UNIQUE_ID, DOMAIN, VERA_ID_FORMAT, ) _LOGGER = logging.getLogger(__name__) VERA_ID_LIST_SCHEMA = vol.Schema([int]) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_CONTROLLER): cv.url, vol.Optional(CONF_EXCLUDE, default=[]): VERA_ID_LIST_SCHEMA, vol.Optional(CONF_LIGHTS, default=[]): VERA_ID_LIST_SCHEMA, } ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass: HomeAssistant, base_config: dict) -> bool: """Set up for Vera controllers.""" hass.data[DOMAIN] = {} config = base_config.get(DOMAIN) if not config: return True hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=config, ) ) return True async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Do setup of vera.""" # Use options entered during initial config flow or provided from configuration.yml if config_entry.data.get(CONF_LIGHTS) or config_entry.data.get(CONF_EXCLUDE): hass.config_entries.async_update_entry( entry=config_entry, data=config_entry.data, options=new_options( config_entry.data.get(CONF_LIGHTS, []), config_entry.data.get(CONF_EXCLUDE, []), ), ) saved_light_ids = config_entry.options.get(CONF_LIGHTS, []) saved_exclude_ids = config_entry.options.get(CONF_EXCLUDE, []) base_url = config_entry.data[CONF_CONTROLLER] light_ids = fix_device_id_list(saved_light_ids) exclude_ids = fix_device_id_list(saved_exclude_ids) # If the ids were corrected. Update the config entry. if light_ids != saved_light_ids or exclude_ids != saved_exclude_ids: hass.config_entries.async_update_entry( entry=config_entry, options=new_options(light_ids, exclude_ids) ) # Initialize the Vera controller. subscription_registry = SubscriptionRegistry(hass) controller = veraApi.VeraController(base_url, subscription_registry) try: all_devices = await hass.async_add_executor_job(controller.get_devices) all_scenes = await hass.async_add_executor_job(controller.get_scenes) except RequestException as exception: # There was a network related error connecting to the Vera controller. _LOGGER.exception("Error communicating with Vera API") raise ConfigEntryNotReady from exception # Exclude devices unwanted by user. 
devices = [device for device in all_devices if device.device_id not in exclude_ids] vera_devices = defaultdict(list) for device in devices: device_type = map_vera_device(device, light_ids) if device_type is not None: vera_devices[device_type].append(device) vera_scenes = [] for scene in all_scenes: vera_scenes.append(scene) controller_data = ControllerData( controller=controller, devices=vera_devices, scenes=vera_scenes, config_entry=config_entry, ) set_controller_data(hass, config_entry, controller_data) # Forward the config data to the necessary platforms. for platform in get_configured_platforms(controller_data): hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, platform) ) def stop_subscription(event): """Stop SubscriptionRegistry updates.""" controller.stop() await hass.async_add_executor_job(controller.start) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_subscription) return True async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload Withings config entry.""" controller_data: ControllerData = get_controller_data(hass, config_entry) tasks = [ hass.config_entries.async_forward_entry_unload(config_entry, platform) for platform in get_configured_platforms(controller_data) ] tasks.append(hass.async_add_executor_job(controller_data.controller.stop)) await asyncio.gather(*tasks) return True def map_vera_device(vera_device: veraApi.VeraDevice, remap: list[int]) -> str: """Map vera classes to Home Assistant types.""" type_map = { veraApi.VeraDimmer: "light", veraApi.VeraBinarySensor: "binary_sensor", veraApi.VeraSensor: "sensor", veraApi.VeraArmableDevice: "switch", veraApi.VeraLock: "lock", veraApi.VeraThermostat: "climate", veraApi.VeraCurtain: "cover", veraApi.VeraSceneController: "sensor", veraApi.VeraSwitch: "switch", } def map_special_case(instance_class: type, entity_type: str) -> str: if instance_class is veraApi.VeraSwitch and vera_device.device_id in remap: return "light" return entity_type return next( iter( map_special_case(instance_class, entity_type) for instance_class, entity_type in type_map.items() if isinstance(vera_device, instance_class) ), None, ) DeviceType = TypeVar("DeviceType", bound=veraApi.VeraDevice) class VeraDevice(Generic[DeviceType], Entity): """Representation of a Vera device entity.""" def __init__(self, vera_device: DeviceType, controller_data: ControllerData): """Initialize the device.""" self.vera_device = vera_device self.controller = controller_data.controller self._name = self.vera_device.name # Append device id to prevent name clashes in HA. 
self.vera_id = VERA_ID_FORMAT.format( slugify(vera_device.name), vera_device.vera_device_id ) if controller_data.config_entry.data.get(CONF_LEGACY_UNIQUE_ID): self._unique_id = str(self.vera_device.vera_device_id) else: self._unique_id = f"vera_{controller_data.config_entry.unique_id}_{self.vera_device.vera_device_id}" async def async_added_to_hass(self) -> None: """Subscribe to updates.""" self.controller.register(self.vera_device, self._update_callback) def _update_callback(self, _device: DeviceType) -> None: """Update the state.""" self.schedule_update_ha_state(True) def update(self): """Force a refresh from the device if the device is unavailable.""" refresh_needed = self.vera_device.should_poll or not self.available _LOGGER.debug("%s: update called (refresh=%s)", self._name, refresh_needed) if refresh_needed: self.vera_device.refresh() @property def name(self) -> str: """Return the name of the device.""" return self._name @property def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes of the device.""" attr = {} if self.vera_device.has_battery: attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level if self.vera_device.is_armable: armed = self.vera_device.is_armed attr[ATTR_ARMED] = "True" if armed else "False" if self.vera_device.is_trippable: last_tripped = self.vera_device.last_trip if last_tripped is not None: utc_time = utc_from_timestamp(int(last_tripped)) attr[ATTR_LAST_TRIP_TIME] = utc_time.isoformat() else: attr[ATTR_LAST_TRIP_TIME] = None tripped = self.vera_device.is_tripped attr[ATTR_TRIPPED] = "True" if tripped else "False" power = self.vera_device.power if power: attr[ATTR_CURRENT_POWER_W] = convert(power, float, 0.0) energy = self.vera_device.energy if energy: attr[ATTR_CURRENT_ENERGY_KWH] = convert(energy, float, 0.0) attr["Vera Device Id"] = self.vera_device.vera_device_id return attr @property def available(self): """If device communications have failed return false.""" return not self.vera_device.comm_failure @property def unique_id(self) -> str: """Return a unique ID. The Vera assigns a unique and immutable ID number to each device. """ return self._unique_id
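# Illustrative aside (not part of the integration above): a minimal sketch of how
# map_vera_device resolves the Home Assistant platform for a pyvera device. The
# `controller` and `switch_device` names are hypothetical placeholders, and the
# remap list stands in for the user-configured CONF_LIGHTS device ids.
#
#     switch_device = next(
#         dev for dev in controller.get_devices() if isinstance(dev, veraApi.VeraSwitch)
#     )
#     map_vera_device(switch_device, remap=[])                           # -> "switch"
#     map_vera_device(switch_device, remap=[switch_device.device_id])    # -> "light"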
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
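The volume tests above show that, with only SUPPORT_VOLUME_STEP available, a relativeSteps value of 10 is translated into ten separate volume_up service calls (and -10 into ten volume_down calls), which is why the assertions count calls rather than inspect a level. A minimal, framework-free sketch of that per-step dispatch and of the call-capture pattern used throughout these tests follows; every name in it is hypothetical and it is not the Home Assistant test harness itself.

import asyncio

def make_recording_service(calls):
    # Stand-in for async_mock_service: record the data of every call.
    async def service(**data):
        calls.append(data)
    return service

async def relative_volume(steps, volume_up, volume_down):
    # One discrete service call per requested step, as the assertions above expect.
    service = volume_up if steps > 0 else volume_down
    for _ in range(abs(steps)):
        await service(entity_id="media_player.bla")

async def main():
    calls = []
    up = make_recording_service(calls)
    down = make_recording_service([])
    await relative_volume(10, up, down)
    assert len(calls) == 10
    assert calls[0] == {"entity_id": "media_player.bla"}

asyncio.run(main())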
repo_name: sander76/home-assistant
test_path: tests/components/google_assistant/test_trait.py
code_path: homeassistant/components/vera/__init__.py
"""Support for RFXtrx lights.""" import logging import RFXtrx as rfxtrxmod from homeassistant.components.light import ( ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, LightEntity, ) from homeassistant.const import CONF_DEVICES, STATE_ON from homeassistant.core import callback from . import ( CONF_DATA_BITS, CONF_SIGNAL_REPETITIONS, DEFAULT_SIGNAL_REPETITIONS, RfxtrxCommandEntity, connect_auto_add, get_device_id, get_rfx_object, ) from .const import COMMAND_OFF_LIST, COMMAND_ON_LIST _LOGGER = logging.getLogger(__name__) SUPPORT_RFXTRX = SUPPORT_BRIGHTNESS def supported(event): """Return whether an event supports light.""" return ( isinstance(event.device, rfxtrxmod.LightingDevice) and event.device.known_to_be_dimmable ) async def async_setup_entry( hass, config_entry, async_add_entities, ): """Set up config entry.""" discovery_info = config_entry.data device_ids = set() # Add switch from config file entities = [] for packet_id, entity_info in discovery_info[CONF_DEVICES].items(): event = get_rfx_object(packet_id) if event is None: _LOGGER.error("Invalid device: %s", packet_id) continue if not supported(event): continue device_id = get_device_id( event.device, data_bits=entity_info.get(CONF_DATA_BITS) ) if device_id in device_ids: continue device_ids.add(device_id) entity = RfxtrxLight( event.device, device_id, entity_info[CONF_SIGNAL_REPETITIONS] ) entities.append(entity) async_add_entities(entities) @callback def light_update(event, device_id): """Handle light updates from the RFXtrx gateway.""" if not supported(event): return if device_id in device_ids: return device_ids.add(device_id) _LOGGER.info( "Added light (Device ID: %s Class: %s Sub: %s, Event: %s)", event.device.id_string.lower(), event.device.__class__.__name__, event.device.subtype, "".join(f"{x:02x}" for x in event.data), ) entity = RfxtrxLight( event.device, device_id, DEFAULT_SIGNAL_REPETITIONS, event=event ) async_add_entities([entity]) # Subscribe to main RFXtrx events connect_auto_add(hass, discovery_info, light_update) class RfxtrxLight(RfxtrxCommandEntity, LightEntity): """Representation of a RFXtrx light.""" _brightness = 0 async def async_added_to_hass(self): """Restore RFXtrx device state (ON/OFF).""" await super().async_added_to_hass() if self._event is None: old_state = await self.async_get_last_state() if old_state is not None: self._state = old_state.state == STATE_ON self._brightness = old_state.attributes.get(ATTR_BRIGHTNESS) @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def supported_features(self): """Flag supported features.""" return SUPPORT_RFXTRX @property def is_on(self): """Return true if device is on.""" return self._state async def async_turn_on(self, **kwargs): """Turn the device on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) self._state = True if brightness is None: await self._async_send(self._device.send_on) self._brightness = 255 else: await self._async_send(self._device.send_dim, brightness * 100 // 255) self._brightness = brightness self.async_write_ha_state() async def async_turn_off(self, **kwargs): """Turn the device off.""" await self._async_send(self._device.send_off) self._state = False self._brightness = 0 self.async_write_ha_state() def _apply_event(self, event): """Apply command from rfxtrx.""" super()._apply_event(event) if event.values["Command"] in COMMAND_ON_LIST: self._state = True elif event.values["Command"] in COMMAND_OFF_LIST: self._state = False elif event.values["Command"] == "Set level": self._brightness = 
event.values["Dim level"] * 255 // 100 self._state = self._brightness > 0 @callback def _handle_event(self, event, device_id): """Check if event applies to me and update.""" if device_id != self._device_id: return self._apply_event(event) self.async_write_ha_state()
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_PREVIOUS
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK
    )
    await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_RESUME
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY
    )
    await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_SHUFFLE
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET
    )
    await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "media_player.bla",
        media_player.ATTR_MEDIA_SHUFFLE: True,
    }

    # COMMAND_MEDIA_STOP
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP
    )
    await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}


@pytest.mark.parametrize(
    "state",
    (
        STATE_OFF,
        STATE_IDLE,
        STATE_PLAYING,
        STATE_ON,
        STATE_PAUSED,
        STATE_STANDBY,
        STATE_UNAVAILABLE,
        STATE_UNKNOWN,
    ),
)
async def test_media_state(hass, state):
    """Test the MediaStateTrait."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    assert trait.TransportControlTrait.supported(
        media_player.DOMAIN, media_player.SUPPORT_PLAY, None
    )

    trt = trait.MediaStateTrait(
        hass,
        State(
            "media_player.bla",
            state,
            {
                media_player.ATTR_MEDIA_POSITION: 100,
                media_player.ATTR_MEDIA_DURATION: 200,
                media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5,
                ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY
                | media_player.SUPPORT_STOP,
            },
        ),
        BASIC_CONFIG,
    )

    assert trt.sync_attributes() == {
        "supportActivityState": True,
        "supportPlaybackState": True,
    }
    assert trt.query_attributes() == {
        "activityState": trt.activity_lookup.get(state),
        "playbackState": trt.playback_lookup.get(state),
    }
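Every execute-path test in this module follows the same capture-and-assert pattern: register a stub handler with async_mock_service, drive the trait through trt.execute, then check the recorded service calls. Below is a minimal, illustrative sketch of that pattern only; the test name and the "switch.demo" entity id are arbitrary examples, and it reuses names already imported at the top of this test module.

async def test_execute_pattern_sketch(hass):
    """Illustrative only: the capture-and-assert pattern shared by the tests above."""
    # A trait wraps a plain State object; no entity has to be registered in hass.
    trt = trait.OnOffTrait(hass, State("switch.demo", STATE_OFF), BASIC_CONFIG)
    # async_mock_service replaces the real service and returns the list of captured calls.
    calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON)
    # Executing the Google command should translate into exactly one service call.
    await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "switch.demo"}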
repo_name: sander76/home-assistant
test_path: tests/components/google_assistant/test_trait.py
code_path: homeassistant/components/rfxtrx/light.py
"""Entity for Firmata devices.""" from __future__ import annotations from homeassistant.config_entries import ConfigEntry from .board import FirmataPinType from .const import DOMAIN, FIRMATA_MANUFACTURER from .pin import FirmataBoardPin class FirmataEntity: """Representation of a Firmata entity.""" def __init__(self, api): """Initialize the entity.""" self._api = api @property def device_info(self) -> dict: """Return device info.""" return { "connections": {}, "identifiers": {(DOMAIN, self._api.board.name)}, "manufacturer": FIRMATA_MANUFACTURER, "name": self._api.board.name, "sw_version": self._api.board.firmware_version, } class FirmataPinEntity(FirmataEntity): """Representation of a Firmata pin entity.""" def __init__( self, api: type[FirmataBoardPin], config_entry: ConfigEntry, name: str, pin: FirmataPinType, ): """Initialize the pin entity.""" super().__init__(api) self._name = name location = (config_entry.entry_id, "pin", pin) self._unique_id = "_".join(str(i) for i in location) @property def name(self) -> str: """Get the name of the pin.""" return self._name @property def should_poll(self) -> bool: """No polling needed.""" return False @property def unique_id(self) -> str: """Return a unique identifier for this device.""" return self._unique_id
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/firmata/entity.py
"""Support the binary sensors of a BloomSky weather station.""" import voluptuous as vol from homeassistant.components.binary_sensor import ( DEVICE_CLASS_MOISTURE, PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.const import CONF_MONITORED_CONDITIONS import homeassistant.helpers.config_validation as cv from . import DOMAIN SENSOR_TYPES = {"Rain": DEVICE_CLASS_MOISTURE, "Night": None} PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] ) } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the available BloomSky weather binary sensors.""" # Default needed in case of discovery if discovery_info is not None: return sensors = config[CONF_MONITORED_CONDITIONS] bloomsky = hass.data[DOMAIN] for device in bloomsky.devices.values(): for variable in sensors: add_entities([BloomSkySensor(bloomsky, device, variable)], True) class BloomSkySensor(BinarySensorEntity): """Representation of a single binary sensor in a BloomSky device.""" def __init__(self, bs, device, sensor_name): """Initialize a BloomSky binary sensor.""" self._bloomsky = bs self._device_id = device["DeviceID"] self._sensor_name = sensor_name self._name = f"{device['DeviceName']} {sensor_name}" self._state = None self._unique_id = f"{self._device_id}-{self._sensor_name}" @property def unique_id(self): """Return a unique ID.""" return self._unique_id @property def name(self): """Return the name of the BloomSky device and this sensor.""" return self._name @property def device_class(self): """Return the class of this sensor, from DEVICE_CLASSES.""" return SENSOR_TYPES.get(self._sensor_name) @property def is_on(self): """Return true if binary sensor is on.""" return self._state def update(self): """Request an update from the BloomSky API.""" self._bloomsky.refresh_devices() self._state = self._bloomsky.devices[self._device_id]["Data"][self._sensor_name]
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/bloomsky/binary_sensor.py
"""Switch representing the shutoff valve for the Flo by Moen integration.""" from __future__ import annotations from aioflo.location import SLEEP_MINUTE_OPTIONS, SYSTEM_MODE_HOME, SYSTEM_REVERT_MODES import voluptuous as vol from homeassistant.components.switch import SwitchEntity from homeassistant.core import callback from homeassistant.helpers import entity_platform from .const import DOMAIN as FLO_DOMAIN from .device import FloDeviceDataUpdateCoordinator from .entity import FloEntity ATTR_REVERT_TO_MODE = "revert_to_mode" ATTR_SLEEP_MINUTES = "sleep_minutes" SERVICE_SET_SLEEP_MODE = "set_sleep_mode" SERVICE_SET_AWAY_MODE = "set_away_mode" SERVICE_SET_HOME_MODE = "set_home_mode" SERVICE_RUN_HEALTH_TEST = "run_health_test" async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Flo switches from config entry.""" devices: list[FloDeviceDataUpdateCoordinator] = hass.data[FLO_DOMAIN][ config_entry.entry_id ]["devices"] entities = [] for device in devices: if device.device_type != "puck_oem": entities.append(FloSwitch(device)) async_add_entities(entities) platform = entity_platform.current_platform.get() platform.async_register_entity_service( SERVICE_SET_AWAY_MODE, {}, "async_set_mode_away" ) platform.async_register_entity_service( SERVICE_SET_HOME_MODE, {}, "async_set_mode_home" ) platform.async_register_entity_service( SERVICE_RUN_HEALTH_TEST, {}, "async_run_health_test" ) platform.async_register_entity_service( SERVICE_SET_SLEEP_MODE, { vol.Required(ATTR_SLEEP_MINUTES, default=120): vol.In(SLEEP_MINUTE_OPTIONS), vol.Required(ATTR_REVERT_TO_MODE, default=SYSTEM_MODE_HOME): vol.In( SYSTEM_REVERT_MODES ), }, "async_set_mode_sleep", ) class FloSwitch(FloEntity, SwitchEntity): """Switch class for the Flo by Moen valve.""" def __init__(self, device: FloDeviceDataUpdateCoordinator): """Initialize the Flo switch.""" super().__init__("shutoff_valve", "Shutoff Valve", device) self._state = self._device.last_known_valve_state == "open" @property def is_on(self) -> bool: """Return True if the valve is open.""" return self._state @property def icon(self): """Return the icon to use for the valve.""" if self.is_on: return "mdi:valve-open" return "mdi:valve-closed" async def async_turn_on(self, **kwargs) -> None: """Open the valve.""" await self._device.api_client.device.open_valve(self._device.id) self._state = True self.async_write_ha_state() async def async_turn_off(self, **kwargs) -> None: """Close the valve.""" await self._device.api_client.device.close_valve(self._device.id) self._state = False self.async_write_ha_state() @callback def async_update_state(self) -> None: """Retrieve the latest valve state and update the state machine.""" self._state = self._device.last_known_valve_state == "open" self.async_write_ha_state() async def async_added_to_hass(self): """When entity is added to hass.""" self.async_on_remove(self._device.async_add_listener(self.async_update_state)) async def async_set_mode_home(self): """Set the Flo location to home mode.""" await self._device.async_set_mode_home() async def async_set_mode_away(self): """Set the Flo location to away mode.""" await self._device.async_set_mode_away() async def async_set_mode_sleep(self, sleep_minutes, revert_to_mode): """Set the Flo location to sleep mode.""" await self._device.async_set_mode_sleep(sleep_minutes, revert_to_mode) async def async_run_health_test(self): """Run a Flo device health test.""" await self._device.async_run_health_test()
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
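# NOTE (editorial sketch, not part of the upstream test suite): every test above
# follows the same arrange/act/assert pattern -- build a trait around a State,
# capture service calls with async_mock_service, execute a Google command, then
# assert on the captured calls. Distilled here using names already defined in
# this file (OnOffTrait / switch, mirroring test_onoff_switch):
async def _example_trait_test_pattern(hass):
    """Illustrative sketch of the shared trait-test pattern used above."""
    # Arrange: wrap a mocked entity state in the trait under test.
    trt = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG)
    # Capture calls to the service the trait is expected to invoke.
    calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON)
    # Act: execute the Google Assistant command against the trait.
    await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    # Assert: exactly one service call, aimed at the right entity.
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "switch.bla"}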
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/flo/switch.py
"""Sensor to collect the reference daily prices of electricity ('PVPC') in Spain.""" from __future__ import annotations import logging from random import randint from aiopvpc import PVPCData from homeassistant import config_entries from homeassistant.components.sensor import SensorEntity from homeassistant.const import CONF_NAME, CURRENCY_EURO, ENERGY_KILO_WATT_HOUR from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.event import async_call_later, async_track_time_change from homeassistant.helpers.restore_state import RestoreEntity import homeassistant.util.dt as dt_util from .const import ATTR_TARIFF _LOGGER = logging.getLogger(__name__) ATTR_PRICE = "price" ICON = "mdi:currency-eur" UNIT = f"{CURRENCY_EURO}/{ENERGY_KILO_WATT_HOUR}" _DEFAULT_TIMEOUT = 10 async def async_setup_entry( hass: HomeAssistant, config_entry: config_entries.ConfigEntry, async_add_entities ): """Set up the electricity price sensor from config_entry.""" name = config_entry.data[CONF_NAME] pvpc_data_handler = PVPCData( tariff=config_entry.data[ATTR_TARIFF], local_timezone=hass.config.time_zone, websession=async_get_clientsession(hass), logger=_LOGGER, timeout=_DEFAULT_TIMEOUT, ) async_add_entities( [ElecPriceSensor(name, config_entry.unique_id, pvpc_data_handler)], False ) class ElecPriceSensor(RestoreEntity, SensorEntity): """Class to hold the prices of electricity as a sensor.""" unit_of_measurement = UNIT icon = ICON should_poll = False def __init__(self, name, unique_id, pvpc_data_handler): """Initialize the sensor object.""" self._name = name self._unique_id = unique_id self._pvpc_data = pvpc_data_handler self._num_retries = 0 self._hourly_tracker = None self._price_tracker = None async def async_will_remove_from_hass(self) -> None: """Cancel listeners for sensor updates.""" self._hourly_tracker() self._price_tracker() async def async_added_to_hass(self): """Handle entity which will be added.""" await super().async_added_to_hass() state = await self.async_get_last_state() if state: self._pvpc_data.state = state.state # Update 'state' value in hour changes self._hourly_tracker = async_track_time_change( self.hass, self.update_current_price, second=[0], minute=[0] ) # Update prices at random time, 2 times/hour (don't want to upset API) random_minute = randint(1, 29) mins_update = [random_minute, random_minute + 30] self._price_tracker = async_track_time_change( self.hass, self.async_update_prices, second=[0], minute=mins_update ) _LOGGER.debug( "Setup of price sensor %s (%s) with tariff '%s', " "updating prices each hour at %s min", self.name, self.entity_id, self._pvpc_data.tariff, mins_update, ) await self.async_update_prices(dt_util.utcnow()) self.update_current_price(dt_util.utcnow()) @property def unique_id(self) -> str | None: """Return a unique ID.""" return self._unique_id @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._pvpc_data.state @property def available(self) -> bool: """Return True if entity is available.""" return self._pvpc_data.state_available @property def extra_state_attributes(self): """Return the state attributes.""" return self._pvpc_data.attributes @callback def update_current_price(self, now): """Update the sensor state, by selecting the current price for this hour.""" self._pvpc_data.process_state_and_attributes(now) self.async_write_ha_state() async def 
async_update_prices(self, now): """Update electricity prices from the ESIOS API.""" prices = await self._pvpc_data.async_update_prices(now) if not prices and self._pvpc_data.source_available: self._num_retries += 1 if self._num_retries > 2: _LOGGER.warning( "%s: repeated bad data update, mark component as unavailable source", self.entity_id, ) self._pvpc_data.source_available = False return retry_delay = 2 * self._num_retries * self._pvpc_data.timeout _LOGGER.debug( "%s: Bad update[retry:%d], will try again in %d s", self.entity_id, self._num_retries, retry_delay, ) async_call_later(self.hass, retry_delay, self.async_update_prices) return if not prices: _LOGGER.debug("%s: data source is not yet available", self.entity_id) return self._num_retries = 0 if not self._pvpc_data.source_available: self._pvpc_data.source_available = True _LOGGER.warning("%s: component has recovered data access", self.entity_id) self.update_current_price(now)
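# NOTE (editorial sketch, not part of the integration): the retry handling in
# async_update_prices above backs off linearly -- retry_delay = 2 * num_retries * timeout --
# and marks the data source unavailable on the third consecutive failure.
# Assuming the default _DEFAULT_TIMEOUT of 10 seconds defined above, the
# scheduled delays work out as follows:
def _example_retry_delays(timeout: int = 10, max_retries: int = 2) -> list[int]:
    """Return the delays (seconds) scheduled before the source is marked unavailable."""
    # Failure 1 -> wait 2 * 1 * 10 = 20 s; failure 2 -> wait 2 * 2 * 10 = 40 s;
    # failure 3 trips the `> 2` guard and marks the source unavailable instead.
    return [2 * attempt * timeout for attempt in range(1, max_retries + 1)]

assert _example_retry_delays() == [20, 40]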
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
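A brief aside on the COMMAND_MEDIA_SEEK_RELATIVE assertion in the TransportControl test above. The sketch below is illustrative only (the helper name is made up and is not part of the trait's implementation): the expected seek target is the stored media position, plus the time elapsed since that position was recorded, plus the requested relative offset, which is why 100 s + 10 s + 10 s yields the asserted 120 s.

from datetime import datetime, timedelta


def expected_seek_position(
    media_position: float, updated_at: datetime, now: datetime, relative_ms: int
) -> float:
    """Illustrative seek math: stored position + elapsed time + relative offset."""
    elapsed = (now - updated_at).total_seconds()
    return media_position + elapsed + relative_ms / 1000


# The values used by the test: position 100 s, updated 10 s before "now", +10000 ms.
now = datetime(2020, 1, 1)
assert expected_seek_position(100, now - timedelta(seconds=10), now, 10000) == 120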
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/pvpc_hourly_pricing/sensor.py
"""Provides device automations for Arcam FMJ Receiver control.""" from __future__ import annotations import voluptuous as vol from homeassistant.components.automation import AutomationActionType from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA from homeassistant.const import ( ATTR_ENTITY_ID, CONF_DEVICE_ID, CONF_DOMAIN, CONF_ENTITY_ID, CONF_PLATFORM, CONF_TYPE, ) from homeassistant.core import CALLBACK_TYPE, Event, HassJob, HomeAssistant, callback from homeassistant.helpers import config_validation as cv, entity_registry from homeassistant.helpers.typing import ConfigType from .const import DOMAIN, EVENT_TURN_ON TRIGGER_TYPES = {"turn_on"} TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend( { vol.Required(CONF_ENTITY_ID): cv.entity_id, vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES), } ) async def async_get_triggers(hass: HomeAssistant, device_id: str) -> list[dict]: """List device triggers for Arcam FMJ Receiver control devices.""" registry = await entity_registry.async_get_registry(hass) triggers = [] # Get all the integrations entities for this device for entry in entity_registry.async_entries_for_device(registry, device_id): if entry.domain == "media_player": triggers.append( { CONF_PLATFORM: "device", CONF_DEVICE_ID: device_id, CONF_DOMAIN: DOMAIN, CONF_ENTITY_ID: entry.entity_id, CONF_TYPE: "turn_on", } ) return triggers async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, action: AutomationActionType, automation_info: dict, ) -> CALLBACK_TYPE: """Attach a trigger.""" trigger_id = automation_info.get("trigger_id") if automation_info else None job = HassJob(action) if config[CONF_TYPE] == "turn_on": entity_id = config[CONF_ENTITY_ID] @callback def _handle_event(event: Event): if event.data[ATTR_ENTITY_ID] == entity_id: hass.async_run_hass_job( job, { "trigger": { **config, "description": f"{DOMAIN} - {entity_id}", "id": trigger_id, } }, event.context, ) return hass.bus.async_listen(EVENT_TURN_ON, _handle_event) return lambda: None
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
"""Support for deCONZ locks.""" from homeassistant.components.lock import DOMAIN, LockEntity from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from .const import LOCKS, NEW_LIGHT, NEW_SENSOR from .deconz_device import DeconzDevice from .gateway import get_gateway_from_config_entry async def async_setup_entry(hass, config_entry, async_add_entities): """Set up locks for deCONZ component.""" gateway = get_gateway_from_config_entry(hass, config_entry) gateway.entities[DOMAIN] = set() @callback def async_add_lock_from_light(lights=gateway.api.lights.values()): """Add lock from deCONZ.""" entities = [] for light in lights: if light.type in LOCKS and light.uniqueid not in gateway.entities[DOMAIN]: entities.append(DeconzLock(light, gateway)) if entities: async_add_entities(entities) gateway.listeners.append( async_dispatcher_connect( hass, gateway.async_signal_new_device(NEW_LIGHT), async_add_lock_from_light ) ) @callback def async_add_lock_from_sensor(sensors=gateway.api.sensors.values()): """Add lock from deCONZ.""" entities = [] for sensor in sensors: if sensor.type in LOCKS and sensor.uniqueid not in gateway.entities[DOMAIN]: entities.append(DeconzLock(sensor, gateway)) if entities: async_add_entities(entities) gateway.listeners.append( async_dispatcher_connect( hass, gateway.async_signal_new_device(NEW_SENSOR), async_add_lock_from_sensor, ) ) async_add_lock_from_light() async_add_lock_from_sensor() class DeconzLock(DeconzDevice, LockEntity): """Representation of a deCONZ lock.""" TYPE = DOMAIN @property def is_locked(self): """Return true if lock is on.""" return self._device.is_locked async def async_lock(self, **kwargs): """Lock the lock.""" await self._device.lock() async def async_unlock(self, **kwargs): """Unlock the lock.""" await self._device.unlock()
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
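# The secure-device tests in this file (ArmDisarm, the door/garage/gate
# OpenClose covers, LockUnlock) all exercise the same two-factor pattern:
# with no secure_devices_pin configured the command fails with
# ERR_CHALLENGE_NOT_SETUP, a request without a pin raises a challenge of type
# CHALLENGE_PIN_NEEDED, a wrong pin raises CHALLENGE_FAILED_PIN_NEEDED, and
# only a matching pin lets the service call through. The sketch below is a
# minimal, self-contained illustration of that decision flow as the tests
# assert it; PinGuard, the stand-in exception classes and the placeholder
# string codes are invented for this example and are not the Home Assistant
# implementation.
from dataclasses import dataclass
from typing import Optional

# Placeholder values; the real constants live in
# homeassistant.components.google_assistant.const and differ from these.
ERR_CHALLENGE_NOT_SETUP = "challenge_not_setup"
CHALLENGE_PIN_NEEDED = "pin_needed"
CHALLENGE_FAILED_PIN_NEEDED = "failed_pin_needed"


class SketchSmartHomeError(Exception):
    """Stand-in for SmartHomeError carrying an error code."""

    def __init__(self, code: str) -> None:
        super().__init__(code)
        self.code = code


class SketchChallengeNeeded(SketchSmartHomeError):
    """Stand-in for ChallengeNeeded; asks the caller to (re)prompt for a pin."""

    def __init__(self, challenge_type: str) -> None:
        super().__init__("challenge_needed")
        self.challenge_type = challenge_type


@dataclass
class PinGuard:
    """Validate the challenge payload before a secure command executes."""

    secure_devices_pin: Optional[str]

    def check(self, challenge: dict) -> None:
        if self.secure_devices_pin is None:
            # No pin configured at all: the command can never succeed.
            raise SketchSmartHomeError(ERR_CHALLENGE_NOT_SETUP)
        pin = challenge.get("pin")
        if pin is None:
            # Ask the user to provide a pin.
            raise SketchChallengeNeeded(CHALLENGE_PIN_NEEDED)
        if str(pin) != self.secure_devices_pin:
            # Wrong pin: ask again, flagging the failure.
            raise SketchChallengeNeeded(CHALLENGE_FAILED_PIN_NEEDED)
        # Correct pin: fall through and let the service call happen.


if __name__ == "__main__":
    guard = PinGuard(secure_devices_pin="1234")
    for payload in ({}, {"pin": 9999}, {"pin": "1234"}):
        try:
            guard.check(payload)
            print(payload, "-> accepted")
        except SketchChallengeNeeded as err:
            print(payload, "->", err.challenge_type)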
# sander76/home-assistant
# tests/components/google_assistant/test_trait.py
# homeassistant/components/deconz/lock.py

"""Support for Somfy Covers."""
from pymfy.api.devices.blind import Blind
from pymfy.api.devices.category import Category

from homeassistant.components.cover import (
    ATTR_POSITION,
    ATTR_TILT_POSITION,
    DEVICE_CLASS_BLIND,
    DEVICE_CLASS_SHUTTER,
    SUPPORT_CLOSE,
    SUPPORT_CLOSE_TILT,
    SUPPORT_OPEN,
    SUPPORT_OPEN_TILT,
    SUPPORT_SET_POSITION,
    SUPPORT_SET_TILT_POSITION,
    SUPPORT_STOP,
    SUPPORT_STOP_TILT,
    CoverEntity,
)
from homeassistant.const import CONF_OPTIMISTIC, STATE_CLOSED, STATE_OPEN
from homeassistant.helpers.restore_state import RestoreEntity

from . import SomfyEntity
from .const import API, COORDINATOR, DOMAIN

BLIND_DEVICE_CATEGORIES = {Category.INTERIOR_BLIND.value, Category.EXTERIOR_BLIND.value}
SHUTTER_DEVICE_CATEGORIES = {Category.EXTERIOR_BLIND.value}
SUPPORTED_CATEGORIES = {
    Category.ROLLER_SHUTTER.value,
    Category.INTERIOR_BLIND.value,
    Category.EXTERIOR_BLIND.value,
}


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Somfy cover platform."""
    domain_data = hass.data[DOMAIN]
    coordinator = domain_data[COORDINATOR]
    api = domain_data[API]

    covers = [
        SomfyCover(coordinator, device_id, api, domain_data[CONF_OPTIMISTIC])
        for device_id, device in coordinator.data.items()
        if SUPPORTED_CATEGORIES & set(device.categories)
    ]

    async_add_entities(covers)


class SomfyCover(SomfyEntity, RestoreEntity, CoverEntity):
    """Representation of a Somfy cover device."""

    def __init__(self, coordinator, device_id, api, optimistic):
        """Initialize the Somfy device."""
        super().__init__(coordinator, device_id, api)
        self.categories = set(self.device.categories)
        self.optimistic = optimistic
        self._closed = None
        self._is_opening = None
        self._is_closing = None
        self._cover = None
        self._create_device()

    def _create_device(self) -> Blind:
        """Update the device with the latest data."""
        self._cover = Blind(self.device, self.api)

    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        supported_features = 0
        if self.has_capability("open"):
            supported_features |= SUPPORT_OPEN
        if self.has_capability("close"):
            supported_features |= SUPPORT_CLOSE
        if self.has_capability("stop"):
            supported_features |= SUPPORT_STOP
        if self.has_capability("position"):
            supported_features |= SUPPORT_SET_POSITION
        if self.has_capability("rotation"):
            supported_features |= (
                SUPPORT_OPEN_TILT
                | SUPPORT_CLOSE_TILT
                | SUPPORT_STOP_TILT
                | SUPPORT_SET_TILT_POSITION
            )
        return supported_features

    async def async_close_cover(self, **kwargs):
        """Close the cover."""
        self._is_closing = True
        self.async_write_ha_state()
        try:
            # Blocks until the close command is sent
            await self.hass.async_add_executor_job(self._cover.close)
            self._closed = True
        finally:
            self._is_closing = None
            self.async_write_ha_state()

    async def async_open_cover(self, **kwargs):
        """Open the cover."""
        self._is_opening = True
        self.async_write_ha_state()
        try:
            # Blocks until the open command is sent
            await self.hass.async_add_executor_job(self._cover.open)
            self._closed = False
        finally:
            self._is_opening = None
            self.async_write_ha_state()

    def stop_cover(self, **kwargs):
        """Stop the cover."""
        self._cover.stop()

    def set_cover_position(self, **kwargs):
        """Move the cover shutter to a specific position."""
        self._cover.set_position(100 - kwargs[ATTR_POSITION])

    @property
    def device_class(self):
        """Return the device class."""
        if self.categories & BLIND_DEVICE_CATEGORIES:
            return DEVICE_CLASS_BLIND
        if self.categories & SHUTTER_DEVICE_CATEGORIES:
            return DEVICE_CLASS_SHUTTER
        return None

    @property
    def current_cover_position(self):
        """Return the current position of cover shutter."""
        if not self.has_state("position"):
            return None
        return 100 - self._cover.get_position()

    @property
    def is_opening(self):
        """Return if the cover is opening."""
        if not self.optimistic:
            return None
        return self._is_opening

    @property
    def is_closing(self):
        """Return if the cover is closing."""
        if not self.optimistic:
            return None
        return self._is_closing

    @property
    def is_closed(self) -> bool:
        """Return if the cover is closed."""
        is_closed = None
        if self.has_state("position"):
            is_closed = self._cover.is_closed()
        elif self.optimistic:
            is_closed = self._closed
        return is_closed

    @property
    def current_cover_tilt_position(self) -> int:
        """Return current position of cover tilt.

        None is unknown, 0 is closed, 100 is fully open.
        """
        if not self.has_state("orientation"):
            return None
        return 100 - self._cover.orientation

    def set_cover_tilt_position(self, **kwargs):
        """Move the cover tilt to a specific position."""
        self._cover.orientation = 100 - kwargs[ATTR_TILT_POSITION]

    def open_cover_tilt(self, **kwargs):
        """Open the cover tilt."""
        self._cover.orientation = 0

    def close_cover_tilt(self, **kwargs):
        """Close the cover tilt."""
        self._cover.orientation = 100

    def stop_cover_tilt(self, **kwargs):
        """Stop the cover."""
        self._cover.stop()

    async def async_added_to_hass(self):
        """Complete the initialization."""
        await super().async_added_to_hass()
        if not self.optimistic:
            return
        # Restore the last state if we use optimistic
        last_state = await self.async_get_last_state()
        if last_state is not None and last_state.state in (
            STATE_OPEN,
            STATE_CLOSED,
        ):
            self._closed = last_state.state == STATE_CLOSED
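
# The "100 - position" conversions in SomfyCover.set_cover_position and
# current_cover_position exist because Home Assistant treats 100 as fully
# open, while the Somfy API appears to report positions on the opposite
# scale (an assumption inferred from the inversion above, not confirmed by
# this file). A tiny self-contained round-trip check, using a FakeBlind
# stand-in instead of pymfy's Blind, which needs a live API client:
class FakeBlind:
    """Minimal stand-in storing a raw Somfy-style position."""

    def __init__(self) -> None:
        self._position = 0

    def set_position(self, position: int) -> None:
        self._position = position

    def get_position(self) -> int:
        return self._position


def ha_to_somfy(ha_position: int) -> int:
    """Convert an HA position (100 = open) to the inverted Somfy scale."""
    return 100 - ha_position


def somfy_to_ha(somfy_position: int) -> int:
    """Convert a Somfy-scale position back to the HA convention."""
    return 100 - somfy_position


fake = FakeBlind()
fake.set_position(ha_to_somfy(75))  # HA asks for "75% open"
assert fake.get_position() == 25  # stored on the inverted scale
assert somfy_to_ha(fake.get_position()) == 75  # and read back unchanged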
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
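Nearly every test above follows the same capture-and-assert flow: register a mock service, execute the Google Assistant command against the trait, then inspect the recorded calls. A condensed sketch of that pattern is below; the helper name `_example_trait_test_pattern` is hypothetical, while every other symbol comes from this module's existing imports.

# Illustrative sketch of the shared test flow, not an additional test case.
async def _example_trait_test_pattern(hass):
    # Build a trait around a fake entity state.
    trt = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG)
    # async_mock_service returns a list that records every call made to the service.
    calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
    # Execute the Google command; the trait should translate it into a service call.
    await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "light.bla"}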
repo_name: sander76/home-assistant
test_path: tests/components/google_assistant/test_trait.py
code_path: homeassistant/components/somfy/cover.py
"""Sensor for displaying the number of result from Flume.""" from datetime import timedelta import logging from numbers import Number from pyflume import FlumeData import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( CONF_CLIENT_ID, CONF_CLIENT_SECRET, CONF_NAME, CONF_PASSWORD, CONF_USERNAME, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) from .const import ( DEFAULT_NAME, DOMAIN, FLUME_AUTH, FLUME_DEVICES, FLUME_HTTP_SESSION, FLUME_QUERIES_SENSOR, FLUME_TYPE_SENSOR, KEY_DEVICE_ID, KEY_DEVICE_LOCATION, KEY_DEVICE_LOCATION_NAME, KEY_DEVICE_LOCATION_TIMEZONE, KEY_DEVICE_TYPE, ) _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=15) SCAN_INTERVAL = timedelta(minutes=1) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_CLIENT_ID): cv.string, vol.Required(CONF_CLIENT_SECRET): cv.string, vol.Optional(CONF_NAME): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Import the platform into a config entry.""" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=config ) ) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Flume sensor.""" flume_domain_data = hass.data[DOMAIN][config_entry.entry_id] flume_auth = flume_domain_data[FLUME_AUTH] http_session = flume_domain_data[FLUME_HTTP_SESSION] flume_devices = flume_domain_data[FLUME_DEVICES] config = config_entry.data name = config.get(CONF_NAME, DEFAULT_NAME) flume_entity_list = [] for device in flume_devices.device_list: if device[KEY_DEVICE_TYPE] != FLUME_TYPE_SENSOR: continue device_id = device[KEY_DEVICE_ID] device_name = device[KEY_DEVICE_LOCATION][KEY_DEVICE_LOCATION_NAME] device_timezone = device[KEY_DEVICE_LOCATION][KEY_DEVICE_LOCATION_TIMEZONE] device_friendly_name = f"{name} {device_name}" flume_device = FlumeData( flume_auth, device_id, device_timezone, SCAN_INTERVAL, update_on_init=False, http_session=http_session, ) coordinator = _create_flume_device_coordinator(hass, flume_device) for flume_query_sensor in FLUME_QUERIES_SENSOR.items(): flume_entity_list.append( FlumeSensor( coordinator, flume_device, flume_query_sensor, f"{device_friendly_name} {flume_query_sensor[1]['friendly_name']}", device_id, ) ) if flume_entity_list: async_add_entities(flume_entity_list) class FlumeSensor(CoordinatorEntity, SensorEntity): """Representation of the Flume sensor.""" def __init__(self, coordinator, flume_device, flume_query_sensor, name, device_id): """Initialize the Flume sensor.""" super().__init__(coordinator) self._flume_device = flume_device self._flume_query_sensor = flume_query_sensor self._name = name self._device_id = device_id self._state = None @property def device_info(self): """Device info for the flume sensor.""" return { "name": self._name, "identifiers": {(DOMAIN, self._device_id)}, "manufacturer": "Flume, Inc.", "model": "Flume Smart Water Monitor", } @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" sensor_key = self._flume_query_sensor[0] if sensor_key not in self._flume_device.values: return None return 
_format_state_value(self._flume_device.values[sensor_key]) @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" # This is in gallons per SCAN_INTERVAL return self._flume_query_sensor[1]["unit_of_measurement"] @property def unique_id(self): """Flume query and Device unique ID.""" return f"{self._flume_query_sensor[0]}_{self._device_id}" async def async_added_to_hass(self): """Request an update when added.""" await super().async_added_to_hass() # We do not ask for an update with async_add_entities() # because it will update disabled entities await self.coordinator.async_request_refresh() def _format_state_value(value): return round(value, 1) if isinstance(value, Number) else None def _create_flume_device_coordinator(hass, flume_device): """Create a data coordinator for the flume device.""" async def _async_update_data(): """Get the latest data from the Flume.""" _LOGGER.debug("Updating Flume data") try: await hass.async_add_executor_job(flume_device.update_force) except Exception as ex: raise UpdateFailed(f"Error communicating with flume API: {ex}") from ex _LOGGER.debug( "Flume update details: %s", { "values": flume_device.values, "query_payload": flume_device.query_payload, }, ) return DataUpdateCoordinator( hass, _LOGGER, # Name of the data. For logging purposes. name=flume_device.device_id, update_method=_async_update_data, # Polling interval. Will only be polled if there are subscribers. update_interval=SCAN_INTERVAL, )
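A quick worked illustration of the `_format_state_value` helper defined above; the sample values are assumptions chosen only to show the rounding and the non-numeric fallback.

# _format_state_value keeps one decimal place for numeric readings and returns
# None for anything non-numeric, which Home Assistant displays as an unknown state.
assert _format_state_value(12.3456) == 12.3
assert _format_state_value(7) == 7
assert _format_state_value("n/a") is None
assert _format_state_value(None) is None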
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
"""The airvisual component.""" import asyncio from datetime import timedelta from math import ceil from pyairvisual import CloudAPI, NodeSamba from pyairvisual.errors import ( AirVisualError, InvalidKeyError, KeyExpiredError, NodeProError, ) from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_API_KEY, CONF_IP_ADDRESS, CONF_LATITUDE, CONF_LONGITUDE, CONF_PASSWORD, CONF_SHOW_ON_MAP, CONF_STATE, ) from homeassistant.core import callback from homeassistant.helpers import aiohttp_client, config_validation as cv from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) from .const import ( CONF_CITY, CONF_COUNTRY, CONF_GEOGRAPHIES, CONF_INTEGRATION_TYPE, DATA_COORDINATOR, DOMAIN, INTEGRATION_TYPE_GEOGRAPHY_COORDS, INTEGRATION_TYPE_GEOGRAPHY_NAME, INTEGRATION_TYPE_NODE_PRO, LOGGER, ) PLATFORMS = ["air_quality", "sensor"] DATA_LISTENER = "listener" DEFAULT_ATTRIBUTION = "Data provided by AirVisual" DEFAULT_NODE_PRO_UPDATE_INTERVAL = timedelta(minutes=1) CONFIG_SCHEMA = cv.deprecated(DOMAIN) @callback def async_get_geography_id(geography_dict): """Generate a unique ID from a geography dict.""" if not geography_dict: return if CONF_CITY in geography_dict: return ", ".join( ( geography_dict[CONF_CITY], geography_dict[CONF_STATE], geography_dict[CONF_COUNTRY], ) ) return ", ".join( (str(geography_dict[CONF_LATITUDE]), str(geography_dict[CONF_LONGITUDE])) ) @callback def async_get_cloud_api_update_interval(hass, api_key, num_consumers): """Get a leveled scan interval for a particular cloud API key. This will shift based on the number of active consumers, thus keeping the user under the monthly API limit. """ # Assuming 10,000 calls per month and a "largest possible month" of 31 days; note # that we give a buffer of 1500 API calls for any drift, restarts, etc.: minutes_between_api_calls = ceil(num_consumers * 31 * 24 * 60 / 8500) LOGGER.debug( "Leveling API key usage (%s): %s consumers, %s minutes between updates", api_key, num_consumers, minutes_between_api_calls, ) return timedelta(minutes=minutes_between_api_calls) @callback def async_get_cloud_coordinators_by_api_key(hass, api_key): """Get all DataUpdateCoordinator objects related to a particular API key.""" coordinators = [] for entry_id, coordinator in hass.data[DOMAIN][DATA_COORDINATOR].items(): config_entry = hass.config_entries.async_get_entry(entry_id) if config_entry.data.get(CONF_API_KEY) == api_key: coordinators.append(coordinator) return coordinators @callback def async_sync_geo_coordinator_update_intervals(hass, api_key): """Sync the update interval for geography-based data coordinators (by API key).""" coordinators = async_get_cloud_coordinators_by_api_key(hass, api_key) if not coordinators: return update_interval = async_get_cloud_api_update_interval( hass, api_key, len(coordinators) ) for coordinator in coordinators: LOGGER.debug( "Updating interval for coordinator: %s, %s", coordinator.name, update_interval, ) coordinator.update_interval = update_interval async def async_setup(hass, config): """Set up the AirVisual component.""" hass.data[DOMAIN] = {DATA_COORDINATOR: {}, DATA_LISTENER: {}} return True @callback def _standardize_geography_config_entry(hass, config_entry): """Ensure that geography config entries have appropriate properties.""" entry_updates = {} if not config_entry.unique_id: # If the config entry doesn't already have a unique ID, set one: entry_updates["unique_id"] = config_entry.data[CONF_API_KEY] if not 
config_entry.options: # If the config entry doesn't already have any options set, set defaults: entry_updates["options"] = {CONF_SHOW_ON_MAP: True} if config_entry.data.get(CONF_INTEGRATION_TYPE) not in [ INTEGRATION_TYPE_GEOGRAPHY_COORDS, INTEGRATION_TYPE_GEOGRAPHY_NAME, ]: # If the config entry data doesn't contain an integration type that we know # about, infer it from the data we have: entry_updates["data"] = {**config_entry.data} if CONF_CITY in config_entry.data: entry_updates["data"][ CONF_INTEGRATION_TYPE ] = INTEGRATION_TYPE_GEOGRAPHY_NAME else: entry_updates["data"][ CONF_INTEGRATION_TYPE ] = INTEGRATION_TYPE_GEOGRAPHY_COORDS if not entry_updates: return hass.config_entries.async_update_entry(config_entry, **entry_updates) @callback def _standardize_node_pro_config_entry(hass, config_entry): """Ensure that Node/Pro config entries have appropriate properties.""" entry_updates = {} if CONF_INTEGRATION_TYPE not in config_entry.data: # If the config entry data doesn't contain the integration type, add it: entry_updates["data"] = { **config_entry.data, CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO, } if not entry_updates: return hass.config_entries.async_update_entry(config_entry, **entry_updates) async def async_setup_entry(hass, config_entry): """Set up AirVisual as config entry.""" if CONF_API_KEY in config_entry.data: _standardize_geography_config_entry(hass, config_entry) websession = aiohttp_client.async_get_clientsession(hass) cloud_api = CloudAPI(config_entry.data[CONF_API_KEY], session=websession) async def async_update_data(): """Get new data from the API.""" if CONF_CITY in config_entry.data: api_coro = cloud_api.air_quality.city( config_entry.data[CONF_CITY], config_entry.data[CONF_STATE], config_entry.data[CONF_COUNTRY], ) else: api_coro = cloud_api.air_quality.nearest_city( config_entry.data[CONF_LATITUDE], config_entry.data[CONF_LONGITUDE], ) try: return await api_coro except (InvalidKeyError, KeyExpiredError): matching_flows = [ flow for flow in hass.config_entries.flow.async_progress() if flow["context"]["source"] == SOURCE_REAUTH and flow["context"]["unique_id"] == config_entry.unique_id ] if not matching_flows: hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={ "source": SOURCE_REAUTH, "unique_id": config_entry.unique_id, }, data=config_entry.data, ) ) return {} except AirVisualError as err: raise UpdateFailed(f"Error while retrieving data: {err}") from err coordinator = DataUpdateCoordinator( hass, LOGGER, name=async_get_geography_id(config_entry.data), # We give a placeholder update interval in order to create the coordinator; # then, below, we use the coordinator's presence (along with any other # coordinators using the same API key) to calculate an actual, leveled # update interval: update_interval=timedelta(minutes=5), update_method=async_update_data, ) # Only geography-based entries have options: hass.data[DOMAIN][DATA_LISTENER][ config_entry.entry_id ] = config_entry.add_update_listener(async_reload_entry) else: _standardize_node_pro_config_entry(hass, config_entry) async def async_update_data(): """Get new data from the API.""" try: async with NodeSamba( config_entry.data[CONF_IP_ADDRESS], config_entry.data[CONF_PASSWORD] ) as node: return await node.async_get_latest_measurements() except NodeProError as err: raise UpdateFailed(f"Error while retrieving data: {err}") from err coordinator = DataUpdateCoordinator( hass, LOGGER, name="Node/Pro data", update_interval=DEFAULT_NODE_PRO_UPDATE_INTERVAL, update_method=async_update_data, 
) await coordinator.async_config_entry_first_refresh() hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator # Reassess the interval between 2 server requests if CONF_API_KEY in config_entry.data: async_sync_geo_coordinator_update_intervals( hass, config_entry.data[CONF_API_KEY] ) for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(config_entry, platform) ) return True async def async_migrate_entry(hass, config_entry): """Migrate an old config entry.""" version = config_entry.version LOGGER.debug("Migrating from version %s", version) # 1 -> 2: One geography per config entry if version == 1: version = config_entry.version = 2 # Update the config entry to only include the first geography (there is always # guaranteed to be at least one): geographies = list(config_entry.data[CONF_GEOGRAPHIES]) first_geography = geographies.pop(0) first_id = async_get_geography_id(first_geography) hass.config_entries.async_update_entry( config_entry, unique_id=first_id, title=f"Cloud API ({first_id})", data={CONF_API_KEY: config_entry.data[CONF_API_KEY], **first_geography}, ) # For any geographies that remain, create a new config entry for each one: for geography in geographies: if CONF_LATITUDE in geography: source = "geography_by_coords" else: source = "geography_by_name" hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": source}, data={CONF_API_KEY: config_entry.data[CONF_API_KEY], **geography}, ) ) LOGGER.info("Migration to version %s successful", version) return True async def async_unload_entry(hass, config_entry): """Unload an AirVisual config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(config_entry, platform) for platform in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN][DATA_COORDINATOR].pop(config_entry.entry_id) remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id) remove_listener() if CONF_API_KEY in config_entry.data: # Re-calculate the update interval period for any remaining consumers of # this API key: async_sync_geo_coordinator_update_intervals( hass, config_entry.data[CONF_API_KEY] ) return unload_ok async def async_reload_entry(hass, config_entry): """Handle an options update.""" await hass.config_entries.async_reload(config_entry.entry_id) class AirVisualEntity(CoordinatorEntity): """Define a generic AirVisual entity.""" def __init__(self, coordinator): """Initialize.""" super().__init__(coordinator) self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION} self._icon = None self._unit = None @property def extra_state_attributes(self): """Return the device state attributes.""" return self._attrs @property def icon(self): """Return the icon.""" return self._icon @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit async def async_added_to_hass(self): """Register callbacks.""" @callback def update(): """Update the state.""" self.update_from_latest_data() self.async_write_ha_state() self.async_on_remove(self.coordinator.async_add_listener(update)) self.update_from_latest_data() @callback def update_from_latest_data(self): """Update the entity from the latest data.""" raise NotImplementedError
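# --- Illustrative sketch (not part of the integration) ----------------------
# A standalone check of the rate-leveling math in
# async_get_cloud_api_update_interval above, assuming the same 10,000
# calls/month budget with a 1,500-call buffer (hence the 8,500 divisor): the
# polling interval grows linearly with the number of config entries sharing
# one API key. The helper name `leveled_interval_minutes` is hypothetical and
# exists only for this sketch.
from math import ceil


def leveled_interval_minutes(num_consumers: int) -> int:
    """Minutes between polls so all consumers together stay under ~8,500 calls/month."""
    return ceil(num_consumers * 31 * 24 * 60 / 8500)


# One entry polls roughly every 6 minutes (~7,440 calls/month); three entries
# sharing a key each poll every 16 minutes (~8,370 calls/month combined).
assert leveled_interval_minutes(1) == 6
assert leveled_interval_minutes(3) == 16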
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/airvisual/__init__.py
"""Module that groups code required to handle state restore for component.""" from __future__ import annotations import asyncio import logging from typing import Any, Iterable from homeassistant.const import ( ATTR_MODE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, ) from homeassistant.core import Context, HomeAssistant, State from .const import ATTR_HUMIDITY, DOMAIN, SERVICE_SET_HUMIDITY, SERVICE_SET_MODE _LOGGER = logging.getLogger(__name__) async def _async_reproduce_states( hass: HomeAssistant, state: State, *, context: Context | None = None, reproduce_options: dict[str, Any] | None = None, ) -> None: """Reproduce component states.""" cur_state = hass.states.get(state.entity_id) if cur_state is None: _LOGGER.warning("Unable to find entity %s", state.entity_id) return async def call_service(service: str, keys: Iterable, data=None): """Call service with set of attributes given.""" data = data or {} data["entity_id"] = state.entity_id for key in keys: if key in state.attributes: data[key] = state.attributes[key] await hass.services.async_call( DOMAIN, service, data, blocking=True, context=context ) if state.state == STATE_OFF: # Ensure the device is off if it needs to be and exit if cur_state.state != STATE_OFF: await call_service(SERVICE_TURN_OFF, []) return if state.state != STATE_ON: # we can't know how to handle this _LOGGER.warning( "Invalid state specified for %s: %s", state.entity_id, state.state ) return # First of all, turn on if needed, because the device might not # be able to set mode and humidity while being off if cur_state.state != STATE_ON: await call_service(SERVICE_TURN_ON, []) # refetch the state as turning on might allow us to see some more values cur_state = hass.states.get(state.entity_id) # Then set the mode before target humidity, because switching modes # may invalidate target humidity if ATTR_MODE in state.attributes and state.attributes[ ATTR_MODE ] != cur_state.attributes.get(ATTR_MODE): await call_service(SERVICE_SET_MODE, [ATTR_MODE]) # Next, restore target humidity for the current mode if ATTR_HUMIDITY in state.attributes and state.attributes[ ATTR_HUMIDITY ] != cur_state.attributes.get(ATTR_HUMIDITY): await call_service(SERVICE_SET_HUMIDITY, [ATTR_HUMIDITY]) async def async_reproduce_states( hass: HomeAssistant, states: Iterable[State], *, context: Context | None = None, reproduce_options: dict[str, Any] | None = None, ) -> None: """Reproduce component states.""" await asyncio.gather( *( _async_reproduce_states( hass, state, context=context, reproduce_options=reproduce_options ) for state in states ) )
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
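# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the test suite): test_media_state above
# asserts query_attributes() against ``trt.activity_lookup`` and
# ``trt.playback_lookup``.  The mappings below are an *assumption* of what
# such lookups could look like, using Google's documented MediaState values
# (ACTIVE / STANDBY / INACTIVE and PLAYING / PAUSED / STOPPED); the
# authoritative tables live in homeassistant.components.google_assistant.trait.
EXAMPLE_ACTIVITY_LOOKUP = {
    "off": "INACTIVE",
    "idle": "STANDBY",
    "playing": "ACTIVE",
    "paused": "ACTIVE",
}
EXAMPLE_PLAYBACK_LOOKUP = {
    "off": "STOPPED",
    "idle": "STOPPED",
    "playing": "PLAYING",
    "paused": "PAUSED",
}


def example_media_state_attributes(state: str) -> dict:
    """Build the same attribute shape test_media_state asserts on.

    Unknown states fall through to None via ``dict.get``, mirroring the
    ``.get(state)`` calls in the assertion above.
    """
    return {
        "activityState": EXAMPLE_ACTIVITY_LOOKUP.get(state),
        "playbackState": EXAMPLE_PLAYBACK_LOOKUP.get(state),
    }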
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/humidifier/reproduce_state.py
"""Platform for switch integration."""
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType

from .const import DOMAIN
from .devolo_device import DevoloDeviceEntity


async def async_setup_entry(
    hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
    """Get all devices and setup the switch devices via config entry."""
    entities = []

    for gateway in hass.data[DOMAIN][entry.entry_id]["gateways"]:
        for device in gateway.binary_switch_devices:
            for binary_switch in device.binary_switch_property:
                # Exclude the binary switch which also has multi_level_switches here,
                # because those are implemented as light entities now.
                if not hasattr(device, "multi_level_switch_property"):
                    entities.append(
                        DevoloSwitch(
                            homecontrol=gateway,
                            device_instance=device,
                            element_uid=binary_switch,
                        )
                    )

    async_add_entities(entities)


class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
    """Representation of a switch."""

    def __init__(self, homecontrol, device_instance, element_uid):
        """Initialize a devolo Switch."""
        super().__init__(
            homecontrol=homecontrol,
            device_instance=device_instance,
            element_uid=element_uid,
        )
        self._binary_switch_property = self._device_instance.binary_switch_property.get(
            self._unique_id
        )
        self._is_on = self._binary_switch_property.state

        if hasattr(self._device_instance, "consumption_property"):
            self._consumption = self._device_instance.consumption_property.get(
                self._unique_id.replace("BinarySwitch", "Meter")
            ).current
        else:
            self._consumption = None

    @property
    def is_on(self):
        """Return the state."""
        return self._is_on

    @property
    def current_power_w(self):
        """Return the current consumption."""
        return self._consumption

    def turn_on(self, **kwargs):
        """Switch on the device."""
        self._is_on = True
        self._binary_switch_property.set(state=True)

    def turn_off(self, **kwargs):
        """Switch off the device."""
        self._is_on = False
        self._binary_switch_property.set(state=False)

    def _sync(self, message):
        """Update the binary switch state and consumption."""
        if message[0].startswith("devolo.BinarySwitch"):
            self._is_on = self._device_instance.binary_switch_property[message[0]].state
        elif message[0].startswith("devolo.Meter"):
            self._consumption = self._device_instance.consumption_property[
                message[0]
            ].current
        else:
            self._generic_message(message)
        self.schedule_update_ha_state()
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_PREVIOUS
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK
    )
    await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_RESUME
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY
    )
    await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    # COMMAND_MEDIA_SHUFFLE
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET
    )
    await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "media_player.bla",
        media_player.ATTR_MEDIA_SHUFFLE: True,
    }

    # COMMAND_MEDIA_STOP
    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP
    )
    await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}


@pytest.mark.parametrize(
    "state",
    (
        STATE_OFF,
        STATE_IDLE,
        STATE_PLAYING,
        STATE_ON,
        STATE_PAUSED,
        STATE_STANDBY,
        STATE_UNAVAILABLE,
        STATE_UNKNOWN,
    ),
)
async def test_media_state(hass, state):
    """Test the MediaStateTrait."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    assert trait.TransportControlTrait.supported(
        media_player.DOMAIN, media_player.SUPPORT_PLAY, None
    )

    trt = trait.MediaStateTrait(
        hass,
        State(
            "media_player.bla",
            state,
            {
                media_player.ATTR_MEDIA_POSITION: 100,
                media_player.ATTR_MEDIA_DURATION: 200,
                media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5,
                ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY
                | media_player.SUPPORT_STOP,
            },
        ),
        BASIC_CONFIG,
    )

    assert trt.sync_attributes() == {
        "supportActivityState": True,
        "supportPlaybackState": True,
    }
    assert trt.query_attributes() == {
        "activityState": trt.activity_lookup.get(state),
        "playbackState": trt.playback_lookup.get(state),
    }
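# --- Editor's illustrative sketch (not part of the original test file) ---
# The tests above all follow the same four-step shape: build a trait from a
# State, check the SYNC payload (sync_attributes), check the QUERY payload
# (query_attributes), then mock the backing service and assert the EXECUTE
# call. A minimal sketch of that pattern, modelled on test_onoff_switch and
# assuming the same module-level helpers used throughout this file (trait,
# State, BASIC_CONFIG, BASIC_DATA, async_mock_service, switch, SERVICE_TURN_OFF):
async def test_trait_pattern_sketch(hass):
    """Sketch of the recurring trait-test pattern."""
    trt = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG)

    assert trt.sync_attributes() == {}  # attributes reported during SYNC
    assert trt.query_attributes() == {"on": True}  # state reported during QUERY

    calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF)
    await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})

    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "switch.bla"}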
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/devolo_home_control/switch.py
"""Brother helpers functions.""" import logging import pysnmp.hlapi.asyncio as hlapi from pysnmp.hlapi.asyncio.cmdgen import lcd from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import callback from homeassistant.helpers import singleton from .const import DOMAIN, SNMP _LOGGER = logging.getLogger(__name__) @singleton.singleton("snmp_engine") def get_snmp_engine(hass): """Get SNMP engine.""" _LOGGER.debug("Creating SNMP engine") snmp_engine = hlapi.SnmpEngine() @callback def shutdown_listener(ev): if hass.data.get(DOMAIN): _LOGGER.debug("Unconfiguring SNMP engine") lcd.unconfigure(hass.data[DOMAIN][SNMP], None) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown_listener) return snmp_engine
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
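# --- Illustrative sketch (not part of the Home Assistant sources above) ---
# test_transport_control above asserts a relative seek lands at 120 s given a
# reported position of 100 s, reported 10 s before "now", plus a +10 000 ms
# offset. The snippet below is a standalone check of that arithmetic only;
# the function name seek_target_seconds is made up for this example.
from datetime import datetime, timedelta


def seek_target_seconds(
    position: float, updated_at: datetime, now: datetime, relative_ms: int
) -> float:
    """Return the absolute seek target: last reported position, plus the time
    elapsed since it was reported, plus the requested relative offset."""
    elapsed = (now - updated_at).total_seconds()
    return position + elapsed + relative_ms / 1000


# Same numbers as the test: 100 s position reported 10 s ago, +10 000 ms.
_now = datetime(2020, 1, 1)
assert seek_target_seconds(100, _now - timedelta(seconds=10), _now, 10_000) == 120
# --- end sketch ---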
repo_name: sander76/home-assistant
test_path: tests/components/google_assistant/test_trait.py
code_path: homeassistant/components/brother/utils.py
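# --- Illustrative sketch (not part of the Home Assistant sources above) ---
# The ColorSetting tests in the test_trait.py file listed above expect
# 200/500 mireds to map to 5000/2000 K and 300 mireds to report as 3333 K.
# That follows from the standard mired <-> Kelvin relation
# (1 mired = 1,000,000 / K); the quick standalone check below verifies the
# numbers without using Home Assistant's color utilities. Function names are
# made up for this example.
def mired_to_kelvin(mired: float) -> int:
    """Convert mireds to Kelvin, truncated to an integer."""
    return int(1_000_000 / mired)


def kelvin_to_mired(kelvin: float) -> int:
    """Convert Kelvin to mireds, truncated to an integer."""
    return int(1_000_000 / kelvin)


assert mired_to_kelvin(200) == 5000
assert mired_to_kelvin(500) == 2000
assert mired_to_kelvin(300) == 3333
assert kelvin_to_mired(2857) == 350
# --- end sketch ---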
"""Support for Enphase Envoy solar energy monitor.""" from datetime import timedelta import logging import async_timeout from envoy_reader.envoy_reader import EnvoyReader import httpx import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import ( CONF_IP_ADDRESS, CONF_MONITORED_CONDITIONS, CONF_NAME, CONF_PASSWORD, CONF_USERNAME, ENERGY_WATT_HOUR, POWER_WATT, ) from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) _LOGGER = logging.getLogger(__name__) SENSORS = { "production": ("Envoy Current Energy Production", POWER_WATT), "daily_production": ("Envoy Today's Energy Production", ENERGY_WATT_HOUR), "seven_days_production": ( "Envoy Last Seven Days Energy Production", ENERGY_WATT_HOUR, ), "lifetime_production": ("Envoy Lifetime Energy Production", ENERGY_WATT_HOUR), "consumption": ("Envoy Current Energy Consumption", POWER_WATT), "daily_consumption": ("Envoy Today's Energy Consumption", ENERGY_WATT_HOUR), "seven_days_consumption": ( "Envoy Last Seven Days Energy Consumption", ENERGY_WATT_HOUR, ), "lifetime_consumption": ("Envoy Lifetime Energy Consumption", ENERGY_WATT_HOUR), "inverters": ("Envoy Inverter", POWER_WATT), } ICON = "mdi:flash" CONST_DEFAULT_HOST = "envoy" SCAN_INTERVAL = timedelta(seconds=60) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_IP_ADDRESS, default=CONST_DEFAULT_HOST): cv.string, vol.Optional(CONF_USERNAME, default="envoy"): cv.string, vol.Optional(CONF_PASSWORD, default=""): cv.string, vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSORS)): vol.All( cv.ensure_list, [vol.In(list(SENSORS))] ), vol.Optional(CONF_NAME, default=""): cv.string, } ) async def async_setup_platform( homeassistant, config, async_add_entities, discovery_info=None ): """Set up the Enphase Envoy sensor.""" ip_address = config[CONF_IP_ADDRESS] monitored_conditions = config[CONF_MONITORED_CONDITIONS] name = config[CONF_NAME] username = config[CONF_USERNAME] password = config[CONF_PASSWORD] if "inverters" in monitored_conditions: envoy_reader = EnvoyReader(ip_address, username, password, inverters=True) else: envoy_reader = EnvoyReader(ip_address, username, password) try: await envoy_reader.getData() except httpx.HTTPStatusError as err: _LOGGER.error("Authentication failure during setup: %s", err) return except httpx.HTTPError as err: raise PlatformNotReady from err async def async_update_data(): """Fetch data from API endpoint.""" data = {} async with async_timeout.timeout(30): try: await envoy_reader.getData() except httpx.HTTPError as err: raise UpdateFailed(f"Error communicating with API: {err}") from err for condition in monitored_conditions: if condition != "inverters": data[condition] = await getattr(envoy_reader, condition)() else: data["inverters_production"] = await getattr( envoy_reader, "inverters_production" )() _LOGGER.debug("Retrieved data from API: %s", data) return data coordinator = DataUpdateCoordinator( homeassistant, _LOGGER, name="sensor", update_method=async_update_data, update_interval=SCAN_INTERVAL, ) await coordinator.async_refresh() if coordinator.data is None: raise PlatformNotReady entities = [] for condition in monitored_conditions: entity_name = "" if ( condition == "inverters" and coordinator.data.get("inverters_production") is not None ): for inverter in coordinator.data["inverters_production"]: entity_name = 
f"{name}{SENSORS[condition][0]} {inverter}" split_name = entity_name.split(" ") serial_number = split_name[-1] entities.append( Envoy( condition, entity_name, serial_number, SENSORS[condition][1], coordinator, ) ) elif condition != "inverters": entity_name = f"{name}{SENSORS[condition][0]}" entities.append( Envoy( condition, entity_name, None, SENSORS[condition][1], coordinator, ) ) async_add_entities(entities) class Envoy(CoordinatorEntity, SensorEntity): """Envoy entity.""" def __init__(self, sensor_type, name, serial_number, unit, coordinator): """Initialize Envoy entity.""" self._type = sensor_type self._name = name self._serial_number = serial_number self._unit_of_measurement = unit super().__init__(coordinator) @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" if self._type != "inverters": value = self.coordinator.data.get(self._type) elif ( self._type == "inverters" and self.coordinator.data.get("inverters_production") is not None ): value = self.coordinator.data.get("inverters_production").get( self._serial_number )[0] else: return None return value @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def icon(self): """Icon to use in the frontend, if any.""" return ICON @property def extra_state_attributes(self): """Return the state attributes.""" if ( self._type == "inverters" and self.coordinator.data.get("inverters_production") is not None ): value = self.coordinator.data.get("inverters_production").get( self._serial_number )[1] return {"last_reported": value} return None
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
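# Illustration only (not part of the test file above): how the expected value in the
# COMMAND_MEDIA_SEEK_RELATIVE assertion is derived. The helper name
# `expected_seek_position` is made up for this sketch; the trait performs the
# equivalent arithmetic internally from the media position, its updated_at timestamp,
# and the requested relative offset.
from datetime import datetime, timedelta


def expected_seek_position(position, updated_at, now, relative_ms):
    """Reported position + seconds elapsed since it was reported + requested offset."""
    return position + (now - updated_at).total_seconds() + relative_ms / 1000


now = datetime(2020, 1, 1)
# 100 s reported 10 s ago, seek forward 10 000 ms -> 120 s, matching the assert above.
assert expected_seek_position(100, now - timedelta(seconds=10), now, 10000) == 120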
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/enphase_envoy/sensor.py
"""Offer MQTT listening automation rules.""" from contextlib import suppress import json import logging import voluptuous as vol from homeassistant.const import CONF_PAYLOAD, CONF_PLATFORM, CONF_VALUE_TEMPLATE from homeassistant.core import HassJob, callback from homeassistant.helpers import config_validation as cv, template from .. import mqtt # mypy: allow-untyped-defs CONF_ENCODING = "encoding" CONF_QOS = "qos" CONF_TOPIC = "topic" DEFAULT_ENCODING = "utf-8" DEFAULT_QOS = 0 TRIGGER_SCHEMA = vol.Schema( { vol.Required(CONF_PLATFORM): mqtt.DOMAIN, vol.Required(CONF_TOPIC): mqtt.util.valid_subscribe_topic_template, vol.Optional(CONF_PAYLOAD): cv.template, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string, vol.Optional(CONF_QOS, default=DEFAULT_QOS): vol.All( vol.Coerce(int), vol.In([0, 1, 2]) ), } ) _LOGGER = logging.getLogger(__name__) async def async_attach_trigger(hass, config, action, automation_info): """Listen for state changes based on configuration.""" trigger_id = automation_info.get("trigger_id") if automation_info else None topic = config[CONF_TOPIC] wanted_payload = config.get(CONF_PAYLOAD) value_template = config.get(CONF_VALUE_TEMPLATE) encoding = config[CONF_ENCODING] or None qos = config[CONF_QOS] job = HassJob(action) variables = None if automation_info: variables = automation_info.get("variables") template.attach(hass, wanted_payload) if wanted_payload: wanted_payload = wanted_payload.async_render( variables, limited=True, parse_result=False ) template.attach(hass, topic) if isinstance(topic, template.Template): topic = topic.async_render(variables, limited=True, parse_result=False) topic = mqtt.util.valid_subscribe_topic(topic) template.attach(hass, value_template) @callback def mqtt_automation_listener(mqttmsg): """Listen for MQTT messages.""" payload = mqttmsg.payload if value_template is not None: payload = value_template.async_render_with_possible_json_value( payload, error_value=None, ) if wanted_payload is None or wanted_payload == payload: data = { "platform": "mqtt", "topic": mqttmsg.topic, "payload": mqttmsg.payload, "qos": mqttmsg.qos, "description": f"mqtt topic {mqttmsg.topic}", "id": trigger_id, } with suppress(ValueError): data["payload_json"] = json.loads(mqttmsg.payload) hass.async_run_hass_job(job, {"trigger": data}) _LOGGER.debug( "Attaching MQTT trigger for topic: '%s', payload: '%s'", topic, wanted_payload ) remove = await mqtt.async_subscribe( hass, topic, mqtt_automation_listener, encoding=encoding, qos=qos ) return remove
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/mqtt/trigger.py
"""Config flow to configure Xiaomi Aqara.""" import logging from socket import gaierror import voluptuous as vol from xiaomi_gateway import MULTICAST_PORT, XiaomiGateway, XiaomiGatewayDiscovery from homeassistant import config_entries from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, CONF_PORT, CONF_PROTOCOL from homeassistant.core import callback from homeassistant.helpers.device_registry import format_mac from .const import ( CONF_INTERFACE, CONF_KEY, CONF_SID, DEFAULT_DISCOVERY_RETRY, DOMAIN, ZEROCONF_ACPARTNER, ZEROCONF_GATEWAY, ) _LOGGER = logging.getLogger(__name__) DEFAULT_GATEWAY_NAME = "Xiaomi Aqara Gateway" DEFAULT_INTERFACE = "any" GATEWAY_CONFIG = vol.Schema( {vol.Optional(CONF_INTERFACE, default=DEFAULT_INTERFACE): str} ) CONFIG_HOST = { vol.Optional(CONF_HOST): str, vol.Optional(CONF_MAC): str, } GATEWAY_CONFIG_HOST = GATEWAY_CONFIG.extend(CONFIG_HOST) GATEWAY_SETTINGS = vol.Schema( { vol.Optional(CONF_KEY): vol.All(str, vol.Length(min=16, max=16)), vol.Optional(CONF_NAME, default=DEFAULT_GATEWAY_NAME): str, } ) class XiaomiAqaraFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a Xiaomi Aqara config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH def __init__(self): """Initialize.""" self.host = None self.interface = DEFAULT_INTERFACE self.sid = None self.gateways = None self.selected_gateway = None @callback def async_show_form_step_user(self, errors): """Show the form belonging to the user step.""" schema = GATEWAY_CONFIG if (self.host is None and self.sid is None) or errors: schema = GATEWAY_CONFIG_HOST return self.async_show_form(step_id="user", data_schema=schema, errors=errors) async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" errors = {} if user_input is None: return self.async_show_form_step_user(errors) self.interface = user_input[CONF_INTERFACE] # allow optional manual setting of host and mac if self.host is None: self.host = user_input.get(CONF_HOST) if self.sid is None: mac_address = user_input.get(CONF_MAC) # format sid from mac_address if mac_address is not None: self.sid = format_mac(mac_address).replace(":", "") # if host is already known by zeroconf discovery or manual optional settings if self.host is not None and self.sid is not None: # Connect to Xiaomi Aqara Gateway self.selected_gateway = await self.hass.async_add_executor_job( XiaomiGateway, self.host, self.sid, None, DEFAULT_DISCOVERY_RETRY, self.interface, MULTICAST_PORT, None, ) if self.selected_gateway.connection_error: errors[CONF_HOST] = "invalid_host" if self.selected_gateway.mac_error: errors[CONF_MAC] = "invalid_mac" if errors: return self.async_show_form_step_user(errors) return await self.async_step_settings() # Discover Xiaomi Aqara Gateways in the netwerk to get required SIDs. 
xiaomi = XiaomiGatewayDiscovery(self.hass.add_job, [], self.interface) try: await self.hass.async_add_executor_job(xiaomi.discover_gateways) except gaierror: errors[CONF_INTERFACE] = "invalid_interface" return self.async_show_form_step_user(errors) self.gateways = xiaomi.gateways if len(self.gateways) == 1: self.selected_gateway = list(self.gateways.values())[0] self.sid = self.selected_gateway.sid return await self.async_step_settings() if len(self.gateways) > 1: return await self.async_step_select() errors["base"] = "discovery_error" return self.async_show_form_step_user(errors) async def async_step_select(self, user_input=None): """Handle multiple aqara gateways found.""" errors = {} if user_input is not None: ip_adress = user_input["select_ip"] self.selected_gateway = self.gateways[ip_adress] self.sid = self.selected_gateway.sid return await self.async_step_settings() select_schema = vol.Schema( { vol.Required("select_ip"): vol.In( [gateway.ip_adress for gateway in self.gateways.values()] ) } ) return self.async_show_form( step_id="select", data_schema=select_schema, errors=errors ) async def async_step_zeroconf(self, discovery_info): """Handle zeroconf discovery.""" name = discovery_info.get("name") self.host = discovery_info.get("host") mac_address = discovery_info.get("properties", {}).get("mac") if not name or not self.host or not mac_address: return self.async_abort(reason="not_xiaomi_aqara") # Check if the discovered device is an xiaomi aqara gateway. if not ( name.startswith(ZEROCONF_GATEWAY) or name.startswith(ZEROCONF_ACPARTNER) ): _LOGGER.debug( "Xiaomi device '%s' discovered with host %s, not identified as xiaomi aqara gateway", name, self.host, ) return self.async_abort(reason="not_xiaomi_aqara") # format mac (include semicolns and make lowercase) mac_address = format_mac(mac_address) # format sid from mac_address self.sid = mac_address.replace(":", "") unique_id = mac_address await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured( {CONF_HOST: self.host, CONF_MAC: mac_address} ) self.context.update({"title_placeholders": {"name": self.host}}) return await self.async_step_user() async def async_step_settings(self, user_input=None): """Specify settings and connect aqara gateway.""" errors = {} if user_input is not None: # get all required data name = user_input[CONF_NAME] key = user_input.get(CONF_KEY) ip_adress = self.selected_gateway.ip_adress port = self.selected_gateway.port protocol = self.selected_gateway.proto if key is not None: # validate key by issuing stop ringtone playback command. self.selected_gateway.key = key valid_key = self.selected_gateway.write_to_hub(self.sid, mid=10000) else: valid_key = True if valid_key: # format_mac, for a gateway the sid equels the mac address mac_address = format_mac(self.sid) # set unique_id unique_id = mac_address await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() return self.async_create_entry( title=name, data={ CONF_HOST: ip_adress, CONF_PORT: port, CONF_MAC: mac_address, CONF_INTERFACE: self.interface, CONF_PROTOCOL: protocol, CONF_KEY: key, CONF_SID: self.sid, }, ) errors[CONF_KEY] = "invalid_key" return self.async_show_form( step_id="settings", data_schema=GATEWAY_SETTINGS, errors=errors )
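The settings step ends the config flow above. As a rough illustration of how such a flow is usually driven in tests, the sketch below walks the user step with Home Assistant's standard hass test fixture. It is an assumption-laden sketch, not part of the repository record: the patch target module path, the "interface" form key, and every gateway value (SID, IP, port, protocol) are hypothetical stand-ins, and XiaomiGatewayDiscovery is mocked so no real UDP discovery runs.

# Hypothetical sketch -- not part of the repository record above.
from unittest.mock import Mock, patch

from homeassistant import config_entries
from homeassistant.components.xiaomi_aqara.const import DOMAIN


async def test_user_flow_single_gateway(hass):
    """Drive the user step when discovery returns exactly one gateway."""
    fake_gateway = Mock(
        sid="abcdef123456",  # made-up SID
        ip_adress="192.168.1.20",  # attribute spelling follows the xiaomi_gateway library
        port=9898,
        proto="1.1.2",
    )

    with patch(
        "homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery"
    ) as discovery_cls:
        discovery_cls.return_value.gateways = {"192.168.1.20": fake_gateway}

        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        assert result["step_id"] == "user"

        # Submitting only the interface leaves host/mac unset, so the flow
        # falls through to network discovery (mocked here).
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], {"interface": "any"}
        )

    # A single discovered gateway is selected automatically and the flow
    # proceeds straight to the settings form.
    assert result["step_id"] == "settings"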
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
repo_name: sander76/home-assistant
test_path: tests/components/google_assistant/test_trait.py
code_path: homeassistant/components/xiaomi_aqara/config_flow.py
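Almost every test in the trait test file above follows the same four-step pattern: wrap a synthetic State in the trait under test, capture service calls with async_mock_service, execute a Google Assistant command, and assert the resulting call. The sketch below restates that pattern in isolation; the entity id, request id, and function name are illustrative placeholders, and BASIC_CONFIG is the MockConfig instance the test package already exports.

from homeassistant.components import light
from homeassistant.components.google_assistant import const, helpers, trait
from homeassistant.const import ATTR_ENTITY_ID, STATE_ON
from homeassistant.core import State

from . import BASIC_CONFIG  # MockConfig instance exported by this test package

from tests.common import async_mock_service

# Request metadata, built the same way as the BASIC_DATA object used above.
REQUEST = helpers.RequestData(
    BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, "example-request-id", None
)


async def test_trait_pattern_sketch(hass):
    """Illustrative sketch only: trait + mocked service + execute + assert."""
    # 1. Build the trait around a synthetic entity state.
    trt = trait.BrightnessTrait(
        hass,
        State("light.kitchen", STATE_ON, {light.ATTR_BRIGHTNESS: 128}),
        BASIC_CONFIG,
    )

    # 2. Capture service calls instead of talking to a real integration.
    calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON)

    # 3. Execute the Google Assistant command against the trait.
    await trt.execute(
        trait.COMMAND_BRIGHTNESS_ABSOLUTE, REQUEST, {"brightness": 50}, {}
    )

    # 4. The command should translate into exactly one service call.
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: "light.kitchen",
        light.ATTR_BRIGHTNESS_PCT: 50,
    }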
"""Shared utilities for different supported platforms.""" import asyncio from datetime import datetime, timedelta import logging import aiohttp import async_timeout from buienradar.buienradar import parse_data from buienradar.constants import ( ATTRIBUTION, CONDITION, CONTENT, DATA, FORECAST, HUMIDITY, MESSAGE, PRESSURE, STATIONNAME, STATUS_CODE, SUCCESS, TEMPERATURE, VISIBILITY, WINDAZIMUTH, WINDSPEED, ) from buienradar.urls import JSON_FEED_URL, json_precipitation_forecast_url from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, HTTP_OK from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util import dt as dt_util from .const import SCHEDULE_NOK, SCHEDULE_OK __all__ = ["BrData"] _LOGGER = logging.getLogger(__name__) """ Log at WARN level after WARN_THRESHOLD failures, otherwise log at DEBUG level. """ WARN_THRESHOLD = 4 def threshold_log(count: int, *args, **kwargs) -> None: """Log at warn level after WARN_THRESHOLD failures, debug otherwise.""" if count >= WARN_THRESHOLD: _LOGGER.warning(*args, **kwargs) else: _LOGGER.debug(*args, **kwargs) class BrData: """Get the latest data and updates the states.""" # Initialize to warn immediately if the first call fails. load_error_count: int = WARN_THRESHOLD rain_error_count: int = WARN_THRESHOLD def __init__(self, hass, coordinates, timeframe, devices): """Initialize the data object.""" self.devices = devices self.data = {} self.hass = hass self.coordinates = coordinates self.timeframe = timeframe async def update_devices(self): """Update all devices/sensors.""" if not self.devices: return # Update all devices for dev in self.devices: dev.data_updated(self.data) async def schedule_update(self, minute=1): """Schedule an update after minute minutes.""" _LOGGER.debug("Scheduling next update in %s minutes", minute) nxt = dt_util.utcnow() + timedelta(minutes=minute) async_track_point_in_utc_time(self.hass, self.async_update, nxt) async def get_data(self, url): """Load data from specified url.""" _LOGGER.debug("Calling url: %s", url) result = {SUCCESS: False, MESSAGE: None} resp = None try: websession = async_get_clientsession(self.hass) with async_timeout.timeout(10): resp = await websession.get(url) result[STATUS_CODE] = resp.status result[CONTENT] = await resp.text() if resp.status == HTTP_OK: result[SUCCESS] = True else: result[MESSAGE] = "Got http statuscode: %d" % (resp.status) return result except (asyncio.TimeoutError, aiohttp.ClientError) as err: result[MESSAGE] = "%s" % err return result finally: if resp is not None: await resp.release() async def async_update(self, *_): """Update the data from buienradar.""" content = await self.get_data(JSON_FEED_URL) if content.get(SUCCESS) is not True: # unable to get the data self.load_error_count += 1 threshold_log( self.load_error_count, "Unable to retrieve json data from Buienradar" "(Msg: %s, status: %s,)", content.get(MESSAGE), content.get(STATUS_CODE), ) # schedule new call await self.schedule_update(SCHEDULE_NOK) return self.load_error_count = 0 # rounding coordinates prevents unnecessary redirects/calls lat = self.coordinates[CONF_LATITUDE] lon = self.coordinates[CONF_LONGITUDE] rainurl = json_precipitation_forecast_url(lat, lon) raincontent = await self.get_data(rainurl) if raincontent.get(SUCCESS) is not True: self.rain_error_count += 1 # unable to get the data threshold_log( self.rain_error_count, "Unable to retrieve rain data from Buienradar" "(Msg: %s, status: %s)", 
raincontent.get(MESSAGE), raincontent.get(STATUS_CODE), ) # schedule new call await self.schedule_update(SCHEDULE_NOK) return self.rain_error_count = 0 result = parse_data( content.get(CONTENT), raincontent.get(CONTENT), self.coordinates[CONF_LATITUDE], self.coordinates[CONF_LONGITUDE], self.timeframe, False, ) _LOGGER.debug("Buienradar parsed data: %s", result) if result.get(SUCCESS) is not True: if int(datetime.now().strftime("%H")) > 0: _LOGGER.warning( "Unable to parse data from Buienradar. (Msg: %s)", result.get(MESSAGE), ) await self.schedule_update(SCHEDULE_NOK) return self.data = result.get(DATA) await self.update_devices() await self.schedule_update(SCHEDULE_OK) @property def attribution(self): """Return the attribution.""" return self.data.get(ATTRIBUTION) @property def stationname(self): """Return the name of the selected weatherstation.""" return self.data.get(STATIONNAME) @property def condition(self): """Return the condition.""" return self.data.get(CONDITION) @property def temperature(self): """Return the temperature, or None.""" try: return float(self.data.get(TEMPERATURE)) except (ValueError, TypeError): return None @property def pressure(self): """Return the pressure, or None.""" try: return float(self.data.get(PRESSURE)) except (ValueError, TypeError): return None @property def humidity(self): """Return the humidity, or None.""" try: return int(self.data.get(HUMIDITY)) except (ValueError, TypeError): return None @property def visibility(self): """Return the visibility, or None.""" try: return int(self.data.get(VISIBILITY)) except (ValueError, TypeError): return None @property def wind_speed(self): """Return the windspeed, or None.""" try: return float(self.data.get(WINDSPEED)) except (ValueError, TypeError): return None @property def wind_bearing(self): """Return the wind bearing, or None.""" try: return int(self.data.get(WINDAZIMUTH)) except (ValueError, TypeError): return None @property def forecast(self): """Return the forecast data.""" return self.data.get(FORECAST)
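The BrData helper above keeps transient fetch failures quiet: threshold_log logs at DEBUG until WARN_THRESHOLD consecutive failures accumulate, and the counters start at the threshold so the very first failure after startup is still surfaced as a warning. Below is a small, self-contained sketch of that escalation pattern; FlakyFeed is a hypothetical stand-in for a feed consumer and is not part of the buienradar code.

import logging

_LOGGER = logging.getLogger(__name__)

WARN_THRESHOLD = 4  # mirrors the constant used by BrData above


def threshold_log(count: int, *args, **kwargs) -> None:
    """Log at WARNING once count reaches WARN_THRESHOLD, at DEBUG otherwise."""
    if count >= WARN_THRESHOLD:
        _LOGGER.warning(*args, **kwargs)
    else:
        _LOGGER.debug(*args, **kwargs)


class FlakyFeed:
    """Hypothetical consumer showing how BrData drives its error counters."""

    def __init__(self) -> None:
        # Start at the threshold so the very first failure is logged as a
        # warning, just as BrData initialises load_error_count/rain_error_count.
        self.error_count = WARN_THRESHOLD

    def record(self, success: bool, message: str = "") -> None:
        """Reset the counter on success; count and log on failure."""
        if success:
            self.error_count = 0
            return
        self.error_count += 1
        threshold_log(self.error_count, "Feed unavailable: %s", message)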
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
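A short editorial aside on the climate range test above: the assertions mix Fahrenheit attributes on the climate state with Celsius values in the Google payloads, so the expected numbers are just unit conversions rounded to one decimal. The standalone sketch below reproduces that arithmetic with plain formulas; it only illustrates where 21.1, 18.3, 23.9 and 77, 68 come from, and the helper names are hypothetical rather than the helpers the trait implementation itself uses.

def fahrenheit_to_celsius(value: float) -> float:
    """Convert degrees F to degrees C, rounded to one decimal as in the assertions."""
    return round((value - 32) * 5 / 9, 1)


def celsius_to_fahrenheit(value: float) -> float:
    """Convert degrees C to degrees F, rounded to one decimal."""
    return round(value * 9 / 5 + 32, 1)


assert fahrenheit_to_celsius(70) == 21.1  # thermostatTemperatureAmbient
assert fahrenheit_to_celsius(65) == 18.3  # thermostatTemperatureSetpointLow
assert fahrenheit_to_celsius(75) == 23.9  # thermostatTemperatureSetpointHigh
assert celsius_to_fahrenheit(25) == 77  # ATTR_TARGET_TEMP_HIGH sent to climate
assert celsius_to_fahrenheit(20) == 68  # ATTR_TARGET_TEMP_LOW sent to climate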
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/buienradar/util.py
"""Update the IP addresses of your Cloudflare DNS records.""" from __future__ import annotations from datetime import timedelta import logging from pycfdns import CloudflareUpdater from pycfdns.exceptions import ( CloudflareAuthenticationException, CloudflareConnectionException, CloudflareException, ) import voluptuous as vol from homeassistant.components import persistent_notification from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_EMAIL, CONF_ZONE from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import async_track_time_interval from .const import ( CONF_RECORDS, DATA_UNDO_UPDATE_INTERVAL, DEFAULT_UPDATE_INTERVAL, DOMAIN, SERVICE_UPDATE_RECORDS, ) _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.All( cv.deprecated(CONF_EMAIL), cv.deprecated(CONF_API_KEY), cv.deprecated(CONF_ZONE), cv.deprecated(CONF_RECORDS), vol.Schema( { vol.Optional(CONF_EMAIL): cv.string, vol.Optional(CONF_API_KEY): cv.string, vol.Optional(CONF_ZONE): cv.string, vol.Optional(CONF_RECORDS): vol.All(cv.ensure_list, [cv.string]), } ), ) }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass: HomeAssistant, config: dict) -> bool: """Set up the component.""" hass.data.setdefault(DOMAIN, {}) if len(hass.config_entries.async_entries(DOMAIN)) > 0: return True if DOMAIN in config and CONF_API_KEY in config[DOMAIN]: persistent_notification.async_create( hass, "Cloudflare integration now requires an API Token. Please go to the integrations page to setup.", "Cloudflare Setup", "cloudflare_setup", ) return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Cloudflare from a config entry.""" cfupdate = CloudflareUpdater( async_get_clientsession(hass), entry.data[CONF_API_TOKEN], entry.data[CONF_ZONE], entry.data[CONF_RECORDS], ) try: zone_id = await cfupdate.get_zone_id() except CloudflareAuthenticationException: _LOGGER.error("API access forbidden. 
Please reauthenticate") return False except CloudflareConnectionException as error: raise ConfigEntryNotReady from error async def update_records(now): """Set up recurring update.""" try: await _async_update_cloudflare(cfupdate, zone_id) except CloudflareException as error: _LOGGER.error("Error updating zone %s: %s", entry.data[CONF_ZONE], error) async def update_records_service(call): """Set up service for manual trigger.""" try: await _async_update_cloudflare(cfupdate, zone_id) except CloudflareException as error: _LOGGER.error("Error updating zone %s: %s", entry.data[CONF_ZONE], error) update_interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL) undo_interval = async_track_time_interval(hass, update_records, update_interval) hass.data[DOMAIN][entry.entry_id] = { DATA_UNDO_UPDATE_INTERVAL: undo_interval, } hass.services.async_register(DOMAIN, SERVICE_UPDATE_RECORDS, update_records_service) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload Cloudflare config entry.""" hass.data[DOMAIN][entry.entry_id][DATA_UNDO_UPDATE_INTERVAL]() hass.data[DOMAIN].pop(entry.entry_id) return True async def _async_update_cloudflare(cfupdate: CloudflareUpdater, zone_id: str): _LOGGER.debug("Starting update for zone %s", cfupdate.zone) records = await cfupdate.get_record_info(zone_id) _LOGGER.debug("Records: %s", records) await cfupdate.update_records(zone_id, records) _LOGGER.debug("Update for zone %s is complete", cfupdate.zone)
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/cloudflare/__init__.py
"""Support for getting information from Arduino pins.""" import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import CONF_NAME import homeassistant.helpers.config_validation as cv from . import DOMAIN CONF_PINS = "pins" CONF_TYPE = "analog" PIN_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_PINS): vol.Schema({cv.positive_int: PIN_SCHEMA})} ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Arduino platform.""" board = hass.data[DOMAIN] pins = config[CONF_PINS] sensors = [] for pinnum, pin in pins.items(): sensors.append(ArduinoSensor(pin.get(CONF_NAME), pinnum, CONF_TYPE, board)) add_entities(sensors) class ArduinoSensor(SensorEntity): """Representation of an Arduino Sensor.""" def __init__(self, name, pin, pin_type, board): """Initialize the sensor.""" self._pin = pin self._name = name self.pin_type = pin_type self.direction = "in" self._value = None board.set_mode(self._pin, self.direction, self.pin_type) self._board = board @property def state(self): """Return the state of the sensor.""" return self._value @property def name(self): """Get the name of the sensor.""" return self._name def update(self): """Get the latest value from the pin.""" self._value = self._board.get_analog_inputs()[self._pin][1]
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(on_calls) == 1
    assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"}

    off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"}


async def test_onoff_light(hass):
    """Test OnOff trait support for light domain."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(light.DOMAIN, 0, None)

    trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG)
    assert trt_on.sync_attributes() == {}
    assert trt_on.query_attributes() == {"on": True}

    trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG)
    assert trt_off.query_attributes() == {"on": False}

    on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(on_calls) == 1
    assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"}

    off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"}


async def test_onoff_media_player(hass):
    """Test OnOff trait support for media_player domain."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None)

    trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG)
    assert trt_on.sync_attributes() == {}
    assert trt_on.query_attributes() == {"on": True}

    trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG)
    assert trt_off.query_attributes() == {"on": False}

    on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(on_calls) == 1
    assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}

    off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"}


async def test_onoff_humidifier(hass):
    """Test OnOff trait support for humidifier domain."""
    assert helpers.get_google_type(humidifier.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None)

    trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG)
    assert trt_on.sync_attributes() == {}
    assert trt_on.query_attributes() == {"on": True}

    trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG)
    assert trt_off.query_attributes() == {"on": False}

    on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {})
    assert len(on_calls) == 1
    assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"}

    off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"}


async def test_dock_vacuum(hass):
    """Test dock trait support for vacuum domain."""
    assert helpers.get_google_type(vacuum.DOMAIN, None) is not None
    assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None)

    trt =
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
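The trait tests above all follow the same shape; the sketch below restates that shape in one place as a reference, reusing only helpers already imported in this file (State, async_mock_service, BASIC_CONFIG, BASIC_DATA). The entity id "switch.example" and the test name are illustrative only, not part of the suite.

async def test_trait_pattern_sketch(hass):
    """Sketch of the shared pattern: state -> trait -> execute -> assert service call."""
    # Wrap a plain State object in the trait under test.
    trt = trait.OnOffTrait(hass, State("switch.example", STATE_ON), BASIC_CONFIG)

    # sync_attributes() describes the trait to Google Assistant,
    # query_attributes() reports the current state of the entity.
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {"on": True}

    # async_mock_service records every call made to the given domain/service pair.
    calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF)
    await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {})

    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: "switch.example"}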
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/arduino/sensor.py
"""Support for the Vallox ventilation unit fan.""" import logging from homeassistant.components.fan import FanEntity from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import ( DOMAIN, METRIC_KEY_MODE, METRIC_KEY_PROFILE_FAN_SPEED_AWAY, METRIC_KEY_PROFILE_FAN_SPEED_BOOST, METRIC_KEY_PROFILE_FAN_SPEED_HOME, SIGNAL_VALLOX_STATE_UPDATE, ) _LOGGER = logging.getLogger(__name__) # Device attributes ATTR_PROFILE_FAN_SPEED_HOME = { "description": "fan_speed_home", "metric_key": METRIC_KEY_PROFILE_FAN_SPEED_HOME, } ATTR_PROFILE_FAN_SPEED_AWAY = { "description": "fan_speed_away", "metric_key": METRIC_KEY_PROFILE_FAN_SPEED_AWAY, } ATTR_PROFILE_FAN_SPEED_BOOST = { "description": "fan_speed_boost", "metric_key": METRIC_KEY_PROFILE_FAN_SPEED_BOOST, } async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the fan device.""" if discovery_info is None: return client = hass.data[DOMAIN]["client"] client.set_settable_address(METRIC_KEY_MODE, int) device = ValloxFan( hass.data[DOMAIN]["name"], client, hass.data[DOMAIN]["state_proxy"] ) async_add_entities([device], update_before_add=False) class ValloxFan(FanEntity): """Representation of the fan.""" def __init__(self, name, client, state_proxy): """Initialize the fan.""" self._name = name self._client = client self._state_proxy = state_proxy self._available = False self._state = None self._fan_speed_home = None self._fan_speed_away = None self._fan_speed_boost = None @property def should_poll(self): """Do not poll the device.""" return False @property def name(self): """Return the name of the device.""" return self._name @property def available(self): """Return if state is known.""" return self._available @property def is_on(self): """Return if device is on.""" return self._state @property def extra_state_attributes(self): """Return device specific state attributes.""" return { ATTR_PROFILE_FAN_SPEED_HOME["description"]: self._fan_speed_home, ATTR_PROFILE_FAN_SPEED_AWAY["description"]: self._fan_speed_away, ATTR_PROFILE_FAN_SPEED_BOOST["description"]: self._fan_speed_boost, } async def async_added_to_hass(self): """Call to update.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_VALLOX_STATE_UPDATE, self._update_callback ) ) @callback def _update_callback(self): """Call update method.""" self.async_schedule_update_ha_state(True) async def async_update(self): """Fetch state from the device.""" try: # Fetch if the whole device is in regular operation state. mode = self._state_proxy.fetch_metric(METRIC_KEY_MODE) if mode == 0: self._state = True else: self._state = False # Fetch the profile fan speeds. self._fan_speed_home = int( self._state_proxy.fetch_metric( ATTR_PROFILE_FAN_SPEED_HOME["metric_key"] ) ) self._fan_speed_away = int( self._state_proxy.fetch_metric( ATTR_PROFILE_FAN_SPEED_AWAY["metric_key"] ) ) self._fan_speed_boost = int( self._state_proxy.fetch_metric( ATTR_PROFILE_FAN_SPEED_BOOST["metric_key"] ) ) self._available = True except (OSError, KeyError) as err: self._available = False _LOGGER.error("Error updating fan: %s", err) # # The fan entity model has changed to use percentages and preset_modes # instead of speeds. 
# # Please review # https://developers.home-assistant.io/docs/core/entity/fan/ # async def async_turn_on( self, speed: str = None, percentage: int = None, preset_mode: str = None, **kwargs, ) -> None: """Turn the device on.""" _LOGGER.debug("Turn on: %s", speed) # Only the case speed == None equals the GUI toggle switch being # activated. if speed is not None: return if self._state is False: try: await self._client.set_values({METRIC_KEY_MODE: 0}) # This state change affects other entities like sensors. Force # an immediate update that can be observed by all parties # involved. await self._state_proxy.async_update(None) except OSError as err: self._available = False _LOGGER.error("Error turning on: %s", err) else: _LOGGER.error("Already on") async def async_turn_off(self, **kwargs) -> None: """Turn the device off.""" if self._state is True: try: await self._client.set_values({METRIC_KEY_MODE: 5}) # Same as for turn_on method. await self._state_proxy.async_update(None) except OSError as err: self._available = False _LOGGER.error("Error turning off: %s", err) else: _LOGGER.error("Already off")
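The comment above notes that the fan entity model has moved to percentages and preset_modes. The sketch below is one possible direction for exposing a percentage on this entity, reusing only names defined in this file (self._fan_speed_home, self._client.set_values, self._state_proxy.async_update). It is only a sketch: it assumes the "home" profile fan speed metric is a 0-100 value and that it can be written through client.set_values(), neither of which is confirmed here.

    # Sketch only: candidate methods that would sit on ValloxFan under the
    # percentage-based fan model.
    @property
    def percentage(self):
        """Return the current fan speed as a percentage (assumes a 0-100 metric)."""
        return self._fan_speed_home

    async def async_set_percentage(self, percentage: int) -> None:
        """Set the fan speed percentage (assumes the metric key is settable)."""
        if percentage == 0:
            await self.async_turn_off()
            return
        try:
            await self._client.set_values(
                {ATTR_PROFILE_FAN_SPEED_HOME["metric_key"]: percentage}
            )
            # Force an immediate update, as the existing turn_on/turn_off methods do.
            await self._state_proxy.async_update(None)
        except OSError as err:
            self._available = False
            _LOGGER.error("Error setting fan speed: %s", err)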
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
sander76/home-assistant
tests/components/google_assistant/test_trait.py
homeassistant/components/vallox/fan.py
"""Support for currencylayer.com exchange rates service.""" from datetime import timedelta import logging import requests import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_API_KEY, CONF_BASE, CONF_NAME, CONF_QUOTE, ) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) _RESOURCE = "http://apilayer.net/api/live" ATTRIBUTION = "Data provided by currencylayer.com" DEFAULT_BASE = "USD" DEFAULT_NAME = "CurrencyLayer Sensor" ICON = "mdi:currency" SCAN_INTERVAL = timedelta(hours=4) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_QUOTE): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_BASE, default=DEFAULT_BASE): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Currencylayer sensor.""" base = config[CONF_BASE] api_key = config[CONF_API_KEY] parameters = {"source": base, "access_key": api_key, "format": 1} rest = CurrencylayerData(_RESOURCE, parameters) response = requests.get(_RESOURCE, params=parameters, timeout=10) sensors = [] for variable in config[CONF_QUOTE]: sensors.append(CurrencylayerSensor(rest, base, variable)) if "error" in response.json(): return False add_entities(sensors, True) class CurrencylayerSensor(SensorEntity): """Implementing the Currencylayer sensor.""" def __init__(self, rest, base, quote): """Initialize the sensor.""" self.rest = rest self._quote = quote self._base = base self._state = None @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._quote @property def name(self): """Return the name of the sensor.""" return self._base @property def icon(self): """Return the icon to use in the frontend, if any.""" return ICON @property def state(self): """Return the state of the sensor.""" return self._state @property def extra_state_attributes(self): """Return the state attributes of the sensor.""" return {ATTR_ATTRIBUTION: ATTRIBUTION} def update(self): """Update current date.""" self.rest.update() value = self.rest.data if value is not None: self._state = round(value[f"{self._base}{self._quote}"], 4) class CurrencylayerData: """Get data from Currencylayer.org.""" def __init__(self, resource, parameters): """Initialize the data object.""" self._resource = resource self._parameters = parameters self.data = None def update(self): """Get the latest data from Currencylayer.""" try: result = requests.get(self._resource, params=self._parameters, timeout=10) if "error" in result.json(): raise ValueError(result.json()["error"]["info"]) self.data = result.json()["quotes"] _LOGGER.debug("Currencylayer data updated: %s", result.json()["timestamp"]) except ValueError as err: _LOGGER.error("Check Currencylayer API %s", err.args) self.data = None
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }
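The relative-seek assertion in the TransportControl test above hinges on a small piece of arithmetic: the reported media position is only accurate as of ATTR_MEDIA_POSITION_UPDATED_AT, so the time elapsed since that timestamp is added before the relative offset from the command is applied (100 s + 10 s elapsed + 10 s requested = 120 s). A minimal sketch of that calculation follows, assuming the helper name relative_seek_target, which is illustrative and not part of Home Assistant:

from datetime import datetime, timedelta

def relative_seek_target(position: float, updated_at: datetime,
                         now: datetime, relative_ms: int) -> float:
    """Return the absolute seek position implied by a relative seek.

    The stored position is only valid as of ``updated_at``, so the time
    elapsed since then is added before applying the relative offset.
    """
    elapsed = (now - updated_at).total_seconds()
    return position + elapsed + relative_ms / 1000

# Mirrors the numbers in the test above: position 100 s, last updated 10 s
# ago, relative seek of 10000 ms -> expected absolute seek position of 120 s.
now = datetime(2020, 1, 1)
assert relative_seek_target(100, now - timedelta(seconds=10), now, 10000) == 120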
repo_name: sander76/home-assistant
test_path: tests/components/google_assistant/test_trait.py
code_path: homeassistant/components/currencylayer/sensor.py
"""Support for performing TensorFlow classification on images.""" import io import logging import os import sys import time from PIL import Image, ImageDraw, UnidentifiedImageError import numpy as np import tensorflow as tf # pylint: disable=import-error import voluptuous as vol from homeassistant.components.image_processing import ( CONF_CONFIDENCE, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE, PLATFORM_SCHEMA, ImageProcessingEntity, ) from homeassistant.const import EVENT_HOMEASSISTANT_START from homeassistant.core import split_entity_id from homeassistant.helpers import template import homeassistant.helpers.config_validation as cv from homeassistant.util.pil import draw_box os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2" DOMAIN = "tensorflow" _LOGGER = logging.getLogger(__name__) ATTR_MATCHES = "matches" ATTR_SUMMARY = "summary" ATTR_TOTAL_MATCHES = "total_matches" ATTR_PROCESS_TIME = "process_time" CONF_AREA = "area" CONF_BOTTOM = "bottom" CONF_CATEGORIES = "categories" CONF_CATEGORY = "category" CONF_FILE_OUT = "file_out" CONF_GRAPH = "graph" CONF_LABELS = "labels" CONF_LABEL_OFFSET = "label_offset" CONF_LEFT = "left" CONF_MODEL = "model" CONF_MODEL_DIR = "model_dir" CONF_RIGHT = "right" CONF_TOP = "top" AREA_SCHEMA = vol.Schema( { vol.Optional(CONF_BOTTOM, default=1): cv.small_float, vol.Optional(CONF_LEFT, default=0): cv.small_float, vol.Optional(CONF_RIGHT, default=1): cv.small_float, vol.Optional(CONF_TOP, default=0): cv.small_float, } ) CATEGORY_SCHEMA = vol.Schema( {vol.Required(CONF_CATEGORY): cv.string, vol.Optional(CONF_AREA): AREA_SCHEMA} ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_FILE_OUT, default=[]): vol.All(cv.ensure_list, [cv.template]), vol.Required(CONF_MODEL): vol.Schema( { vol.Required(CONF_GRAPH): cv.isdir, vol.Optional(CONF_AREA): AREA_SCHEMA, vol.Optional(CONF_CATEGORIES, default=[]): vol.All( cv.ensure_list, [vol.Any(cv.string, CATEGORY_SCHEMA)] ), vol.Optional(CONF_LABELS): cv.isfile, vol.Optional(CONF_LABEL_OFFSET, default=1): int, vol.Optional(CONF_MODEL_DIR): cv.isdir, } ), } ) def get_model_detection_function(model): """Get a tf.function for detection.""" @tf.function def detect_fn(image): """Detect objects in image.""" image, shapes = model.preprocess(image) prediction_dict = model.predict(image, shapes) detections = model.postprocess(prediction_dict, shapes) return detections return detect_fn def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the TensorFlow image processing platform.""" model_config = config[CONF_MODEL] model_dir = model_config.get(CONF_MODEL_DIR) or hass.config.path("tensorflow") labels = model_config.get(CONF_LABELS) or hass.config.path( "tensorflow", "object_detection", "data", "mscoco_label_map.pbtxt" ) checkpoint = os.path.join(model_config[CONF_GRAPH], "checkpoint") pipeline_config = os.path.join(model_config[CONF_GRAPH], "pipeline.config") # Make sure locations exist if ( not os.path.isdir(model_dir) or not os.path.isdir(checkpoint) or not os.path.exists(pipeline_config) or not os.path.exists(labels) ): _LOGGER.error("Unable to locate tensorflow model or label map") return # append custom model path to sys.path sys.path.append(model_dir) try: # Verify that the TensorFlow Object Detection API is pre-installed # These imports shouldn't be moved to the top, because they depend on code from the model_dir. # (The model_dir is created during the manual setup process. See integration docs.) 
# pylint: disable=import-outside-toplevel from object_detection.builders import model_builder from object_detection.utils import config_util, label_map_util except ImportError: _LOGGER.error( "No TensorFlow Object Detection library found! Install or compile " "for your system following instructions here: " "https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf2.md#installation" ) return try: # Display warning that PIL will be used if no OpenCV is found. import cv2 # noqa: F401 pylint: disable=unused-import, import-outside-toplevel except ImportError: _LOGGER.warning( "No OpenCV library found. TensorFlow will process image with " "PIL at reduced resolution" ) hass.data[DOMAIN] = {CONF_MODEL: None} def tensorflow_hass_start(_event): """Set up TensorFlow model on hass start.""" start = time.perf_counter() # Load pipeline config and build a detection model pipeline_configs = config_util.get_configs_from_pipeline_file(pipeline_config) detection_model = model_builder.build( model_config=pipeline_configs["model"], is_training=False ) # Restore checkpoint ckpt = tf.compat.v2.train.Checkpoint(model=detection_model) ckpt.restore(os.path.join(checkpoint, "ckpt-0")).expect_partial() _LOGGER.debug( "Model checkpoint restore took %d seconds", time.perf_counter() - start ) model = get_model_detection_function(detection_model) # Preload model cache with empty image tensor inp = np.zeros([2160, 3840, 3], dtype=np.uint8) # The input needs to be a tensor, convert it using `tf.convert_to_tensor`. input_tensor = tf.convert_to_tensor(inp, dtype=tf.float32) # The model expects a batch of images, so add an axis with `tf.newaxis`. input_tensor = input_tensor[tf.newaxis, ...] # Run inference model(input_tensor) _LOGGER.debug("Model load took %d seconds", time.perf_counter() - start) hass.data[DOMAIN][CONF_MODEL] = model hass.bus.listen_once(EVENT_HOMEASSISTANT_START, tensorflow_hass_start) category_index = label_map_util.create_category_index_from_labelmap( labels, use_display_name=True ) entities = [] for camera in config[CONF_SOURCE]: entities.append( TensorFlowImageProcessor( hass, camera[CONF_ENTITY_ID], camera.get(CONF_NAME), category_index, config, ) ) add_entities(entities) class TensorFlowImageProcessor(ImageProcessingEntity): """Representation of an TensorFlow image processor.""" def __init__( self, hass, camera_entity, name, category_index, config, ): """Initialize the TensorFlow entity.""" model_config = config.get(CONF_MODEL) self.hass = hass self._camera_entity = camera_entity if name: self._name = name else: self._name = "TensorFlow {}".format(split_entity_id(camera_entity)[1]) self._category_index = category_index self._min_confidence = config.get(CONF_CONFIDENCE) self._file_out = config.get(CONF_FILE_OUT) # handle categories and specific detection areas self._label_id_offset = model_config.get(CONF_LABEL_OFFSET) categories = model_config.get(CONF_CATEGORIES) self._include_categories = [] self._category_areas = {} for category in categories: if isinstance(category, dict): category_name = category.get(CONF_CATEGORY) category_area = category.get(CONF_AREA) self._include_categories.append(category_name) self._category_areas[category_name] = [0, 0, 1, 1] if category_area: self._category_areas[category_name] = [ category_area.get(CONF_TOP), category_area.get(CONF_LEFT), category_area.get(CONF_BOTTOM), category_area.get(CONF_RIGHT), ] else: self._include_categories.append(category) self._category_areas[category] = [0, 0, 1, 1] # Handle global detection area self._area = [0, 0, 
1, 1] area_config = model_config.get(CONF_AREA) if area_config: self._area = [ area_config.get(CONF_TOP), area_config.get(CONF_LEFT), area_config.get(CONF_BOTTOM), area_config.get(CONF_RIGHT), ] template.attach(hass, self._file_out) self._matches = {} self._total_matches = 0 self._last_image = None self._process_time = 0 @property def camera_entity(self): """Return camera entity id from process pictures.""" return self._camera_entity @property def name(self): """Return the name of the image processor.""" return self._name @property def state(self): """Return the state of the entity.""" return self._total_matches @property def extra_state_attributes(self): """Return device specific state attributes.""" return { ATTR_MATCHES: self._matches, ATTR_SUMMARY: { category: len(values) for category, values in self._matches.items() }, ATTR_TOTAL_MATCHES: self._total_matches, ATTR_PROCESS_TIME: self._process_time, } def _save_image(self, image, matches, paths): img = Image.open(io.BytesIO(bytearray(image))).convert("RGB") img_width, img_height = img.size draw = ImageDraw.Draw(img) # Draw custom global region/area if self._area != [0, 0, 1, 1]: draw_box( draw, self._area, img_width, img_height, "Detection Area", (0, 255, 255) ) for category, values in matches.items(): # Draw custom category regions/areas if category in self._category_areas and self._category_areas[category] != [ 0, 0, 1, 1, ]: label = f"{category.capitalize()} Detection Area" draw_box( draw, self._category_areas[category], img_width, img_height, label, (0, 255, 0), ) # Draw detected objects for instance in values: label = "{} {:.1f}%".format(category, instance["score"]) draw_box( draw, instance["box"], img_width, img_height, label, (255, 255, 0) ) for path in paths: _LOGGER.info("Saving results image to %s", path) if not os.path.exists(os.path.dirname(path)): os.makedirs(os.path.dirname(path), exist_ok=True) img.save(path) def process_image(self, image): """Process the image.""" model = self.hass.data[DOMAIN][CONF_MODEL] if not model: _LOGGER.debug("Model not yet ready") return start = time.perf_counter() try: import cv2 # pylint: disable=import-outside-toplevel img = cv2.imdecode(np.asarray(bytearray(image)), cv2.IMREAD_UNCHANGED) inp = img[:, :, [2, 1, 0]] # BGR->RGB inp_expanded = inp.reshape(1, inp.shape[0], inp.shape[1], 3) except ImportError: try: img = Image.open(io.BytesIO(bytearray(image))).convert("RGB") except UnidentifiedImageError: _LOGGER.warning("Unable to process image, bad data") return img.thumbnail((460, 460), Image.ANTIALIAS) img_width, img_height = img.size inp = ( np.array(img.getdata()) .reshape((img_height, img_width, 3)) .astype(np.uint8) ) inp_expanded = np.expand_dims(inp, axis=0) # The input needs to be a tensor, convert it using `tf.convert_to_tensor`. 
input_tensor = tf.convert_to_tensor(inp_expanded, dtype=tf.float32) detections = model(input_tensor) boxes = detections["detection_boxes"][0].numpy() scores = detections["detection_scores"][0].numpy() classes = ( detections["detection_classes"][0].numpy() + self._label_id_offset ).astype(int) matches = {} total_matches = 0 for box, score, obj_class in zip(boxes, scores, classes): score = score * 100 boxes = box.tolist() # Exclude matches below min confidence value if score < self._min_confidence: continue # Exclude matches outside global area definition if ( boxes[0] < self._area[0] or boxes[1] < self._area[1] or boxes[2] > self._area[2] or boxes[3] > self._area[3] ): continue category = self._category_index[obj_class]["name"] # Exclude unlisted categories if self._include_categories and category not in self._include_categories: continue # Exclude matches outside category specific area definition if self._category_areas and ( boxes[0] < self._category_areas[category][0] or boxes[1] < self._category_areas[category][1] or boxes[2] > self._category_areas[category][2] or boxes[3] > self._category_areas[category][3] ): continue # If we got here, we should include it if category not in matches: matches[category] = [] matches[category].append({"score": float(score), "box": boxes}) total_matches += 1 # Save Images if total_matches and self._file_out: paths = [] for path_template in self._file_out: if isinstance(path_template, template.Template): paths.append( path_template.render(camera_entity=self._camera_entity) ) else: paths.append(path_template) self._save_image(image, matches, paths) self._matches = matches self._total_matches = total_matches self._process_time = time.perf_counter() - start
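The filtering in process_image above rejects any detection whose normalized box [top, left, bottom, right] extends beyond the configured global or per-category area. A small sketch of that containment check as a standalone predicate, assuming the illustrative helper name box_in_area (not part of the integration):

def box_in_area(box, area):
    """Return True if a normalized detection box lies fully inside an area.

    Both ``box`` and ``area`` are [top, left, bottom, right] with values in
    the 0..1 range, matching the ordering used by the processor above.
    """
    top, left, bottom, right = box
    a_top, a_left, a_bottom, a_right = area
    return (
        top >= a_top and left >= a_left and bottom <= a_bottom and right <= a_right
    )

# A box in the upper-left quadrant passes the default whole-frame area
# but is rejected by an area restricted to the lower half of the image.
assert box_in_area([0.1, 0.1, 0.4, 0.4], [0, 0, 1, 1])
assert not box_in_area([0.1, 0.1, 0.4, 0.4], [0.5, 0, 1, 1])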
"""Tests for the Google Assistant traits.""" from datetime import datetime, timedelta from unittest.mock import patch import pytest from homeassistant.components import ( alarm_control_panel, binary_sensor, camera, cover, fan, group, input_boolean, input_select, light, lock, media_player, scene, script, sensor, switch, vacuum, ) from homeassistant.components.climate import const as climate from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import const as humidifier from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT, ) from homeassistant.core import DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE, State from homeassistant.util import color from . import BASIC_CONFIG, MockConfig from tests.common import async_capture_events, async_mock_service REQ_ID = "ff36a3cc-ec34-11e6-b1a0-64510650abcf" BASIC_DATA = helpers.RequestData( BASIC_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) PIN_CONFIG = MockConfig(secure_devices_pin="1234") PIN_DATA = helpers.RequestData( PIN_CONFIG, "test-agent", const.SOURCE_CLOUD, REQ_ID, None ) async def test_brightness_light(hass): """Test brightness trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.BrightnessTrait.supported(light.DOMAIN, light.SUPPORT_BRIGHTNESS, None) trt = trait.BrightnessTrait( hass, State("light.bla", light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"brightness": 95} events = async_capture_events(hass, EVENT_CALL_SERVICE) calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON) await trt.execute( trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {"brightness": 50}, {} ) await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "light.bla", light.ATTR_BRIGHTNESS_PCT: 50} assert len(events) == 1 assert events[0].data == { "domain": "light", "service": "turn_on", "service_data": {"brightness_pct": 50, "entity_id": "light.bla"}, } async def test_camera_stream(hass): """Test camera stream trait support for camera domain.""" await async_process_ha_core_config( hass, {"external_url": "https://example.com"}, ) assert helpers.get_google_type(camera.DOMAIN, None) is not None assert trait.CameraStreamTrait.supported(camera.DOMAIN, camera.SUPPORT_STREAM, None) trt = trait.CameraStreamTrait( hass, State("camera.bla", camera.STATE_IDLE, {}), BASIC_CONFIG ) assert trt.sync_attributes() == { "cameraStreamSupportedProtocols": ["hls"], "cameraStreamNeedAuthToken": False, "cameraStreamNeedDrmEncryption": False, } assert trt.query_attributes() == {} with patch( "homeassistant.components.camera.async_request_stream", return_value="/api/streams/bla", ): await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {}) assert trt.query_attributes() == { "cameraStreamAccessUrl": "https://example.com/api/streams/bla", "cameraStreamReceiverAppId": "B12CE3CA", } async def test_onoff_group(hass): """Test OnOff trait support for group 
domain.""" assert helpers.get_google_type(group.DOMAIN, None) is not None assert trait.OnOffTrait.supported(group.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("group.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("group.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} async def test_onoff_input_boolean(hass): """Test OnOff trait support for input_boolean domain.""" assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("input_boolean.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait( hass, State("input_boolean.bla", STATE_OFF), BASIC_CONFIG ) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} async def test_onoff_switch(hass): """Test OnOff trait support for switch domain.""" assert helpers.get_google_type(switch.DOMAIN, None) is not None assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("switch.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("switch.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} trt_assumed = trait.OnOffTrait( hass, State("switch.bla", STATE_OFF, {"assumed_state": True}), BASIC_CONFIG ) assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} async def test_onoff_fan(hass): """Test OnOff trait support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("fan.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("fan.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, 
SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} async def test_onoff_light(hass): """Test OnOff trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.OnOffTrait.supported(light.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("light.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("light.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} async def test_onoff_media_player(hass): """Test OnOff trait support for media_player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("media_player.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("media_player.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} async def test_onoff_humidifier(hass): """Test OnOff trait support for humidifier domain.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.OnOffTrait.supported(humidifier.DOMAIN, 0, None) trt_on = trait.OnOffTrait(hass, State("humidifier.bla", STATE_ON), BASIC_CONFIG) assert trt_on.sync_attributes() == {} assert trt_on.query_attributes() == {"on": True} trt_off = trait.OnOffTrait(hass, State("humidifier.bla", STATE_OFF), BASIC_CONFIG) assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} async def test_dock_vacuum(hass): """Test dock trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None) trt = 
trait.DockTrait(hass, State("vacuum.bla", vacuum.STATE_IDLE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isDocked": False} calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE) await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_vacuum(hass): """Test startStop trait support for vacuum domain.""" assert helpers.get_google_type(vacuum.DOMAIN, None) is not None assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None) trt = trait.StartStopTrait( hass, State( "vacuum.bla", vacuum.STATE_PAUSED, {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"pausable": True} assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} async def test_startstop_cover(hass): """Test startStop trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.StartStopTrait.supported(cover.DOMAIN, cover.SUPPORT_STOP, None) state = State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP}, ) trt = trait.StartStopTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {} for state_value in (cover.STATE_CLOSING, cover.STATE_OPENING): state.state = state_value assert trt.query_attributes() == {"isRunning": True} stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} for state_value in (cover.STATE_CLOSED, cover.STATE_OPEN): state.state = state_value assert trt.query_attributes() == {"isRunning": False} with pytest.raises(SmartHomeError, match="Cover is already stopped"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) with pytest.raises(SmartHomeError, match="Starting a cover is not supported"): await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) with pytest.raises( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) async def test_startstop_cover_assumed(hass): """Test startStop trait support for cover domain of assumed state.""" trt = trait.StartStopTrait( hass, State( "cover.bla", cover.STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_STOP, ATTR_ASSUMED_STATE: True}, ), 
BASIC_CONFIG, ) stop_calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_STOP_COVER) await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_color_setting_color_light(hass): """Test ColorSpectrum trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported(light.DOMAIN, light.SUPPORT_COLOR, None) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_HS_COLOR: (20, 94), light.ATTR_BRIGHTNESS: 200, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"colorModel": "hsv"} assert trt.query_attributes() == { "color": {"spectrumHsv": {"hue": 20, "saturation": 0.94, "value": 200 / 255}} } assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"spectrumRGB": 16715792}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumRGB": 1052927}}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: (240, 93.725), } await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"spectrumHSV": {"hue": 100, "saturation": 0.50, "value": 0.20}}}, {}, ) assert len(calls) == 2 assert calls[1].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_HS_COLOR: [100, 50], light.ATTR_BRIGHTNESS: 0.2 * 255, } async def test_color_setting_temperature_light(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 300, light.ATTR_MAX_MIREDS: 500, ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "colorTemperatureRange": {"temperatureMinK": 2000, "temperatureMaxK": 5000} } assert trt.query_attributes() == {"color": {"temperatureK": 3333}} assert trt.can_execute( trait.COMMAND_COLOR_ABSOLUTE, {"color": {"temperature": 400}} ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 5555}}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE await trt.execute( trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {"color": {"temperature": 2857}}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), } async def test_color_light_temperature_light_bad_temp(hass): """Test ColorTemperature trait support for light domain.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None) assert trait.ColorSettingTrait.supported( light.DOMAIN, light.SUPPORT_COLOR_TEMP, None ) trt = trait.ColorSettingTrait( hass, State( "light.bla", STATE_ON, { light.ATTR_MIN_MIREDS: 200, light.ATTR_COLOR_TEMP: 0, light.ATTR_MAX_MIREDS: 500, }, ), BASIC_CONFIG, ) assert trt.query_attributes() == {} async def test_light_modes(hass): """Test Light 
Mode trait.""" assert helpers.get_google_type(light.DOMAIN, None) is not None assert trait.ModesTrait.supported(light.DOMAIN, light.SUPPORT_EFFECT, None) trt = trait.ModesTrait( hass, State( "light.living_room", light.STATE_ON, attributes={ light.ATTR_EFFECT_LIST: ["random", "colorloop"], light.ATTR_EFFECT: "random", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "effect", "name_values": [{"name_synonym": ["effect"], "lang": "en"}], "settings": [ { "setting_name": "random", "setting_values": [ {"setting_synonym": ["random"], "lang": "en"} ], }, { "setting_name": "colorloop", "setting_values": [ {"setting_synonym": ["colorloop"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"effect": "random"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "light.living_room", "effect": "colorloop", } async def test_scene_scene(hass): """Test Scene trait support for scene domain.""" assert helpers.get_google_type(scene.DOMAIN, None) is not None assert trait.SceneTrait.supported(scene.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("scene.bla", scene.STATE), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "scene.bla"} async def test_scene_script(hass): """Test Scene trait support for script domain.""" assert helpers.get_google_type(script.DOMAIN, None) is not None assert trait.SceneTrait.supported(script.DOMAIN, 0, None) trt = trait.SceneTrait(hass, State("script.bla", STATE_OFF), BASIC_CONFIG) assert trt.sync_attributes() == {} assert trt.query_attributes() == {} assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {}) calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON) await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {}) # We don't wait till script execution is done. 
await hass.async_block_till_done() assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "script.bla"} async def test_temperature_setting_climate_onoff(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "heatcool", "on"], "thermostatTemperatureUnit": "F", } assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_ON) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "on"}, {} ) assert len(calls) == 1 calls = async_mock_service(hass, climate.DOMAIN, SERVICE_TURN_OFF) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "off"}, {} ) assert len(calls) == 1 async def test_temperature_setting_climate_no_modes(hass): """Test TemperatureSetting trait support for climate domain not supporting any modes.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_HVAC_MODES: [], climate.ATTR_MIN_TEMP: None, climate.ATTR_MAX_TEMP: None, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["heat"], "thermostatTemperatureUnit": "C", } async def test_temperature_setting_climate_range(hass): """Test TemperatureSetting trait support for climate domain - range.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_FAHRENHEIT trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_AUTO, { climate.ATTR_CURRENT_TEMPERATURE: 70, climate.ATTR_CURRENT_HUMIDITY: 25, ATTR_SUPPORTED_FEATURES: climate.SUPPORT_TARGET_TEMPERATURE_RANGE, climate.ATTR_HVAC_MODES: [ STATE_OFF, climate.HVAC_MODE_COOL, climate.HVAC_MODE_HEAT, climate.HVAC_MODE_AUTO, ], climate.ATTR_TARGET_TEMP_HIGH: 75, climate.ATTR_TARGET_TEMP_LOW: 65, climate.ATTR_MIN_TEMP: 50, climate.ATTR_MAX_TEMP: 80, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "heat", "auto", "on"], "thermostatTemperatureUnit": "F", } assert trt.query_attributes() == { "thermostatMode": "auto", "thermostatTemperatureAmbient": 21.1, "thermostatHumidityAmbient": 25, "thermostatTemperatureSetpointLow": 18.3, "thermostatTemperatureSetpointHigh": 23.9, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, { "thermostatTemperatureSetpointHigh": 25, 
"thermostatTemperatureSetpointLow": 20, }, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TARGET_TEMP_HIGH: 77, climate.ATTR_TARGET_TEMP_LOW: 68, } calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE) await trt.execute( trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {"thermostatMode": "cool"}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "climate.bla", climate.ATTR_HVAC_MODE: climate.HVAC_MODE_COOL, } with pytest.raises(helpers.SmartHomeError) as err: await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE hass.config.units.temperature_unit = TEMP_CELSIUS async def test_temperature_setting_climate_setpoint(hass): """Test TemperatureSetting trait support for climate domain - setpoint.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None) hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_COOL, { climate.ATTR_HVAC_MODES: [STATE_OFF, climate.HVAC_MODE_COOL], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "cool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "cool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpoint": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) with pytest.raises(helpers.SmartHomeError): await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": -100}, {}, ) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_temperature_setting_climate_setpoint_auto(hass): """ Test TemperatureSetting trait support for climate domain. Setpoint in auto mode. 
""" hass.config.units.temperature_unit = TEMP_CELSIUS trt = trait.TemperatureSettingTrait( hass, State( "climate.bla", climate.HVAC_MODE_HEAT_COOL, { climate.ATTR_HVAC_MODES: [ climate.HVAC_MODE_OFF, climate.HVAC_MODE_HEAT_COOL, ], climate.ATTR_MIN_TEMP: 10, climate.ATTR_MAX_TEMP: 30, ATTR_TEMPERATURE: 18, climate.ATTR_CURRENT_TEMPERATURE: 20, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableThermostatModes": ["off", "heatcool", "on"], "thermostatTemperatureUnit": "C", } assert trt.query_attributes() == { "thermostatMode": "heatcool", "thermostatTemperatureAmbient": 20, "thermostatTemperatureSetpointHigh": 18, "thermostatTemperatureSetpointLow": 18, } assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {}) assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE) await trt.execute( trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA, {"thermostatTemperatureSetpoint": 19}, {}, ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "climate.bla", ATTR_TEMPERATURE: 19} async def test_humidity_setting_humidifier_setpoint(hass): """Test HumiditySetting trait support for humidifier domain - setpoint.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.HumiditySettingTrait.supported(humidifier.DOMAIN, 0, None) trt = trait.HumiditySettingTrait( hass, State( "humidifier.bla", STATE_ON, { humidifier.ATTR_MIN_HUMIDITY: 20, humidifier.ATTR_MAX_HUMIDITY: 90, humidifier.ATTR_HUMIDITY: 38, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "humiditySetpointRange": {"minPercent": 20, "maxPercent": 90} } assert trt.query_attributes() == { "humiditySetpointPercent": 38, } assert trt.can_execute(trait.COMMAND_SET_HUMIDITY, {}) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_HUMIDITY) await trt.execute(trait.COMMAND_SET_HUMIDITY, BASIC_DATA, {"humidity": 32}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "humidifier.bla", humidifier.ATTR_HUMIDITY: 32, } async def test_lock_unlock_lock(hass): """Test LockUnlock trait locking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) assert trait.LockUnlockTrait.might_2fa(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} async def test_lock_unlock_unlock(hass): """Test LockUnlock trait unlocking support for lock domain.""" assert helpers.get_google_type(lock.DOMAIN, None) is not None assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN, None) trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), PIN_CONFIG ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"isLocked": True} assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, 
{"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} # Test without pin trt = trait.LockUnlockTrait( hass, State("lock.front_door", lock.STATE_LOCKED), BASIC_CONFIG ) with pytest.raises(error.SmartHomeError) as err: await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP # Test with 2FA override with patch.object( BASIC_CONFIG, "should_2fa", return_value=False, ): await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 async def test_arm_disarm_arm_away(hass): """Test ArmDisarm trait Arming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_home", "level_values": [ {"level_synonym": ["armed home", "home"], "lang": "en"} ], }, { "level_name": "armed_away", "level_values": [ {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, ], "ordered": False, } } assert trt.query_attributes() == { "isArmed": True, "currentArmLevel": STATE_ALARM_ARMED_AWAY, } assert trt.can_execute( trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} ) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_ARM_AWAY ) # Test with no secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 assert err.value.code == 
const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {"pin": "1234"}, ) assert len(calls) == 1 # Test already armed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_ARMED # Test with code_arm_required False trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, ) async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" assert helpers.get_google_type(alarm_control_panel.DOMAIN, None) is not None assert trait.ArmDisArmTrait.supported(alarm_control_panel.DOMAIN, 0, None) assert trait.ArmDisArmTrait.might_2fa(alarm_control_panel.DOMAIN, 0, None) trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ { "level_name": "armed_custom_bypass", "level_values": [ { "level_synonym": ["armed custom bypass", "custom"], "lang": "en", } ], }, { "level_name": "triggered", "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], }, ], "ordered": False, } } assert trt.query_attributes() == {"isArmed": False} assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM ) # Test without secure_pin configured with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED # correct pin await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 
"1234"} ) assert len(calls) == 1 # Test already disarmed with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # Cancel arming while pending to arm doesn't require pin trt = trait.ArmDisArmTrait( hass, State( "alarm_control_panel.alarm", STATE_ALARM_PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 async def test_fan_speed(hass): """Test FanSpeed trait speed control support for fan domain.""" assert helpers.get_google_type(fan.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED, None) trt = trait.FanSpeedTrait( hass, State( "fan.living_room_fan", fan.SPEED_HIGH, attributes={ "speed_list": [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ], "speed": "low", "percentage": 33, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "off", "speed_values": [{"speed_synonym": ["stop", "off"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [ { "speed_synonym": ["slow", "low", "slowest", "lowest"], "lang": "en", } ], }, { "speed_name": "medium", "speed_values": [ {"speed_synonym": ["medium", "mid", "middle"], "lang": "en"} ], }, { "speed_name": "high", "speed_values": [ { "speed_synonym": [ "high", "max", "fast", "highest", "fastest", "maximum", ], "lang": "en", } ], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", "on": True, "currentFanSpeedPercent": 33, } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "speed": "medium"} assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} async def test_climate_fan_speed(hass): """Test FanSpeed trait speed control support for climate domain.""" assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.FanSpeedTrait.supported(climate.DOMAIN, climate.SUPPORT_FAN_MODE, None) trt = trait.FanSpeedTrait( hass, State( "climate.living_room_ac", "on", attributes={ "fan_modes": ["auto", "low", "medium", "high"], "fan_mode": "low", }, ), BASIC_CONFIG, ) assert 
trt.sync_attributes() == { "availableFanSpeeds": { "ordered": True, "speeds": [ { "speed_name": "auto", "speed_values": [{"speed_synonym": ["auto"], "lang": "en"}], }, { "speed_name": "low", "speed_values": [{"speed_synonym": ["low"], "lang": "en"}], }, { "speed_name": "medium", "speed_values": [{"speed_synonym": ["medium"], "lang": "en"}], }, { "speed_name": "high", "speed_values": [{"speed_synonym": ["high"], "lang": "en"}], }, ], }, "reversible": False, "supportsFanSpeedPercent": True, } assert trt.query_attributes() == { "currentFanSpeedSetting": "low", } assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) assert len(calls) == 1 assert calls[0].data == { "entity_id": "climate.living_room_ac", "fan_mode": "medium", } async def test_inputselector(hass): """Test input selector trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.InputSelectorTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None ) trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: [ "media", "game", "chromecast", "plex", ], media_player.ATTR_INPUT_SOURCE: "game", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableInputs": [ {"key": "media", "names": [{"name_synonym": ["media"], "lang": "en"}]}, {"key": "game", "names": [{"name_synonym": ["game"], "lang": "en"}]}, { "key": "chromecast", "names": [{"name_synonym": ["chromecast"], "lang": "en"}], }, {"key": "plex", "names": [{"name_synonym": ["plex"], "lang": "en"}]}, ], "orderedInputs": True, } assert trt.query_attributes() == { "currentInput": "game", } assert trt.can_execute( trait.COMMAND_INPUT, params={"newInput": "media"}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( trait.COMMAND_INPUT, BASIC_DATA, {"newInput": "media"}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "media_player.living_room", "source": "media"} @pytest.mark.parametrize( "sources,source,source_next,source_prev", [ (["a"], "a", "a", "a"), (["a", "b"], "a", "b", "b"), (["a", "b", "c"], "a", "b", "c"), ], ) async def test_inputselector_nextprev(hass, sources, source, source_next, source_prev): """Test input selector trait.""" trt = trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) assert trt.can_execute("action.devices.commands.NextInput", params={}) assert trt.can_execute("action.devices.commands.PreviousInput", params={}) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) assert len(calls) == 2 assert calls[0].data == { "entity_id": "media_player.living_room", "source": source_next, } assert calls[1].data == { "entity_id": "media_player.living_room", "source": source_prev, } @pytest.mark.parametrize( "sources,source", [(None, "a"), (["a", "b"], None), (["a", "b"], "c")] ) async def test_inputselector_nextprev_invalid(hass, sources, source): """Test input selector trait.""" trt = 
trait.InputSelectorTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_INPUT_SOURCE_LIST: sources, media_player.ATTR_INPUT_SOURCE: source, }, ), BASIC_CONFIG, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.NextInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.PreviousInput", BASIC_DATA, {}, {}, ) with pytest.raises(SmartHomeError): await trt.execute( "action.devices.commands.InvalidCommand", BASIC_DATA, {}, {}, ) async def test_modes_input_select(hass): """Test Input Select Mode trait.""" assert helpers.get_google_type(input_select.DOMAIN, None) is not None assert trait.ModesTrait.supported(input_select.DOMAIN, None, None) trt = trait.ModesTrait( hass, State("input_select.bla", "unavailable"), BASIC_CONFIG, ) assert trt.sync_attributes() == {"availableModes": []} trt = trait.ModesTrait( hass, State( "input_select.bla", "abc", attributes={input_select.ATTR_OPTIONS: ["abc", "123", "xyz"]}, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "option", "name_values": [ { "name_synonym": ["option", "setting", "mode", "value"], "lang": "en", } ], "settings": [ { "setting_name": "abc", "setting_values": [{"setting_synonym": ["abc"], "lang": "en"}], }, { "setting_name": "123", "setting_values": [{"setting_synonym": ["123"], "lang": "en"}], }, { "setting_name": "xyz", "setting_values": [{"setting_synonym": ["xyz"], "lang": "en"}], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"option": "abc"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service( hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "input_select.bla", "option": "xyz"} async def test_modes_humidifier(hass): """Test Humidifier Mode trait.""" assert helpers.get_google_type(humidifier.DOMAIN, None) is not None assert trait.ModesTrait.supported(humidifier.DOMAIN, humidifier.SUPPORT_MODES, None) trt = trait.ModesTrait( hass, State( "humidifier.humidifier", STATE_OFF, attributes={ humidifier.ATTR_AVAILABLE_MODES: [ humidifier.MODE_NORMAL, humidifier.MODE_AUTO, humidifier.MODE_AWAY, ], ATTR_SUPPORTED_FEATURES: humidifier.SUPPORT_MODES, humidifier.ATTR_MIN_HUMIDITY: 30, humidifier.ATTR_MAX_HUMIDITY: 99, humidifier.ATTR_HUMIDITY: 50, ATTR_MODE: humidifier.MODE_AUTO, }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "mode", "name_values": [{"name_synonym": ["mode"], "lang": "en"}], "settings": [ { "setting_name": "normal", "setting_values": [ {"setting_synonym": ["normal"], "lang": "en"} ], }, { "setting_name": "auto", "setting_values": [{"setting_synonym": ["auto"], "lang": "en"}], }, { "setting_name": "away", "setting_values": [{"setting_synonym": ["away"], "lang": "en"}], }, ], "ordered": False, }, ] } assert trt.query_attributes() == { "currentModeSettings": {"mode": "auto"}, "on": False, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, ) assert len(calls) == 1 assert 
calls[0].data == { "entity_id": "humidifier.humidifier", "mode": "away", } async def test_sound_modes(hass): """Test Mode trait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.ModesTrait.supported( media_player.DOMAIN, media_player.SUPPORT_SELECT_SOUND_MODE, None ) trt = trait.ModesTrait( hass, State( "media_player.living_room", media_player.STATE_PLAYING, attributes={ media_player.ATTR_SOUND_MODE_LIST: ["stereo", "prologic"], media_player.ATTR_SOUND_MODE: "stereo", }, ), BASIC_CONFIG, ) attribs = trt.sync_attributes() assert attribs == { "availableModes": [ { "name": "sound mode", "name_values": [ {"name_synonym": ["sound mode", "effects"], "lang": "en"} ], "settings": [ { "setting_name": "stereo", "setting_values": [ {"setting_synonym": ["stereo"], "lang": "en"} ], }, { "setting_name": "prologic", "setting_values": [ {"setting_synonym": ["prologic"], "lang": "en"} ], }, ], "ordered": False, } ] } assert trt.query_attributes() == { "currentModeSettings": {"sound mode": "stereo"}, "on": True, } assert trt.can_execute( trait.COMMAND_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( trait.COMMAND_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, ) assert len(calls) == 1 assert calls[0].data == { "entity_id": "media_player.living_room", "sound_mode": "stereo", } async def test_openclose_cover(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { cover.ATTR_CURRENT_POSITION: 75, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls_set = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_open = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} assert len(calls_open) == 1 assert calls_open[0].data == {ATTR_ENTITY_ID: "cover.bla"} async def test_openclose_cover_unknown_state(hass): """Test OpenClose trait support for cover domain with unknown state.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) # No state trt = trait.OpenCloseTrait( hass, State( "cover.bla", STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN} ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises(helpers.SmartHomeError): trt.query_attributes() async def test_openclose_cover_assumed_state(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert 
trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, None ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == {"commandOnlyOpenClose": True} assert trt.query_attributes() == {} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 40} async def test_openclose_cover_query_only(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported(cover.DOMAIN, 0, None) state = State( "cover.bla", cover.STATE_OPEN, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == { "discreteOnlyOpenClose": True, "queryOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} async def test_openclose_cover_no_position(hass): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, None) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, None ) state = State( "cover.bla", cover.STATE_OPEN, { ATTR_SUPPORTED_FEATURES: cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE, }, ) trt = trait.OpenCloseTrait( hass, state, BASIC_CONFIG, ) assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 100} state.state = cover.STATE_CLOSED assert trt.sync_attributes() == {"discreteOnlyOpenClose": True} assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_CLOSE_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla"} with pytest.raises( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( "device_class", (cover.DEVICE_CLASS_DOOR, cover.DEVICE_CLASS_GARAGE, cover.DEVICE_CLASS_GATE), ) async def test_openclose_cover_secure(hass, device_class): """Test OpenClose trait support for cover domain.""" assert helpers.get_google_type(cover.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) assert trait.OpenCloseTrait.might_2fa( cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class ) trt = trait.OpenCloseTrait( hass, State( "cover.bla", cover.STATE_OPEN, { ATTR_DEVICE_CLASS: device_class, ATTR_SUPPORTED_FEATURES: cover.SUPPORT_SET_POSITION, cover.ATTR_CURRENT_POSITION: 75, }, ), PIN_CONFIG, ) assert trt.sync_attributes() == {} assert trt.query_attributes() == {"openPercent": 75} calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION) calls_close = async_mock_service(hass, 
cover.DOMAIN, cover.SERVICE_CLOSE_COVER) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @pytest.mark.parametrize( "device_class", ( binary_sensor.DEVICE_CLASS_DOOR, binary_sensor.DEVICE_CLASS_GARAGE_DOOR, binary_sensor.DEVICE_CLASS_LOCK, binary_sensor.DEVICE_CLASS_OPENING, binary_sensor.DEVICE_CLASS_WINDOW, ), ) async def test_openclose_binary_sensor(hass, device_class): """Test OpenClose trait support for binary_sensor domain.""" assert helpers.get_google_type(binary_sensor.DOMAIN, device_class) is not None assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN, 0, device_class) trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_ON, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 100} trt = trait.OpenCloseTrait( hass, State("binary_sensor.test", STATE_OFF, {ATTR_DEVICE_CLASS: device_class}), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyOpenClose": True, "discreteOnlyOpenClose": True, } assert trt.query_attributes() == {"openPercent": 0} async def test_volume_media_player(hass): """Test volume trait support for media player domain.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": False, } assert trt.query_attributes() == {"currentVolume": 30} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 60}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6, } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4, } async def test_volume_media_player_relative(hass): """Test volume trait support for relative-volume-only media players.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, 
media_player.SUPPORT_VOLUME_STEP, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_ASSUMED_STATE: True, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": False, "commandOnlyVolume": True, } assert trt.query_attributes() == {} calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN ) await trt.execute( trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {}, ) assert len(calls) == 10 for call in calls: assert call.data == { ATTR_ENTITY_ID: "media_player.bla", } with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {}) with pytest.raises(SmartHomeError): await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}) async def test_media_player_mute(hass): """Test volume trait support for muting.""" assert trait.VolumeTrait.supported( media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE, None, ) trt = trait.VolumeTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { ATTR_SUPPORTED_FEATURES: ( media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE ), media_player.ATTR_MEDIA_VOLUME_MUTED: False, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "volumeMaxLevel": 100, "levelStepSize": 10, "volumeCanMuteAndUnmute": True, "commandOnlyVolume": False, } assert trt.query_attributes() == {"isMuted": False} mute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {}, ) assert len(mute_calls) == 1 assert mute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: True, } unmute_calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE ) await trt.execute( trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {}, ) assert len(unmute_calls) == 1 assert unmute_calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_VOLUME_MUTED: False, } async def test_temperature_setting_sensor(hass): """Test TemperatureSetting trait support for temperature sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE) is not None ) assert not trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) assert trait.TemperatureSettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) @pytest.mark.parametrize( "unit_in,unit_out,state,ambient", [ (TEMP_FAHRENHEIT, "F", "70", 21.1), (TEMP_CELSIUS, "C", "21.1", 21.1), (TEMP_FAHRENHEIT, "F", "unavailable", None), (TEMP_FAHRENHEIT, "F", "unknown", None), ], ) async def test_temperature_setting_sensor_data(hass, unit_in, unit_out, state, ambient): """Test TemperatureSetting trait support for temperature sensor.""" hass.config.units.temperature_unit = unit_in trt = trait.TemperatureSettingTrait( hass, State( "sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_TEMPERATURE} ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "queryOnlyTemperatureSetting": True, 
"thermostatTemperatureUnit": unit_out, } if ambient: assert trt.query_attributes() == {"thermostatTemperatureAmbient": ambient} else: assert trt.query_attributes() == {} hass.config.units.temperature_unit = TEMP_CELSIUS async def test_humidity_setting_sensor(hass): """Test HumiditySetting trait support for humidity sensor.""" assert ( helpers.get_google_type(sensor.DOMAIN, sensor.DEVICE_CLASS_HUMIDITY) is not None ) assert not trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_TEMPERATURE ) assert trait.HumiditySettingTrait.supported( sensor.DOMAIN, 0, sensor.DEVICE_CLASS_HUMIDITY ) @pytest.mark.parametrize( "state,ambient", [("70", 70), ("unavailable", None), ("unknown", None)] ) async def test_humidity_setting_sensor_data(hass, state, ambient): """Test HumiditySetting trait support for humidity sensor.""" trt = trait.HumiditySettingTrait( hass, State("sensor.test", state, {ATTR_DEVICE_CLASS: sensor.DEVICE_CLASS_HUMIDITY}), BASIC_CONFIG, ) assert trt.sync_attributes() == {"queryOnlyHumiditySetting": True} if ambient: assert trt.query_attributes() == {"humidityAmbientPercent": ambient} else: assert trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED async def test_transport_control(hass): """Test the TransportControlTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None for feature in trait.MEDIA_COMMAND_SUPPORT_MAPPING.values(): assert trait.TransportControlTrait.supported(media_player.DOMAIN, feature, None) now = datetime(2020, 1, 1) trt = trait.TransportControlTrait( hass, State( "media_player.bla", media_player.STATE_PLAYING, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_POSITION_UPDATED_AT: now - timedelta(seconds=10), media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "transportControlSupportedCommands": ["RESUME", "STOP"] } assert trt.query_attributes() == {} # COMMAND_MEDIA_SEEK_RELATIVE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) # Patch to avoid time ticking over during the command failing the test with patch("homeassistant.util.dt.utcnow", return_value=now): await trt.execute( trait.COMMAND_MEDIA_SEEK_RELATIVE, BASIC_DATA, {"relativePositionMs": 10000}, {}, ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", # 100s (current position) + 10s (from command) + 10s (from updated_at) media_player.ATTR_MEDIA_SEEK_POSITION: 120, } # COMMAND_MEDIA_SEEK_TO_POSITION calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK ) await trt.execute( trait.COMMAND_MEDIA_SEEK_TO_POSITION, BASIC_DATA, {"absPositionMs": 50000}, {} ) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SEEK_POSITION: 50, } # COMMAND_MEDIA_NEXT calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_NEXT_TRACK ) await trt.execute(trait.COMMAND_MEDIA_NEXT, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PAUSE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE ) await trt.execute(trait.COMMAND_MEDIA_PAUSE, BASIC_DATA, {}, {}) assert len(calls) == 1 
assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_PREVIOUS calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PREVIOUS_TRACK ) await trt.execute(trait.COMMAND_MEDIA_PREVIOUS, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_RESUME calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY ) await trt.execute(trait.COMMAND_MEDIA_RESUME, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} # COMMAND_MEDIA_SHUFFLE calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_SHUFFLE_SET ) await trt.execute(trait.COMMAND_MEDIA_SHUFFLE, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "media_player.bla", media_player.ATTR_MEDIA_SHUFFLE: True, } # COMMAND_MEDIA_STOP calls = async_mock_service( hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_STOP ) await trt.execute(trait.COMMAND_MEDIA_STOP, BASIC_DATA, {}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @pytest.mark.parametrize( "state", ( STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_ON, STATE_PAUSED, STATE_STANDBY, STATE_UNAVAILABLE, STATE_UNKNOWN, ), ) async def test_media_state(hass, state): """Test the MediaStateTrait.""" assert helpers.get_google_type(media_player.DOMAIN, None) is not None assert trait.TransportControlTrait.supported( media_player.DOMAIN, media_player.SUPPORT_PLAY, None ) trt = trait.MediaStateTrait( hass, State( "media_player.bla", state, { media_player.ATTR_MEDIA_POSITION: 100, media_player.ATTR_MEDIA_DURATION: 200, media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5, ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_PLAY | media_player.SUPPORT_STOP, }, ), BASIC_CONFIG, ) assert trt.sync_attributes() == { "supportActivityState": True, "supportPlaybackState": True, } assert trt.query_attributes() == { "activityState": trt.activity_lookup.get(state), "playbackState": trt.playback_lookup.get(state), }